Actual source code: matis.c

petsc-master 2016-09-28
Report Typos and Errors
  1: /*
  2:     Creates a matrix class for using the Neumann-Neumann type preconditioners.
  3:     This stores the matrices in globally unassembled form. Each processor
  4:     assembles only its local Neumann problem and the parallel matrix vector
  5:     product is handled "implicitly".

  7:     Currently this allows for only one subdomain per processor.
  8: */

 10:  #include <../src/mat/impls/is/matis.h>

 12: #define MATIS_MAX_ENTRIES_INSERTION 2048

 14: static PetscErrorCode MatISComputeSF_Private(Mat);

 18: PetscErrorCode MatTranspose_IS(Mat A,MatReuse reuse,Mat *B)
 19: {
 20:   Mat                    C,lC,lA;
 21:   ISLocalToGlobalMapping rl2g,cl2g;
 22:   PetscBool              notset = PETSC_FALSE,cong = PETSC_TRUE;
 23:   PetscErrorCode         ierr;

 26:   if (reuse == MAT_REUSE_MATRIX) {
 27:     PetscBool rcong,ccong;

 29:     PetscLayoutCompare((*B)->cmap,A->rmap,&rcong);
 30:     PetscLayoutCompare((*B)->rmap,A->cmap,&ccong);
 31:     cong = (PetscBool)(rcong || ccong);
 32:   }
 33:   if (reuse == MAT_INITIAL_MATRIX || *B == A || !cong) {
 34:     MatCreate(PetscObjectComm((PetscObject)A),&C);
 35:   } else {
 36:     PetscObjectTypeCompare((PetscObject)*B,MATIS,&notset);
 37:     C = *B;
 38:   }

 40:   if (!notset) {
 41:     MatSetSizes(C,A->cmap->n,A->rmap->n,A->cmap->N,A->rmap->N);
 42:     MatSetBlockSizes(C,PetscAbs(A->cmap->bs),PetscAbs(A->rmap->bs));
 43:     MatSetType(C,MATIS);
 44:     MatGetLocalToGlobalMapping(A,&rl2g,&cl2g);
 45:     MatSetLocalToGlobalMapping(C,cl2g,rl2g);
 46:   }

 48:   /* perform local transposition */
 49:   MatISGetLocalMat(A,&lA);
 50:   MatTranspose(lA,MAT_INITIAL_MATRIX,&lC);
 51:   MatISSetLocalMat(C,lC);
 52:   MatDestroy(&lC);

 54:   MatAssemblyBegin(C,MAT_FINAL_ASSEMBLY);
 55:   MatAssemblyEnd(C,MAT_FINAL_ASSEMBLY);

 57:   if (reuse == MAT_INITIAL_MATRIX || *B != A) {
 58:     if (!cong) {
 59:       MatDestroy(B);
 60:     }
 61:     *B = C;
 62:   } else {
 63:     MatHeaderMerge(A,&C);
 64:   }
 65:   return(0);
 66: }

 70: PetscErrorCode  MatDiagonalSet_IS(Mat A,Vec D,InsertMode insmode)
 71: {
 72:   Mat_IS         *is = (Mat_IS*)A->data;

 76:   if (!D) { /* this code branch is used by MatShift_IS */
 77:     VecSet(is->y,1.);
 78:   } else {
 79:     VecScatterBegin(is->rctx,D,is->y,INSERT_VALUES,SCATTER_FORWARD);
 80:     VecScatterEnd(is->rctx,D,is->y,INSERT_VALUES,SCATTER_FORWARD);
 81:   }
 82:   VecPointwiseDivide(is->y,is->y,is->counter);
 83:   MatDiagonalSet(is->A,is->y,insmode);
 84:   return(0);
 85: }

 89: PetscErrorCode  MatShift_IS(Mat A,PetscScalar a)
 90: {

 94:   MatDiagonalSet_IS(A,NULL,ADD_VALUES);
 95:   return(0);
 96: }

100: static PetscErrorCode MatSetValuesLocal_SubMat_IS(Mat A,PetscInt m,const PetscInt *rows, PetscInt n,const PetscInt *cols,const PetscScalar *values,InsertMode addv)
101: {
103:   Mat_IS         *is = (Mat_IS*)A->data;
104:   PetscInt       rows_l[MATIS_MAX_ENTRIES_INSERTION],cols_l[MATIS_MAX_ENTRIES_INSERTION];

107: #if defined(PETSC_USE_DEBUG)
108:   if (m > MATIS_MAX_ENTRIES_INSERTION || n > MATIS_MAX_ENTRIES_INSERTION) SETERRQ3(PETSC_COMM_SELF,PETSC_ERR_SUP,"Number of row/column indices must be <= %D: they are %D %D",MATIS_MAX_ENTRIES_INSERTION,m,n);
109: #endif
110:   ISLocalToGlobalMappingApply(A->rmap->mapping,m,rows,rows_l);
111:   ISLocalToGlobalMappingApply(A->cmap->mapping,n,cols,cols_l);
112:   MatSetValues(is->A,m,rows_l,n,cols_l,values,addv);
113:   return(0);
114: }

118: static PetscErrorCode MatSetValuesBlockedLocal_SubMat_IS(Mat A,PetscInt m,const PetscInt *rows, PetscInt n,const PetscInt *cols,const PetscScalar *values,InsertMode addv)
119: {
121:   Mat_IS         *is = (Mat_IS*)A->data;
122:   PetscInt       rows_l[MATIS_MAX_ENTRIES_INSERTION],cols_l[MATIS_MAX_ENTRIES_INSERTION];

125: #if defined(PETSC_USE_DEBUG)
126:   if (m > MATIS_MAX_ENTRIES_INSERTION || n > MATIS_MAX_ENTRIES_INSERTION) SETERRQ3(PETSC_COMM_SELF,PETSC_ERR_SUP,"Number of row/column block indices must be <= %D: they are %D %D",MATIS_MAX_ENTRIES_INSERTION,m,n);
127: #endif
128:   ISLocalToGlobalMappingApplyBlock(A->rmap->mapping,m,rows,rows_l);
129:   ISLocalToGlobalMappingApplyBlock(A->cmap->mapping,n,cols,cols_l);
130:   MatSetValuesBlocked(is->A,m,rows_l,n,cols_l,values,addv);
131:   return(0);
132: }

/*
   PetscLayoutMapLocal_Private - Given N global indices, determines which of
   them fall in the local ownership range of map and returns those in local
   numbering.

   Input Parameters:
+  map  - the layout defining the parallel ownership ranges
.  N    - the number of global indices listed by this process
-  idxs - the global indices (a process may list indices owned elsewhere)

   Output Parameters:
+  on     - if not NULL, the number of owned indices that were referenced
.  oidxs  - if not NULL, those indices in local numbering (allocated here;
            the caller is responsible for freeing it)
-  ogidxs - if not NULL, for each returned index its position in the global
            "compressed" ordering of all valid (non-negative) input indices
            (allocated here; the caller is responsible for freeing it)
*/
static PetscErrorCode PetscLayoutMapLocal_Private(PetscLayout map,PetscInt N,const PetscInt idxs[], PetscInt *on,PetscInt **oidxs,PetscInt **ogidxs)
{
  PetscInt      *owners = map->range;
  PetscInt       n      = map->n;
  PetscSF        sf;
  PetscInt      *lidxs,*work = NULL;
  PetscSFNode   *ridxs;
  PetscMPIInt    rank;
  PetscInt       r, p = 0, len = 0;

  /* Create SF where leaves are input idxs and roots are owned idxs (code adapted from MatZeroRowsMapLocal_Private) */
  MPI_Comm_rank(map->comm,&rank);
  PetscMalloc1(n,&lidxs);
  /* -1 marks an owned index that no process referenced */
  for (r = 0; r < n; ++r) lidxs[r] = -1;
  PetscMalloc1(N,&ridxs);
  for (r = 0; r < N; ++r) {
    const PetscInt idx = idxs[r];
    if (idx < 0 || map->N <= idx) SETERRQ2(PETSC_COMM_SELF,PETSC_ERR_ARG_OUTOFRANGE,"Index %D out of range [0,%D)",idx,map->N);
    if (idx < owners[p] || owners[p+1] <= idx) { /* short-circuit the search if the last p owns this idx too */
      PetscLayoutFindOwner(map,idx,&p);
    }
    ridxs[r].rank = p;
    ridxs[r].index = idxs[r] - owners[p];
  }
  PetscSFCreate(map->comm,&sf);
  PetscSFSetGraph(sf,n,N,NULL,PETSC_OWN_POINTER,ridxs,PETSC_OWN_POINTER);
  /* logical-or reduction flags (in lidxs) the owned indices referenced by some process */
  PetscSFReduceBegin(sf,MPIU_INT,(PetscInt*)idxs,lidxs,MPI_LOR);
  PetscSFReduceEnd(sf,MPIU_INT,(PetscInt*)idxs,lidxs,MPI_LOR);
  if (ogidxs) { /* communicate global idxs */
    PetscInt cum = 0,start,*work2;

    PetscMalloc1(n,&work);
    PetscCalloc1(N,&work2);
    /* count this process' valid entries and compute its exclusive prefix (start)
       in the global compressed numbering */
    for (r = 0; r < N; ++r) if (idxs[r] >=0) cum++;
    MPI_Scan(&cum,&start,1,MPIU_INT,MPI_SUM,map->comm);
    start -= cum;
    cum = 0;
    for (r = 0; r < N; ++r) if (idxs[r] >=0) work2[r] = start+cum++;
    /* push each leaf's compressed position to its owner */
    PetscSFReduceBegin(sf,MPIU_INT,work2,work,MPIU_REPLACE);
    PetscSFReduceEnd(sf,MPIU_INT,work2,work,MPIU_REPLACE);
    PetscFree(work2);
  }
  PetscSFDestroy(&sf);
  /* Compress and put in indices */
  for (r = 0; r < n; ++r)
    if (lidxs[r] >= 0) {
      if (work) work[len] = work[r];
      lidxs[len++] = r;
    }
  if (on) *on = len;
  if (oidxs) *oidxs = lidxs;
  if (ogidxs) *ogidxs = work;
  return(0);
}

/*
   MatGetSubMatrix_IS - Extracts a parallel submatrix of a MATIS matrix.

   irow and icol must not contain duplicate entries (checked, with
   communication, when PETSC_USE_DEBUG is defined). With MAT_INITIAL_MATRIX,
   the requested rows/columns are first communicated to their owners in the
   layout and then broadcast back to the subdomains through the matrix SFs,
   in order to build (i) the local-to-global mappings of the new matrix and
   (ii) the local index sets, cached in getsub_ris/getsub_cis, used to
   extract the local submatrix. MAT_REUSE_MATRIX reuses the cached sets.
*/
static PetscErrorCode MatGetSubMatrix_IS(Mat mat,IS irow,IS icol,MatReuse scall,Mat *newmat)
{
  Mat               locmat,newlocmat;
  Mat_IS            *newmatis;
#if defined(PETSC_USE_DEBUG)
  Vec               rtest,ltest;
  const PetscScalar *array;
#endif
  const PetscInt    *idxs;
  PetscInt          i,m,n;
  PetscErrorCode    ierr;

  if (scall == MAT_REUSE_MATRIX) {
    PetscBool ismatis;

    PetscObjectTypeCompare((PetscObject)*newmat,MATIS,&ismatis);
    if (!ismatis) SETERRQ(PetscObjectComm((PetscObject)*newmat),PETSC_ERR_ARG_WRONG,"Cannot reuse matrix! Not of MATIS type");
    newmatis = (Mat_IS*)(*newmat)->data;
    if (!newmatis->getsub_ris) SETERRQ(PetscObjectComm((PetscObject)*newmat),PETSC_ERR_ARG_WRONG,"Cannot reuse matrix! Misses local row IS");
    if (!newmatis->getsub_cis) SETERRQ(PetscObjectComm((PetscObject)*newmat),PETSC_ERR_ARG_WRONG,"Cannot reuse matrix! Misses local col IS");
  }
  /* irow and icol may not have duplicate entries */
#if defined(PETSC_USE_DEBUG)
  /* count (via ADD_VALUES) how many times each global row/col is requested:
     any assembled value other than 0 or 1 means a duplicate entry */
  MatCreateVecs(mat,&ltest,&rtest);
  ISGetLocalSize(irow,&n);
  ISGetIndices(irow,&idxs);
  for (i=0;i<n;i++) {
    VecSetValue(rtest,idxs[i],1.0,ADD_VALUES);
  }
  VecAssemblyBegin(rtest);
  VecAssemblyEnd(rtest);
  VecGetLocalSize(rtest,&n);
  VecGetOwnershipRange(rtest,&m,NULL);
  VecGetArrayRead(rtest,&array);
  for (i=0;i<n;i++) if (array[i] != 0. && array[i] != 1.) SETERRQ2(PETSC_COMM_SELF,PETSC_ERR_SUP,"Index %D counted %D times! Irow may not have duplicate entries",i+m,(PetscInt)PetscRealPart(array[i]));
  VecRestoreArrayRead(rtest,&array);
  ISRestoreIndices(irow,&idxs);
  ISGetLocalSize(icol,&n);
  ISGetIndices(icol,&idxs);
  for (i=0;i<n;i++) {
    VecSetValue(ltest,idxs[i],1.0,ADD_VALUES);
  }
  VecAssemblyBegin(ltest);
  VecAssemblyEnd(ltest);
  VecGetLocalSize(ltest,&n);
  VecGetOwnershipRange(ltest,&m,NULL);
  VecGetArrayRead(ltest,&array);
  for (i=0;i<n;i++) if (array[i] != 0. && array[i] != 1.) SETERRQ2(PETSC_COMM_SELF,PETSC_ERR_SUP,"Index %D counted %D times! Icol may not have duplicate entries",i+m,(PetscInt)PetscRealPart(array[i]));
  VecRestoreArrayRead(ltest,&array);
  ISRestoreIndices(icol,&idxs);
  VecDestroy(&rtest);
  VecDestroy(&ltest);
#endif
  if (scall == MAT_INITIAL_MATRIX) {
    Mat_IS                 *matis = (Mat_IS*)mat->data;
    ISLocalToGlobalMapping rl2g;
    IS                     is;
    PetscInt               *lidxs,*lgidxs,*newgidxs;
    PetscInt               ll,newloc;
    MPI_Comm               comm;

    PetscObjectGetComm((PetscObject)mat,&comm);
    ISGetLocalSize(irow,&m);
    ISGetLocalSize(icol,&n);
    MatCreate(comm,newmat);
    MatSetType(*newmat,MATIS);
    MatSetSizes(*newmat,m,n,PETSC_DECIDE,PETSC_DECIDE);
    /* communicate irow to their owners in the layout */
    ISGetIndices(irow,&idxs);
    PetscLayoutMapLocal_Private(mat->rmap,m,idxs,&ll,&lidxs,&lgidxs);
    ISRestoreIndices(irow,&idxs);
    if (!matis->sf) { /* setup SF if not yet created and allocate rootdata and leafdata */
      MatISComputeSF_Private(mat);
    }
    /* mark selected owned dofs: the stored value is the new global index
       shifted by one (0 means "not selected") */
    PetscMemzero(matis->sf_rootdata,matis->sf_nroots*sizeof(PetscInt));
    for (i=0;i<ll;i++) matis->sf_rootdata[lidxs[i]] = lgidxs[i]+1;
    PetscFree(lidxs);
    PetscFree(lgidxs);
    PetscSFBcastBegin(matis->sf,MPIU_INT,matis->sf_rootdata,matis->sf_leafdata);
    PetscSFBcastEnd(matis->sf,MPIU_INT,matis->sf_rootdata,matis->sf_leafdata);
    /* collect the selected local dofs (lidxs) and their new global indices (newgidxs) */
    for (i=0,newloc=0;i<matis->sf_nleaves;i++) if (matis->sf_leafdata[i]) newloc++;
    PetscMalloc1(newloc,&newgidxs);
    PetscMalloc1(newloc,&lidxs);
    for (i=0,newloc=0;i<matis->sf_nleaves;i++)
      if (matis->sf_leafdata[i]) {
        lidxs[newloc] = i;
        newgidxs[newloc++] = matis->sf_leafdata[i]-1;
      }
    ISCreateGeneral(comm,newloc,newgidxs,PETSC_OWN_POINTER,&is);
    ISLocalToGlobalMappingCreateIS(is,&rl2g);
    ISDestroy(&is);
    /* local is to extract local submatrix */
    newmatis = (Mat_IS*)(*newmat)->data;
    ISCreateGeneral(comm,newloc,lidxs,PETSC_OWN_POINTER,&newmatis->getsub_ris);
    if (mat->congruentlayouts == -1) { /* first time we compare rows and cols layouts */
      PetscBool cong;
      PetscLayoutCompare(mat->rmap,mat->cmap,&cong);
      if (cong) mat->congruentlayouts = 1;
      else      mat->congruentlayouts = 0;
    }
    if (mat->congruentlayouts && irow == icol) {
      /* identical rows and columns requested: reuse the row mapping and row IS */
      MatSetLocalToGlobalMapping(*newmat,rl2g,rl2g);
      PetscObjectReference((PetscObject)newmatis->getsub_ris);
      newmatis->getsub_cis = newmatis->getsub_ris;
    } else {
      ISLocalToGlobalMapping cl2g;

      /* communicate icol to their owners in the layout */
      ISGetIndices(icol,&idxs);
      PetscLayoutMapLocal_Private(mat->cmap,n,idxs,&ll,&lidxs,&lgidxs);
      ISRestoreIndices(icol,&idxs);
      /* same scheme as for the rows, using the column SF */
      PetscMemzero(matis->csf_rootdata,matis->csf_nroots*sizeof(PetscInt));
      for (i=0;i<ll;i++) matis->csf_rootdata[lidxs[i]] = lgidxs[i]+1;
      PetscFree(lidxs);
      PetscFree(lgidxs);
      PetscSFBcastBegin(matis->csf,MPIU_INT,matis->csf_rootdata,matis->csf_leafdata);
      PetscSFBcastEnd(matis->csf,MPIU_INT,matis->csf_rootdata,matis->csf_leafdata);
      for (i=0,newloc=0;i<matis->csf_nleaves;i++) if (matis->csf_leafdata[i]) newloc++;
      PetscMalloc1(newloc,&newgidxs);
      PetscMalloc1(newloc,&lidxs);
      for (i=0,newloc=0;i<matis->csf_nleaves;i++)
        if (matis->csf_leafdata[i]) {
          lidxs[newloc] = i;
          newgidxs[newloc++] = matis->csf_leafdata[i]-1;
        }
      ISCreateGeneral(comm,newloc,newgidxs,PETSC_OWN_POINTER,&is);
      ISLocalToGlobalMappingCreateIS(is,&cl2g);
      ISDestroy(&is);
      /* local is to extract local submatrix */
      ISCreateGeneral(comm,newloc,lidxs,PETSC_OWN_POINTER,&newmatis->getsub_cis);
      MatSetLocalToGlobalMapping(*newmat,rl2g,cl2g);
      ISLocalToGlobalMappingDestroy(&cl2g);
    }
    ISLocalToGlobalMappingDestroy(&rl2g);
  } else {
    /* reuse: extract in place into the existing local matrix */
    MatISGetLocalMat(*newmat,&newlocmat);
  }
  MatISGetLocalMat(mat,&locmat);
  newmatis = (Mat_IS*)(*newmat)->data;
  MatGetSubMatrix(locmat,newmatis->getsub_ris,newmatis->getsub_cis,scall,&newlocmat);
  if (scall == MAT_INITIAL_MATRIX) {
    MatISSetLocalMat(*newmat,newlocmat);
    MatDestroy(&newlocmat);
  }
  MatAssemblyBegin(*newmat,MAT_FINAL_ASSEMBLY);
  MatAssemblyEnd(*newmat,MAT_FINAL_ASSEMBLY);
  return(0);
}

347: static PetscErrorCode MatCopy_IS(Mat A,Mat B,MatStructure str)
348: {
349:   Mat_IS         *a = (Mat_IS*)A->data,*b;
350:   PetscBool      ismatis;

354:   PetscObjectTypeCompare((PetscObject)B,MATIS,&ismatis);
355:   if (!ismatis) SETERRQ(PetscObjectComm((PetscObject)B),PETSC_ERR_SUP,"Need to be implemented");
356:   b = (Mat_IS*)B->data;
357:   MatCopy(a->A,b->A,str);
358:   return(0);
359: }

363: static PetscErrorCode MatMissingDiagonal_IS(Mat A,PetscBool  *missing,PetscInt *d)
364: {
365:   Vec               v;
366:   const PetscScalar *array;
367:   PetscInt          i,n;
368:   PetscErrorCode    ierr;

371:   *missing = PETSC_FALSE;
372:   MatCreateVecs(A,NULL,&v);
373:   MatGetDiagonal(A,v);
374:   VecGetLocalSize(v,&n);
375:   VecGetArrayRead(v,&array);
376:   for (i=0;i<n;i++) if (array[i] == 0.) break;
377:   VecRestoreArrayRead(v,&array);
378:   VecDestroy(&v);
379:   if (i != n) *missing = PETSC_TRUE;
380:   if (d) {
381:     *d = -1;
382:     if (*missing) {
383:       PetscInt rstart;
384:       MatGetOwnershipRange(A,&rstart,NULL);
385:       *d = i+rstart;
386:     }
387:   }
388:   return(0);
389: }

/*
   MatISComputeSF_Private - Builds the star forest(s) used to move data
   between the global numbering of B and the local numbering of the
   subdomain matrix matis->A, and allocates the associated work arrays
   (sf_rootdata/sf_leafdata): roots are the locally owned global dofs,
   leaves are the rows (resp. columns) of the local matrix.
*/
static PetscErrorCode MatISComputeSF_Private(Mat B)
{
  Mat_IS         *matis = (Mat_IS*)(B->data);
  const PetscInt *gidxs;

  MatGetSize(matis->A,&matis->sf_nleaves,NULL);
  MatGetLocalSize(B,&matis->sf_nroots,NULL);
  PetscSFCreate(PetscObjectComm((PetscObject)B),&matis->sf);
  /* each local row is a leaf attached to the owner of its global index */
  ISLocalToGlobalMappingGetIndices(B->rmap->mapping,&gidxs);
  PetscSFSetGraphLayout(matis->sf,B->rmap,matis->sf_nleaves,NULL,PETSC_OWN_POINTER,gidxs);
  ISLocalToGlobalMappingRestoreIndices(B->rmap->mapping,&gidxs);
  PetscMalloc2(matis->sf_nroots,&matis->sf_rootdata,matis->sf_nleaves,&matis->sf_leafdata);
  if (B->rmap->mapping != B->cmap->mapping) { /* setup SF for columns */
    MatGetSize(matis->A,NULL,&matis->csf_nleaves);
    MatGetLocalSize(B,NULL,&matis->csf_nroots);
    PetscSFCreate(PetscObjectComm((PetscObject)B),&matis->csf);
    ISLocalToGlobalMappingGetIndices(B->cmap->mapping,&gidxs);
    PetscSFSetGraphLayout(matis->csf,B->cmap,matis->csf_nleaves,NULL,PETSC_OWN_POINTER,gidxs);
    ISLocalToGlobalMappingRestoreIndices(B->cmap->mapping,&gidxs);
    PetscMalloc2(matis->csf_nroots,&matis->csf_rootdata,matis->csf_nleaves,&matis->csf_leafdata);
  } else {
    /* row and column mappings coincide: the column SF and its work arrays
       simply alias the row ones */
    matis->csf = matis->sf;
    matis->csf_nleaves = matis->sf_nleaves;
    matis->csf_nroots = matis->sf_nroots;
    matis->csf_leafdata = matis->sf_leafdata;
    matis->csf_rootdata = matis->sf_rootdata;
  }
  return(0);
}

427: /*@
428:    MatISSetPreallocation - Preallocates memory for a MATIS parallel matrix.

430:    Collective on MPI_Comm

432:    Input Parameters:
433: +  B - the matrix
434: .  d_nz  - number of nonzeros per row in DIAGONAL portion of local submatrix
435:            (same value is used for all local rows)
436: .  d_nnz - array containing the number of nonzeros in the various rows of the
437:            DIAGONAL portion of the local submatrix (possibly different for each row)
438:            or NULL, if d_nz is used to specify the nonzero structure.
439:            The size of this array is equal to the number of local rows, i.e 'm'.
440:            For matrices that will be factored, you must leave room for (and set)
441:            the diagonal entry even if it is zero.
442: .  o_nz  - number of nonzeros per row in the OFF-DIAGONAL portion of local
443:            submatrix (same value is used for all local rows).
444: -  o_nnz - array containing the number of nonzeros in the various rows of the
445:            OFF-DIAGONAL portion of the local submatrix (possibly different for
446:            each row) or NULL, if o_nz is used to specify the nonzero
447:            structure. The size of this array is equal to the number
448:            of local rows, i.e 'm'.

450:    If the *_nnz parameter is given then the *_nz parameter is ignored

452:    Level: intermediate

454:    Notes: This function has the same interface as the MPIAIJ preallocation routine in order to simplify the transition
 455:           from the assembled format to the unassembled one. It overestimates the preallocation of MATIS local
456:           matrices; for exact preallocation, the user should set the preallocation directly on local matrix objects.

458: .keywords: matrix

460: .seealso: MatCreate(), MatCreateIS(), MatMPIAIJSetPreallocation(), MatISGetLocalMat(), MATIS
461: @*/
PetscErrorCode  MatISSetPreallocation(Mat B,PetscInt d_nz,const PetscInt d_nnz[],PetscInt o_nz,const PetscInt o_nnz[])
{

  /* dispatch to the type-specific implementation (MatISSetPreallocation_IS);
     silently a no-op if the method is not composed on B */
  PetscTryMethod(B,"MatISSetPreallocation_C",(Mat,PetscInt,const PetscInt[],PetscInt,const PetscInt[]),(B,d_nz,d_nnz,o_nz,o_nnz));
  return(0);
}

/*
   MatISSetPreallocation_IS - MATIS implementation of MatISSetPreallocation
   (see the manual page above for the parameter meanings).

   The requested per-row totals (diagonal + off-diagonal) are broadcast from
   the owned rows to the local (subdomain) numbering through the matrix SF
   and used to preallocate the local matrix in its AIJ, BAIJ and SBAIJ
   variants (presumably only the call matching the actual local matrix type
   has an effect -- the others are ignored). This overestimates the storage
   actually needed.
*/
static PetscErrorCode  MatISSetPreallocation_IS(Mat B,PetscInt d_nz,const PetscInt d_nnz[],PetscInt o_nz,const PetscInt o_nnz[])
{
  Mat_IS         *matis = (Mat_IS*)(B->data);
  PetscInt       bs,i,nlocalcols;

  if (!matis->A) SETERRQ(PetscObjectComm((PetscObject)B),PETSC_ERR_SUP,"You should first call MatSetLocalToGlobalMapping");
  if (!matis->sf) { /* setup SF if not yet created and allocate rootdata and leafdata */
    MatISComputeSF_Private(B);
  }
  /* per-owned-row total count: diagonal plus off-diagonal requests */
  if (!d_nnz) {
    for (i=0;i<matis->sf_nroots;i++) matis->sf_rootdata[i] = d_nz;
  } else {
    for (i=0;i<matis->sf_nroots;i++) matis->sf_rootdata[i] = d_nnz[i];
  }
  if (!o_nnz) {
    for (i=0;i<matis->sf_nroots;i++) matis->sf_rootdata[i] += o_nz;
  } else {
    for (i=0;i<matis->sf_nroots;i++) matis->sf_rootdata[i] += o_nnz[i];
  }
  /* the local size queries are interleaved with the broadcast to overlap
     communication and computation */
  PetscSFBcastBegin(matis->sf,MPIU_INT,matis->sf_rootdata,matis->sf_leafdata);
  MatGetSize(matis->A,NULL,&nlocalcols);
  MatGetBlockSize(matis->A,&bs);
  PetscSFBcastEnd(matis->sf,MPIU_INT,matis->sf_rootdata,matis->sf_leafdata);
  /* a local row cannot hold more nonzeros than there are local columns */
  for (i=0;i<matis->sf_nleaves;i++) {
    matis->sf_leafdata[i] = PetscMin(matis->sf_leafdata[i],nlocalcols);
  }
  MatSeqAIJSetPreallocation(matis->A,0,matis->sf_leafdata);
  /* compress leafdata in place into block-row counts for the BAIJ variant */
  for (i=0;i<matis->sf_nleaves/bs;i++) {
    matis->sf_leafdata[i] = matis->sf_leafdata[i*bs]/bs;
  }
  MatSeqBAIJSetPreallocation(matis->A,bs,0,matis->sf_leafdata);
  /* adjust for symmetric (upper triangular) storage by discounting i block
     entries from block row i; NOTE(review): this can go negative for sparse
     rows -- confirm intended behavior upstream */
  for (i=0;i<matis->sf_nleaves/bs;i++) {
    matis->sf_leafdata[i] = matis->sf_leafdata[i]-i;
  }
  MatSeqSBAIJSetPreallocation(matis->A,bs,0,matis->sf_leafdata);
  return(0);
}

/*
   MatISSetMPIXAIJPreallocation_Private - Computes and sets the preallocation
   of the assembled matrix B from the unassembled local matrix of the MATIS
   matrix A.

   Each process computes the diagonal/off-diagonal nonzero counts of its own
   local matrix in global numbering (my_dnz/my_onz); these are then reduced
   onto the owning processes through the matrix SF, either summed
   (maxreduce == PETSC_FALSE) or combined with a max (maxreduce ==
   PETSC_TRUE). The result is generally an overestimate, since overlapping
   subdomains may contribute to the same entry. Preallocation is set for the
   MPIAIJ, MPIBAIJ and MPISBAIJ formats.
*/
PETSC_EXTERN PetscErrorCode MatISSetMPIXAIJPreallocation_Private(Mat A, Mat B, PetscBool maxreduce)
{
  Mat_IS          *matis = (Mat_IS*)(A->data);
  PetscInt        *my_dnz,*my_onz,*dnz,*onz,*mat_ranges,*row_ownership;
  const PetscInt  *global_indices_r,*global_indices_c;
  PetscInt        i,j,bs,rows,cols;
  PetscInt        lrows,lcols;
  PetscInt        local_rows,local_cols;
  PetscMPIInt     nsubdomains;
  PetscBool       isdense,issbaij;
  PetscErrorCode  ierr;

  MPI_Comm_size(PetscObjectComm((PetscObject)A),&nsubdomains);
  MatGetSize(A,&rows,&cols);
  MatGetBlockSize(A,&bs);
  MatGetSize(matis->A,&local_rows,&local_cols);
  PetscObjectTypeCompare((PetscObject)matis->A,MATSEQDENSE,&isdense);
  PetscObjectTypeCompare((PetscObject)matis->A,MATSEQSBAIJ,&issbaij);
  ISLocalToGlobalMappingGetIndices(A->rmap->mapping,&global_indices_r);
  if (A->rmap->mapping != A->cmap->mapping) {
    ISLocalToGlobalMappingGetIndices(A->cmap->mapping,&global_indices_c);
  } else {
    global_indices_c = global_indices_r;
  }

  if (issbaij) {
    MatGetRowUpperTriangular(matis->A);
  }
  /*
     An SF reduce is needed to sum up properly on shared rows.
     Note that generally preallocation is not exact, since it overestimates nonzeros
  */
  if (!matis->sf) { /* setup SF if not yet created and allocate rootdata and leafdata */
    MatISComputeSF_Private(A);
  }
  MatGetLocalSize(A,&lrows,&lcols);
  MatPreallocateInitialize(PetscObjectComm((PetscObject)A),lrows,lcols,dnz,onz);
  /* All processes need to compute entire row ownership */
  PetscMalloc1(rows,&row_ownership);
  MatGetOwnershipRanges(A,(const PetscInt**)&mat_ranges);
  for (i=0;i<nsubdomains;i++) {
    for (j=mat_ranges[i];j<mat_ranges[i+1];j++) {
      row_ownership[j] = i;
    }
  }
  /* from here on mat_ranges refers to the COLUMN ownership ranges */
  MatGetOwnershipRangesColumn(A,(const PetscInt**)&mat_ranges);

  /*
     my_dnz and my_onz contains exact contribution to preallocation from each local mat
     then, they will be summed up properly. This way, preallocation is always sufficient
  */
  PetscCalloc2(local_rows,&my_dnz,local_rows,&my_onz);
  /* preallocation as a MATAIJ */
  if (isdense) { /* special case for dense local matrices */
    /* only j >= i is visited; the transposed position (j,i) is counted in
       the same pass, so every row ends up with a full count */
    for (i=0;i<local_rows;i++) {
      PetscInt index_row = global_indices_r[i];
      for (j=i;j<local_rows;j++) {
        PetscInt owner = row_ownership[index_row];
        PetscInt index_col = global_indices_c[j];
        if (index_col > mat_ranges[owner]-1 && index_col < mat_ranges[owner+1] ) { /* diag block */
          my_dnz[i] += 1;
        } else { /* offdiag block */
          my_onz[i] += 1;
        }
        /* same as before, interchanging rows and cols */
        if (i != j) {
          owner = row_ownership[index_col];
          if (index_row > mat_ranges[owner]-1 && index_row < mat_ranges[owner+1] ) {
            my_dnz[j] += 1;
          } else {
            my_onz[j] += 1;
          }
        }
      }
    }
  } else if (matis->A->ops->getrowij) { /* fast path: compressed row storage available */
    const PetscInt *ii,*jj,*jptr;
    PetscBool      done;
    MatGetRowIJ(matis->A,0,PETSC_FALSE,PETSC_FALSE,&local_rows,&ii,&jj,&done);
    if (!done) SETERRQ1(PetscObjectComm((PetscObject)(matis->A)),PETSC_ERR_PLIB,"Error in MatGetRowIJ called from %s\n",__FUNCT__);
    jptr = jj;
    for (i=0;i<local_rows;i++) {
      PetscInt index_row = global_indices_r[i];
      for (j=0;j<ii[i+1]-ii[i];j++,jptr++) {
        PetscInt owner = row_ownership[index_row];
        PetscInt index_col = global_indices_c[*jptr];
        if (index_col > mat_ranges[owner]-1 && index_col < mat_ranges[owner+1] ) { /* diag block */
          my_dnz[i] += 1;
        } else { /* offdiag block */
          my_onz[i] += 1;
        }
        /* same as before, interchanging rows and cols:
           SBAIJ stores the upper triangular part only, so the transposed
           entry must be counted explicitly */
        if (issbaij && index_col != index_row) {
          owner = row_ownership[index_col];
          if (index_row > mat_ranges[owner]-1 && index_row < mat_ranges[owner+1] ) {
            my_dnz[*jptr] += 1;
          } else {
            my_onz[*jptr] += 1;
          }
        }
      }
    }
    MatRestoreRowIJ(matis->A,0,PETSC_FALSE,PETSC_FALSE,&local_rows,&ii,&jj,&done);
    if (!done) SETERRQ1(PetscObjectComm((PetscObject)(matis->A)),PETSC_ERR_PLIB,"Error in MatRestoreRowIJ called from %s\n",__FUNCT__);
  } else { /* loop over rows and use MatGetRow */
    for (i=0;i<local_rows;i++) {
      const PetscInt *cols;
      PetscInt       ncols,index_row = global_indices_r[i];
      MatGetRow(matis->A,i,&ncols,&cols,NULL);
      for (j=0;j<ncols;j++) {
        PetscInt owner = row_ownership[index_row];
        PetscInt index_col = global_indices_c[cols[j]];
        if (index_col > mat_ranges[owner]-1 && index_col < mat_ranges[owner+1] ) { /* diag block */
          my_dnz[i] += 1;
        } else { /* offdiag block */
          my_onz[i] += 1;
        }
        /* same as before, interchanging rows and cols */
        if (issbaij && index_col != index_row) {
          owner = row_ownership[index_col];
          if (index_row > mat_ranges[owner]-1 && index_row < mat_ranges[owner+1] ) {
            my_dnz[cols[j]] += 1;
          } else {
            my_onz[cols[j]] += 1;
          }
        }
      }
      MatRestoreRow(matis->A,i,&ncols,&cols,NULL);
    }
  }
  ISLocalToGlobalMappingRestoreIndices(A->rmap->mapping,&global_indices_r);
  if (global_indices_c != global_indices_r) {
    ISLocalToGlobalMappingRestoreIndices(A->cmap->mapping,&global_indices_c);
  }
  PetscFree(row_ownership);

  /* Reduce my_dnz and my_onz */
  if (maxreduce) {
    PetscSFReduceBegin(matis->sf,MPIU_INT,my_dnz,dnz,MPI_MAX);
    PetscSFReduceBegin(matis->sf,MPIU_INT,my_onz,onz,MPI_MAX);
    PetscSFReduceEnd(matis->sf,MPIU_INT,my_dnz,dnz,MPI_MAX);
    PetscSFReduceEnd(matis->sf,MPIU_INT,my_onz,onz,MPI_MAX);
  } else {
    PetscSFReduceBegin(matis->sf,MPIU_INT,my_dnz,dnz,MPI_SUM);
    PetscSFReduceBegin(matis->sf,MPIU_INT,my_onz,onz,MPI_SUM);
    PetscSFReduceEnd(matis->sf,MPIU_INT,my_dnz,dnz,MPI_SUM);
    PetscSFReduceEnd(matis->sf,MPIU_INT,my_onz,onz,MPI_SUM);
  }
  PetscFree2(my_dnz,my_onz);

  /* Resize preallocation if overestimated */
  for (i=0;i<lrows;i++) {
    dnz[i] = PetscMin(dnz[i],lcols);
    onz[i] = PetscMin(onz[i],cols-lcols);
  }
  /* set preallocation */
  MatMPIAIJSetPreallocation(B,0,dnz,0,onz);
  /* compress dnz/onz in place into block-row counts for the blocked formats */
  for (i=0;i<lrows/bs;i++) {
    dnz[i] = dnz[i*bs]/bs;
    onz[i] = onz[i*bs]/bs;
  }
  MatMPIBAIJSetPreallocation(B,bs,0,dnz,0,onz);
  MatMPISBAIJSetPreallocation(B,bs,0,dnz,0,onz);
  MatPreallocateFinalize(dnz,onz);
  if (issbaij) {
    MatRestoreRowUpperTriangular(matis->A);
  }
  return(0);
}

/*
   MatISGetMPIXAIJ_IS - Implementation of MatISGetMPIXAIJ: assembles the
   MATIS matrix mat into a parallel matrix M by adding the contributions of
   all local matrices.

   In the uniprocessor case the local matrix is simply duplicated or copied.
   With MAT_INITIAL_MATRIX the target type is chosen from the local matrix
   types (SBAIJ when every local matrix is SBAIJ, otherwise BAIJ for bs > 1,
   otherwise AIJ) and preallocated; with MAT_REUSE_MATRIX the sizes and
   block size are checked and the matrix is zeroed before insertion.
*/
static PetscErrorCode MatISGetMPIXAIJ_IS(Mat mat, MatReuse reuse, Mat *M)
{
  Mat_IS         *matis = (Mat_IS*)(mat->data);
  Mat            local_mat;
  /* info on mat */
  PetscInt       bs,rows,cols,lrows,lcols;
  PetscInt       local_rows,local_cols;
  PetscBool      isdense,issbaij,isseqaij;
  PetscMPIInt    nsubdomains;
  /* values insertion */
  PetscScalar    *array;
  /* work */

  /* get info from mat */
  MPI_Comm_size(PetscObjectComm((PetscObject)mat),&nsubdomains);
  if (nsubdomains == 1) {
    /* uniprocessor case: the local matrix already is the assembled operator */
    if (reuse == MAT_INITIAL_MATRIX) {
      MatDuplicate(matis->A,MAT_COPY_VALUES,&(*M));
    } else {
      MatCopy(matis->A,*M,SAME_NONZERO_PATTERN);
    }
    return(0);
  }
  MatGetSize(mat,&rows,&cols);
  MatGetBlockSize(mat,&bs);
  MatGetLocalSize(mat,&lrows,&lcols);
  MatGetSize(matis->A,&local_rows,&local_cols);
  PetscObjectTypeCompare((PetscObject)matis->A,MATSEQDENSE,&isdense);
  PetscObjectTypeCompare((PetscObject)matis->A,MATSEQAIJ,&isseqaij);
  PetscObjectTypeCompare((PetscObject)matis->A,MATSEQSBAIJ,&issbaij);

  if (reuse == MAT_INITIAL_MATRIX) {
    MatType     new_mat_type;
    PetscBool   issbaij_red;

    /* determining new matrix type */
    MPIU_Allreduce(&issbaij,&issbaij_red,1,MPIU_BOOL,MPI_LAND,PetscObjectComm((PetscObject)mat));
    if (issbaij_red) {
      new_mat_type = MATSBAIJ;
    } else {
      if (bs>1) {
        new_mat_type = MATBAIJ;
      } else {
        new_mat_type = MATAIJ;
      }
    }

    MatCreate(PetscObjectComm((PetscObject)mat),M);
    MatSetSizes(*M,lrows,lcols,rows,cols);
    MatSetBlockSize(*M,bs);
    MatSetType(*M,new_mat_type);
    MatISSetMPIXAIJPreallocation_Private(mat,*M,PETSC_FALSE);
  } else {
    PetscInt mbs,mrows,mcols,mlrows,mlcols;
    /* some checks */
    MatGetBlockSize(*M,&mbs);
    MatGetSize(*M,&mrows,&mcols);
    MatGetLocalSize(*M,&mlrows,&mlcols);
    if (mrows != rows) SETERRQ2(PetscObjectComm((PetscObject)mat),PETSC_ERR_SUP,"Cannot reuse matrix. Wrong number of rows (%d != %d)",rows,mrows);
    if (mcols != cols) SETERRQ2(PetscObjectComm((PetscObject)mat),PETSC_ERR_SUP,"Cannot reuse matrix. Wrong number of cols (%d != %d)",cols,mcols);
    if (mlrows != lrows) SETERRQ2(PetscObjectComm((PetscObject)mat),PETSC_ERR_SUP,"Cannot reuse matrix. Wrong number of local rows (%d != %d)",lrows,mlrows);
    if (mlcols != lcols) SETERRQ2(PetscObjectComm((PetscObject)mat),PETSC_ERR_SUP,"Cannot reuse matrix. Wrong number of local cols (%d != %d)",lcols,mlcols);
    if (mbs != bs) SETERRQ2(PetscObjectComm((PetscObject)mat),PETSC_ERR_SUP,"Cannot reuse matrix. Wrong block size (%d != %d)",bs,mbs);
    MatZeroEntries(*M);
  }

  if (issbaij) {
    /* SBAIJ local matrices store the upper triangular part only: convert to
       BAIJ (presumably to have full rows available for insertion -- confirm) */
    MatConvert(matis->A,MATSEQBAIJ,MAT_INITIAL_MATRIX,&local_mat);
  } else {
    PetscObjectReference((PetscObject)matis->A);
    local_mat = matis->A;
  }

  /* Set values */
  MatSetLocalToGlobalMapping(*M,mat->rmap->mapping,mat->cmap->mapping);
  if (isdense) { /* special case for dense local matrices */
    PetscInt i,*dummy_rows,*dummy_cols;

    /* identity index sets spanning the whole local matrix */
    if (local_rows != local_cols) {
      PetscMalloc2(local_rows,&dummy_rows,local_cols,&dummy_cols);
      for (i=0;i<local_rows;i++) dummy_rows[i] = i;
      for (i=0;i<local_cols;i++) dummy_cols[i] = i;
    } else {
      PetscMalloc1(local_rows,&dummy_rows);
      for (i=0;i<local_rows;i++) dummy_rows[i] = i;
      dummy_cols = dummy_rows;
    }
    /* the raw dense array is column oriented, hence row orientation is disabled */
    MatSetOption(*M,MAT_ROW_ORIENTED,PETSC_FALSE);
    MatDenseGetArray(local_mat,&array);
    MatSetValuesLocal(*M,local_rows,dummy_rows,local_cols,dummy_cols,array,ADD_VALUES);
    MatDenseRestoreArray(local_mat,&array);
    if (dummy_rows != dummy_cols) {
      PetscFree2(dummy_rows,dummy_cols);
    } else {
      PetscFree(dummy_rows);
    }
  } else if (isseqaij) {
    PetscInt  i,nvtxs,*xadj,*adjncy;
    PetscBool done;

    /* insert row by row straight from the CSR arrays */
    MatGetRowIJ(local_mat,0,PETSC_FALSE,PETSC_FALSE,&nvtxs,(const PetscInt**)&xadj,(const PetscInt**)&adjncy,&done);
    if (!done) SETERRQ1(PetscObjectComm((PetscObject)local_mat),PETSC_ERR_PLIB,"Error in MatGetRowIJ called from %s\n",__FUNCT__);
    MatSeqAIJGetArray(local_mat,&array);
    for (i=0;i<nvtxs;i++) {
      MatSetValuesLocal(*M,1,&i,xadj[i+1]-xadj[i],adjncy+xadj[i],array+xadj[i],ADD_VALUES);
    }
    MatRestoreRowIJ(local_mat,0,PETSC_FALSE,PETSC_FALSE,&nvtxs,(const PetscInt**)&xadj,(const PetscInt**)&adjncy,&done);
    if (!done) SETERRQ1(PetscObjectComm((PetscObject)local_mat),PETSC_ERR_PLIB,"Error in MatRestoreRowIJ called from %s\n",__FUNCT__);
    MatSeqAIJRestoreArray(local_mat,&array);
  } else { /* very basic values insertion for all other matrix types */
    PetscInt i;

    for (i=0;i<local_rows;i++) {
      PetscInt       j;
      const PetscInt *local_indices_cols;

      MatGetRow(local_mat,i,&j,&local_indices_cols,(const PetscScalar**)&array);
      MatSetValuesLocal(*M,1,&i,j,local_indices_cols,array,ADD_VALUES);
      MatRestoreRow(local_mat,i,&j,&local_indices_cols,(const PetscScalar**)&array);
    }
  }
  MatAssemblyBegin(*M,MAT_FINAL_ASSEMBLY);
  /* the (possibly converted) local matrix can be released while assembly is in flight */
  MatDestroy(&local_mat);
  MatAssemblyEnd(*M,MAT_FINAL_ASSEMBLY);
  if (isdense) {
    MatSetOption(*M,MAT_ROW_ORIENTED,PETSC_TRUE);
  }
  return(0);
}

824: /*@
825:     MatISGetMPIXAIJ - Converts MATIS matrix into a parallel AIJ format

827:   Input Parameters:
828: .  mat - the matrix (should be of type MATIS)
829: .  reuse - either MAT_INITIAL_MATRIX or MAT_REUSE_MATRIX

831:   Output Parameter:
832: .  newmat - the matrix in AIJ format

834:   Level: developer

836:   Notes: mat and *newmat cannot be the same object when MAT_REUSE_MATRIX is requested.

838: .seealso: MATIS
839: @*/
840: PetscErrorCode MatISGetMPIXAIJ(Mat mat, MatReuse reuse, Mat *newmat)
841: {

848:   if (reuse != MAT_INITIAL_MATRIX) {
       /* in-place reuse is not supported: the reuse path zeroes and refills *newmat */
851:     if (mat == *newmat) SETERRQ(PetscObjectComm((PetscObject)mat),PETSC_ERR_SUP,"Cannot reuse the same matrix");
852:   }
       /* dispatch to the implementation composed on the object ("MatISGetMPIXAIJ_C") */
853:   PetscUseMethod(mat,"MatISGetMPIXAIJ_C",(Mat,MatReuse,Mat*),(mat,reuse,newmat));
854:   return(0);
855: }

/* Duplicate a MATIS matrix: build a new MATIS with the same sizes, block size
   and local-to-global maps, then duplicate the inner (local) matrix with the
   requested copy option and install it in the new object. */
859: PetscErrorCode MatDuplicate_IS(Mat mat,MatDuplicateOption op,Mat *newmat)
860: {
862:   Mat_IS         *matis = (Mat_IS*)(mat->data);
863:   PetscInt       bs,m,n,M,N;
864:   Mat            B,localmat;

867:   MatGetBlockSize(mat,&bs);
868:   MatGetSize(mat,&M,&N);
869:   MatGetLocalSize(mat,&m,&n);
       /* reuse the original row/column l2g mappings; only the local matrix is copied */
870:   MatCreateIS(PetscObjectComm((PetscObject)mat),bs,m,n,M,N,mat->rmap->mapping,mat->cmap->mapping,&B);
871:   MatDuplicate(matis->A,op,&localmat);
872:   MatISSetLocalMat(B,localmat);
873:   MatDestroy(&localmat);
874:   MatAssemblyBegin(B,MAT_FINAL_ASSEMBLY);
875:   MatAssemblyEnd(B,MAT_FINAL_ASSEMBLY);
876:   *newmat = B;
877:   return(0);
878: }

/* A MATIS matrix is Hermitian iff every process's local matrix is Hermitian:
   test locally, then combine with a logical-AND reduction over the communicator. */
882: static PetscErrorCode MatIsHermitian_IS(Mat A,PetscReal tol,PetscBool  *flg)
883: {
885:   Mat_IS         *matis = (Mat_IS*)A->data;
886:   PetscBool      local_sym;

889:   MatIsHermitian(matis->A,tol,&local_sym);
890:   MPIU_Allreduce(&local_sym,flg,1,MPIU_BOOL,MPI_LAND,PetscObjectComm((PetscObject)A));
891:   return(0);
892: }

/* Same pattern as MatIsHermitian_IS: symmetry of the unassembled operator is
   the logical AND of the symmetry of each local matrix. */
896: static PetscErrorCode MatIsSymmetric_IS(Mat A,PetscReal tol,PetscBool  *flg)
897: {
899:   Mat_IS         *matis = (Mat_IS*)A->data;
900:   PetscBool      local_sym;

903:   MatIsSymmetric(matis->A,tol,&local_sym);
904:   MPIU_Allreduce(&local_sym,flg,1,MPIU_BOOL,MPI_LAND,PetscObjectComm((PetscObject)A));
905:   return(0);
906: }

/* Destroy a MATIS: release the local matrix, the row/column scatters and work
   vectors, the cached index sets and star forests, then free the data struct
   and clear the composed methods so the object can be retyped safely. */
910: static PetscErrorCode MatDestroy_IS(Mat A)
911: {
913:   Mat_IS         *b = (Mat_IS*)A->data;

916:   MatDestroy(&b->A);
917:   VecScatterDestroy(&b->cctx);
918:   VecScatterDestroy(&b->rctx);
919:   VecDestroy(&b->x);
920:   VecDestroy(&b->y);
921:   VecDestroy(&b->counter);
922:   ISDestroy(&b->getsub_ris);
923:   ISDestroy(&b->getsub_cis);
       /* the column SF may alias the row SF; free it only when it is distinct */
924:   if (b->sf != b->csf) {
925:     PetscSFDestroy(&b->csf);
926:     PetscFree2(b->csf_rootdata,b->csf_leafdata);
927:   }
928:   PetscSFDestroy(&b->sf);
929:   PetscFree2(b->sf_rootdata,b->sf_leafdata);
930:   PetscFree(A->data);
931:   PetscObjectChangeTypeName((PetscObject)A,0);
932:   PetscObjectComposeFunction((PetscObject)A,"MatISGetLocalMat_C",NULL);
933:   PetscObjectComposeFunction((PetscObject)A,"MatISSetLocalMat_C",NULL);
934:   PetscObjectComposeFunction((PetscObject)A,"MatISGetMPIXAIJ_C",NULL);
935:   PetscObjectComposeFunction((PetscObject)A,"MatISSetPreallocation_C",NULL);
936:   return(0);
937: }

/* y = A*x for the globally unassembled operator: gather x into the local work
   vector, apply the local matrix, then sum (ADD_VALUES) the local products
   back into the global result so shared interface entries accumulate. */
941: static PetscErrorCode MatMult_IS(Mat A,Vec x,Vec y)
942: {
944:   Mat_IS         *is  = (Mat_IS*)A->data;
945:   PetscScalar    zero = 0.0;

948:   /*  scatter the global vector x into the local work vector */
949:   VecScatterBegin(is->cctx,x,is->x,INSERT_VALUES,SCATTER_FORWARD);
950:   VecScatterEnd(is->cctx,x,is->x,INSERT_VALUES,SCATTER_FORWARD);

952:   /* multiply the local matrix */
953:   MatMult(is->A,is->x,is->y);

955:   /* scatter product back into global memory */
956:   VecSet(y,zero);
957:   VecScatterBegin(is->rctx,is->y,y,ADD_VALUES,SCATTER_REVERSE);
958:   VecScatterEnd(is->rctx,is->y,y,ADD_VALUES,SCATTER_REVERSE);
959:   return(0);
960: }

/* v3 = v2 + A*v1.  When v3 aliases v2 a temporary is needed, because
   MatMult_IS zeroes its output vector before accumulating into it. */
964: static PetscErrorCode MatMultAdd_IS(Mat A,Vec v1,Vec v2,Vec v3)
965: {
966:   Vec            temp_vec;

970:   if (v3 != v2) {
971:     MatMult(A,v1,v3);
972:     VecAXPY(v3,1.0,v2);
973:   } else {
       /* aliased case: compute into a duplicate, then copy back */
974:     VecDuplicate(v2,&temp_vec);
975:     MatMult(A,v1,temp_vec);
976:     VecAXPY(temp_vec,1.0,v2);
977:     VecCopy(temp_vec,v3);
978:     VecDestroy(&temp_vec);
979:   }
980:   return(0);
981: }

/* x = A^T*y: mirror of MatMult_IS with the roles of the row and column
   scatters exchanged (gather with rctx, spread with cctx). */
985: static PetscErrorCode MatMultTranspose_IS(Mat A,Vec y,Vec x)
986: {
987:   Mat_IS         *is = (Mat_IS*)A->data;

991:   /*  scatter the global vector x into the local work vector */
992:   VecScatterBegin(is->rctx,y,is->y,INSERT_VALUES,SCATTER_FORWARD);
993:   VecScatterEnd(is->rctx,y,is->y,INSERT_VALUES,SCATTER_FORWARD);

995:   /* multiply the local matrix */
996:   MatMultTranspose(is->A,is->y,is->x);

998:   /* scatter product back into global vector */
999:   VecSet(x,0);
1000:   VecScatterBegin(is->cctx,is->x,x,ADD_VALUES,SCATTER_REVERSE);
1001:   VecScatterEnd(is->cctx,is->x,x,ADD_VALUES,SCATTER_REVERSE);
1002:   return(0);
1003: }

/* v3 = v2 + A^T*v1.  Same aliasing care as MatMultAdd_IS: MatMultTranspose_IS
   zeroes its output, so an aliased v2/v3 requires a temporary. */
1007: static PetscErrorCode MatMultTransposeAdd_IS(Mat A,Vec v1,Vec v2,Vec v3)
1008: {
1009:   Vec            temp_vec;

1013:   if (v3 != v2) {
1014:     MatMultTranspose(A,v1,v3);
1015:     VecAXPY(v3,1.0,v2);
1016:   } else {
        /* aliased case: compute into a duplicate, then copy back */
1017:     VecDuplicate(v2,&temp_vec);
1018:     MatMultTranspose(A,v1,temp_vec);
1019:     VecAXPY(temp_vec,1.0,v2);
1020:     VecCopy(temp_vec,v3);
1021:     VecDestroy(&temp_vec);
1022:   }
1023:   return(0);
1024: }

/* View a MATIS by viewing each process's local matrix through a per-process
   sub-viewer on PETSC_COMM_SELF; the final flush serializes the output. */
1028: static PetscErrorCode MatView_IS(Mat A,PetscViewer viewer)
1029: {
1030:   Mat_IS         *a = (Mat_IS*)A->data;
1032:   PetscViewer    sviewer;

1035:   PetscViewerGetSubViewer(viewer,PETSC_COMM_SELF,&sviewer);
1036:   MatView(a->A,sviewer);
1037:   PetscViewerRestoreSubViewer(viewer,PETSC_COMM_SELF,&sviewer);
1038:   PetscViewerFlush(viewer);
1039:   return(0);
1040: }

/* Install the row/column local-to-global maps.  This (re)creates the inner
   sequential matrix sized by the maps, and — unless this object is a local
   reference submatrix — the global<->local scatters, the local work vectors,
   and the interface multiplicity counter used by MatZeroRows and friends. */
1044: static PetscErrorCode MatSetLocalToGlobalMapping_IS(Mat A,ISLocalToGlobalMapping rmapping,ISLocalToGlobalMapping cmapping)
1045: {
1047:   PetscInt       nr,rbs,nc,cbs;
1048:   Mat_IS         *is = (Mat_IS*)A->data;
1049:   IS             from,to;
1050:   Vec            cglobal,rglobal;

1055:   /* Destroy any previous data */
1056:   VecDestroy(&is->x);
1057:   VecDestroy(&is->y);
1058:   VecDestroy(&is->counter);
1059:   VecScatterDestroy(&is->rctx);
1060:   VecScatterDestroy(&is->cctx);
1061:   MatDestroy(&is->A);
1062:   PetscSFDestroy(&is->sf);
1063:   PetscFree2(is->sf_rootdata,is->sf_leafdata);

1065:   /* Setup Layout and set local to global maps */
1066:   PetscLayoutSetUp(A->rmap);
1067:   PetscLayoutSetUp(A->cmap);
1068:   PetscLayoutSetISLocalToGlobalMapping(A->rmap,rmapping);
1069:   PetscLayoutSetISLocalToGlobalMapping(A->cmap,cmapping);

1071:   /* Create the local matrix A */
        /* local matrix dimensions equal the l2g map sizes, not the layout sizes */
1072:   ISLocalToGlobalMappingGetSize(rmapping,&nr);
1073:   ISLocalToGlobalMappingGetBlockSize(rmapping,&rbs);
1074:   ISLocalToGlobalMappingGetSize(cmapping,&nc);
1075:   ISLocalToGlobalMappingGetBlockSize(cmapping,&cbs);
1076:   MatCreate(PETSC_COMM_SELF,&is->A);
1077:   MatSetType(is->A,MATAIJ);
1078:   MatSetSizes(is->A,nr,nc,nr,nc);
1079:   MatSetBlockSizes(is->A,rbs,cbs);
        /* inner matrix is configurable via the -is_ options prefix */
1080:   MatSetOptionsPrefix(is->A,((PetscObject)A)->prefix);
1081:   MatAppendOptionsPrefix(is->A,"is_");
1082:   MatSetFromOptions(is->A);

1084:   if (!is->islocalref) { /* setup scatters and local vectors for MatMult */
1085:     /* Create the local work vectors */
1086:     MatCreateVecs(is->A,&is->x,&is->y);

1088:     /* setup the global to local scatters */
1089:     MatCreateVecs(A,&cglobal,&rglobal);
1090:     ISCreateStride(PETSC_COMM_SELF,nr,0,1,&to);
1091:     ISLocalToGlobalMappingApplyIS(rmapping,to,&from);
1092:     VecScatterCreate(rglobal,from,is->y,to,&is->rctx);
        /* when row and column maps coincide, share one scatter for both */
1093:     if (rmapping != cmapping) {
1094:       ISDestroy(&to);
1095:       ISDestroy(&from);
1096:       ISCreateStride(PETSC_COMM_SELF,nc,0,1,&to);
1097:       ISLocalToGlobalMappingApplyIS(cmapping,to,&from);
1098:       VecScatterCreate(cglobal,from,is->x,to,&is->cctx);
1099:     } else {
1100:       PetscObjectReference((PetscObject)is->rctx);
1101:       is->cctx = is->rctx;
1102:     }

1104:     /* interface counter vector (local) */
        /* scatter ones forward-and-back so counter[i] = number of processes
           sharing local row i (its multiplicity on the interface) */
1105:     VecDuplicate(is->y,&is->counter);
1106:     VecSet(is->y,1.);
1107:     VecScatterBegin(is->rctx,is->y,rglobal,ADD_VALUES,SCATTER_REVERSE);
1108:     VecScatterEnd(is->rctx,is->y,rglobal,ADD_VALUES,SCATTER_REVERSE);
1109:     VecScatterBegin(is->rctx,rglobal,is->counter,INSERT_VALUES,SCATTER_FORWARD);
1110:     VecScatterEnd(is->rctx,rglobal,is->counter,INSERT_VALUES,SCATTER_FORWARD);

1112:     /* free workspace */
1113:     VecDestroy(&rglobal);
1114:     VecDestroy(&cglobal);
1115:     ISDestroy(&to);
1116:     ISDestroy(&from);
1117:   }
1118:   MatSetUp(A);
1119:   return(0);
1120: }

/* Insert values given GLOBAL indices: map them to local indices (IS_GTOLM_MASK
   turns unmappable entries into -1, which MatSetValues then skips) and forward
   to the local matrix.  NOTE(review): the on-stack index buffers are bounded by
   MATIS_MAX_ENTRIES_INSERTION but the bound is only checked in debug builds —
   an optimized build with m or n larger than the bound would overflow; confirm
   callers respect the limit. */
1124: static PetscErrorCode MatSetValues_IS(Mat mat, PetscInt m,const PetscInt *rows, PetscInt n,const PetscInt *cols, const PetscScalar *values, InsertMode addv)
1125: {
1126:   Mat_IS         *is = (Mat_IS*)mat->data;
1128: #if defined(PETSC_USE_DEBUG)
1129:   PetscInt       i,zm,zn;
1130: #endif
1131:   PetscInt       rows_l[MATIS_MAX_ENTRIES_INSERTION],cols_l[MATIS_MAX_ENTRIES_INSERTION];

1134: #if defined(PETSC_USE_DEBUG)
1135:   if (m > MATIS_MAX_ENTRIES_INSERTION || n > MATIS_MAX_ENTRIES_INSERTION) SETERRQ3(PETSC_COMM_SELF,PETSC_ERR_SUP,"Number of row/column indices must be <= %D: they are %D %D",MATIS_MAX_ENTRIES_INSERTION,m,n);
1136:   /* count negative indices */
1137:   for (i=0,zm=0;i<m;i++) if (rows[i] < 0) zm++;
1138:   for (i=0,zn=0;i<n;i++) if (cols[i] < 0) zn++;
1139: #endif
1140:   ISGlobalToLocalMappingApply(mat->rmap->mapping,IS_GTOLM_MASK,m,rows,&m,rows_l);
1141:   ISGlobalToLocalMappingApply(mat->cmap->mapping,IS_GTOLM_MASK,n,cols,&n,cols_l);
1142: #if defined(PETSC_USE_DEBUG)
1143:   /* count negative indices (should be the same as before) */
1144:   for (i=0;i<m;i++) if (rows_l[i] < 0) zm--;
1145:   for (i=0;i<n;i++) if (cols_l[i] < 0) zn--;
1146:   if (zm) SETERRQ(PETSC_COMM_SELF,PETSC_ERR_SUP,"Some of the row indices can not be mapped! Maybe you should not use MATIS");
1147:   if (zn) SETERRQ(PETSC_COMM_SELF,PETSC_ERR_SUP,"Some of the column indices can not be mapped! Maybe you should not use MATIS");
1148: #endif
1149:   MatSetValues(is->A,m,rows_l,n,cols_l,values,addv);
1150:   return(0);
1151: }

/* Blocked variant of MatSetValues_IS: identical structure, but global block
   indices are mapped with the block form of the global-to-local map.  The same
   debug-only bound check on MATIS_MAX_ENTRIES_INSERTION applies here. */
1155: static PetscErrorCode MatSetValuesBlocked_IS(Mat mat, PetscInt m,const PetscInt *rows, PetscInt n,const PetscInt *cols, const PetscScalar *values, InsertMode addv)
1156: {
1157:   Mat_IS         *is = (Mat_IS*)mat->data;
1159: #if defined(PETSC_USE_DEBUG)
1160:   PetscInt       i,zm,zn;
1161: #endif
1162:   PetscInt       rows_l[MATIS_MAX_ENTRIES_INSERTION],cols_l[MATIS_MAX_ENTRIES_INSERTION];

1165: #if defined(PETSC_USE_DEBUG)
1166:   if (m > MATIS_MAX_ENTRIES_INSERTION || n > MATIS_MAX_ENTRIES_INSERTION) SETERRQ3(PETSC_COMM_SELF,PETSC_ERR_SUP,"Number of row/column block indices must be <= %D: they are %D %D",MATIS_MAX_ENTRIES_INSERTION,m,n);
1167:   /* count negative indices */
1168:   for (i=0,zm=0;i<m;i++) if (rows[i] < 0) zm++;
1169:   for (i=0,zn=0;i<n;i++) if (cols[i] < 0) zn++;
1170: #endif
1171:   ISGlobalToLocalMappingApplyBlock(mat->rmap->mapping,IS_GTOLM_MASK,m,rows,&m,rows_l);
1172:   ISGlobalToLocalMappingApplyBlock(mat->cmap->mapping,IS_GTOLM_MASK,n,cols,&n,cols_l);
1173: #if defined(PETSC_USE_DEBUG)
1174:   /* count negative indices (should be the same as before) */
1175:   for (i=0;i<m;i++) if (rows_l[i] < 0) zm--;
1176:   for (i=0;i<n;i++) if (cols_l[i] < 0) zn--;
1177:   if (zm) SETERRQ(PETSC_COMM_SELF,PETSC_ERR_SUP,"Some of the row indices can not be mapped! Maybe you should not use MATIS");
1178:   if (zn) SETERRQ(PETSC_COMM_SELF,PETSC_ERR_SUP,"Some of the column indices can not be mapped! Maybe you should not use MATIS");
1179: #endif
1180:   MatSetValuesBlocked(is->A,m,rows_l,n,cols_l,values,addv);
1181:   return(0);
1182: }

/* Indices are already local: forward directly to the inner matrix. */
1186: static PetscErrorCode MatSetValuesLocal_IS(Mat A,PetscInt m,const PetscInt *rows, PetscInt n,const PetscInt *cols,const PetscScalar *values,InsertMode addv)
1187: {
1189:   Mat_IS         *is = (Mat_IS*)A->data;

1192:   MatSetValues(is->A,m,rows,n,cols,values,addv);
1193:   return(0);
1194: }

/* Blocked indices are already local: forward directly to the inner matrix. */
1198: static PetscErrorCode MatSetValuesBlockedLocal_IS(Mat A,PetscInt m,const PetscInt *rows, PetscInt n,const PetscInt *cols,const PetscScalar *values,InsertMode addv)
1199: {
1201:   Mat_IS         *is = (Mat_IS*)A->data;

1204:   MatSetValuesBlocked(is->A,m,rows,n,cols,values,addv);
1205:   return(0);
1206: }

/* Zero the given LOCAL rows (and optionally columns) of the inner matrix.
   For a nonzero diagonal, each process sets diag/multiplicity on its copy of
   the row, so that after (implicit) assembly the global diagonal sums to diag
   (is->counter holds the per-row multiplicity).  n == 0 flags the pure-Neumann
   case, used elsewhere by the preconditioners. */
1210: static PetscErrorCode MatISZeroRowsColumnsLocal_Private(Mat A,PetscInt n,const PetscInt rows[],PetscScalar diag,PetscBool columns)
1211: {
1212:   Mat_IS         *is = (Mat_IS*)A->data;

1216:   if (!n) {
1217:     is->pure_neumann = PETSC_TRUE;
1218:   } else {
1219:     PetscInt i;
1220:     is->pure_neumann = PETSC_FALSE;

1222:     if (columns) {
1223:       MatZeroRowsColumns(is->A,n,rows,diag,0,0);
1224:     } else {
1225:       MatZeroRows(is->A,n,rows,diag,0,0);
1226:     }
1227:     if (diag != 0.) {
1228:       const PetscScalar *array;
1229:       VecGetArrayRead(is->counter,&array);
        /* divide by the row multiplicity: shared rows contribute diag in total */
1230:       for (i=0; i<n; i++) {
1231:         MatSetValue(is->A,rows[i],rows[i],diag/(array[rows[i]]),INSERT_VALUES);
1232:       }
1233:       VecRestoreArrayRead(is->counter,&array);
1234:     }
1235:     MatAssemblyBegin(is->A,MAT_FINAL_ASSEMBLY);
1236:     MatAssemblyEnd(is->A,MAT_FINAL_ASSEMBLY);
1237:   }
1238:   return(0);
1239: }

/* Common implementation of MatZeroRows/MatZeroRowsColumns for MATIS.
   Steps: (1) restrict the requested global rows to the locally owned ones,
   (2) optionally fix the right-hand side b from x on those rows, (3) broadcast
   ownership marks through the star forest so every process learns which of its
   LOCAL rows are affected, and (4) zero them in the local matrix. */
1243: static PetscErrorCode MatZeroRowsColumns_Private_IS(Mat A,PetscInt n,const PetscInt rows[],PetscScalar diag,Vec x,Vec b,PetscBool columns)
1244: {
1245:   Mat_IS         *matis = (Mat_IS*)A->data;
1246:   PetscInt       nr,nl,len,i;
1247:   PetscInt       *lrows;

1251: #if defined(PETSC_USE_DEBUG)
1252:   if (columns || diag != 0. || (x && b)) {
1253:     PetscBool cong;
1254:     PetscLayoutCompare(A->rmap,A->cmap,&cong);
1255:     if (!cong && columns) SETERRQ(PetscObjectComm((PetscObject)A),PETSC_ERR_SUP,"Columns can be zeroed if and only if A->rmap and A->cmap are congruent for MATIS");
1256:     if (!cong && diag != 0.) SETERRQ(PetscObjectComm((PetscObject)A),PETSC_ERR_SUP,"Nonzero diagonal value supported if and only if A->rmap and A->cmap are congruent for MATIS");
1257:     if (!cong && x && b) SETERRQ(PetscObjectComm((PetscObject)A),PETSC_ERR_SUP,"A->rmap and A->cmap need to be congruent");
1258:   }
1259: #endif
1260:   /* get locally owned rows */
1261:   PetscLayoutMapLocal_Private(A->rmap,n,rows,&len,&lrows,NULL);
1262:   /* fix right hand side if needed */
1263:   if (x && b) {
1264:     const PetscScalar *xx;
1265:     PetscScalar       *bb;

1267:     VecGetArrayRead(x, &xx);
1268:     VecGetArray(b, &bb);
        /* b_i = diag * x_i on each zeroed, locally owned row */
1269:     for (i=0;i<len;++i) bb[lrows[i]] = diag*xx[lrows[i]];
1270:     VecRestoreArrayRead(x, &xx);
1271:     VecRestoreArray(b, &bb);
1272:   }
1273:   /* get rows associated to the local matrices */
1274:   if (!matis->sf) { /* setup SF if not yet created and allocate rootdata and leafdata */
1275:     MatISComputeSF_Private(A);
1276:   }
1277:   MatGetSize(matis->A,&nl,NULL);
        /* roots = owned global rows, leaves = rows of the local matrix */
1278:   PetscMemzero(matis->sf_leafdata,nl*sizeof(PetscInt));
1279:   PetscMemzero(matis->sf_rootdata,A->rmap->n*sizeof(PetscInt));
1280:   for (i=0;i<len;i++) matis->sf_rootdata[lrows[i]] = 1;
1281:   PetscFree(lrows);
1282:   PetscSFBcastBegin(matis->sf,MPIU_INT,matis->sf_rootdata,matis->sf_leafdata);
1283:   PetscSFBcastEnd(matis->sf,MPIU_INT,matis->sf_rootdata,matis->sf_leafdata);
        /* compress marked leaves into the list of local rows to zero */
1284:   PetscMalloc1(nl,&lrows);
1285:   for (i=0,nr=0;i<nl;i++) if (matis->sf_leafdata[i]) lrows[nr++] = i;
1286:   MatISZeroRowsColumnsLocal_Private(A,nr,lrows,diag,columns);
1287:   PetscFree(lrows);
1288:   return(0);
1289: }

/* MatZeroRows entry point: rows only (columns flag = PETSC_FALSE). */
1293: static PetscErrorCode MatZeroRows_IS(Mat A,PetscInt n,const PetscInt rows[],PetscScalar diag,Vec x,Vec b)
1294: {

1298:   MatZeroRowsColumns_Private_IS(A,n,rows,diag,x,b,PETSC_FALSE);
1299:   return(0);
1300: }

/* MatZeroRowsColumns entry point: rows and columns (columns flag = PETSC_TRUE). */
1304: static PetscErrorCode MatZeroRowsColumns_IS(Mat A,PetscInt n,const PetscInt rows[],PetscScalar diag,Vec x,Vec b)
1305: {

1309:   MatZeroRowsColumns_Private_IS(A,n,rows,diag,x,b,PETSC_TRUE);
1310:   return(0);
1311: }

/* Assembly is purely local for MATIS: delegate to the inner matrix. */
1315: static PetscErrorCode MatAssemblyBegin_IS(Mat A,MatAssemblyType type)
1316: {
1317:   Mat_IS         *is = (Mat_IS*)A->data;

1321:   MatAssemblyBegin(is->A,type);
1322:   return(0);
1323: }

/* Counterpart of MatAssemblyBegin_IS: finish assembling the inner matrix. */
1327: static PetscErrorCode MatAssemblyEnd_IS(Mat A,MatAssemblyType type)
1328: {
1329:   Mat_IS         *is = (Mat_IS*)A->data;

1333:   MatAssemblyEnd(is->A,type);
1334:   return(0);
1335: }

/* Return a borrowed reference to the inner matrix (no reference is taken). */
1339: static PetscErrorCode MatISGetLocalMat_IS(Mat mat,Mat *local)
1340: {
1341:   Mat_IS *is = (Mat_IS*)mat->data;

1344:   *local = is->A;
1345:   return(0);
1346: }

1350: /*@
1351:     MatISGetLocalMat - Gets the local matrix stored inside a MATIS matrix.

1353:   Input Parameter:
1354: .  mat - the matrix

1356:   Output Parameter:
1357: .  local - the local matrix

1359:   Level: advanced

1361:   Notes:
1362:     This can be called if you have precomputed the nonzero structure of the
1363:   matrix and want to provide it to the inner matrix object to improve the performance
1364:   of the MatSetValues() operation.

1366: .seealso: MATIS
1367: @*/
1368: PetscErrorCode MatISGetLocalMat(Mat mat,Mat *local)
1369: {

       /* dispatch to the implementation composed on the object ("MatISGetLocalMat_C") */
1375:   PetscUseMethod(mat,"MatISGetLocalMat_C",(Mat,Mat*),(mat,local));
1376:   return(0);
1377: }

/* Replace the inner matrix.  When one is already installed, the replacement
   must have identical dimensions; the new matrix is referenced and the old
   one released. */
1381: static PetscErrorCode MatISSetLocalMat_IS(Mat mat,Mat local)
1382: {
1383:   Mat_IS         *is = (Mat_IS*)mat->data;
1384:   PetscInt       nrows,ncols,orows,ocols;

1388:   if (is->A) {
1389:     MatGetSize(is->A,&orows,&ocols);
1390:     MatGetSize(local,&nrows,&ncols);
1391:     if (orows != nrows || ocols != ncols) SETERRQ4(PETSC_COMM_SELF,PETSC_ERR_ARG_SIZ,"Local MATIS matrix should be of size %Dx%D (you passed a %Dx%D matrix)",orows,ocols,nrows,ncols);
1392:   }
        /* reference before destroy so self-assignment stays safe */
1393:   PetscObjectReference((PetscObject)local);
1394:   MatDestroy(&is->A);
1395:   is->A = local;
1396:   return(0);
1397: }

1401: /*@
1402:     MatISSetLocalMat - Replace the local matrix stored inside a MATIS object.

1404:   Input Parameters:
1405: .  mat - the matrix
1406: .  local - the local matrix

1408:   Output Parameter:

1410:   Level: advanced

1412:   Notes:
1413:     This can be called if you have precomputed the local matrix and
1414:   want to provide it to the matrix object MATIS.

1416: .seealso: MATIS
1417: @*/
1418: PetscErrorCode MatISSetLocalMat(Mat mat,Mat local)
1419: {

       /* dispatch to the implementation composed on the object ("MatISSetLocalMat_C") */
1425:   PetscUseMethod(mat,"MatISSetLocalMat_C",(Mat,Mat),(mat,local));
1426:   return(0);
1427: }

/* Zeroing the unassembled operator reduces to zeroing the inner matrix. */
1431: static PetscErrorCode MatZeroEntries_IS(Mat A)
1432: {
1433:   Mat_IS         *a = (Mat_IS*)A->data;

1437:   MatZeroEntries(a->A);
1438:   return(0);
1439: }

/* Scaling distributes over the unassembled sum: scale each local matrix. */
1443: static PetscErrorCode MatScale_IS(Mat A,PetscScalar a)
1444: {
1445:   Mat_IS         *is = (Mat_IS*)A->data;

1449:   MatScale(is->A,a);
1450:   return(0);
1451: }

/* Global diagonal = sum of the local diagonals over sharing processes:
   take the local diagonal, then ADD-scatter it back into the global vector. */
1455: static PetscErrorCode MatGetDiagonal_IS(Mat A, Vec v)
1456: {
1457:   Mat_IS         *is = (Mat_IS*)A->data;

1461:   /* get diagonal of the local matrix */
1462:   MatGetDiagonal(is->A,is->y);

1464:   /* scatter diagonal back into global vector */
1465:   VecSet(v,0);
1466:   VecScatterBegin(is->rctx,is->y,v,ADD_VALUES,SCATTER_REVERSE);
1467:   VecScatterEnd(is->rctx,is->y,v,ADD_VALUES,SCATTER_REVERSE);
1468:   return(0);
1469: }

/* Forward the option to the inner matrix unchanged. */
1473: static PetscErrorCode MatSetOption_IS(Mat A,MatOption op,PetscBool flg)
1474: {
1475:   Mat_IS         *a = (Mat_IS*)A->data;

1479:   MatSetOption(a->A,op,flg);
1480:   return(0);
1481: }

/* Y += a*X for two MATIS matrices: performed on the local matrices.
   Debug builds verify that X really is MATIS before the unchecked cast;
   assumes X and Y use compatible local-to-global maps — not verified here. */
1485: static PetscErrorCode MatAXPY_IS(Mat Y,PetscScalar a,Mat X,MatStructure str)
1486: {
1487:   Mat_IS         *y = (Mat_IS*)Y->data;
1488:   Mat_IS         *x;
1489: #if defined(PETSC_USE_DEBUG)
1490:   PetscBool      ismatis;
1491: #endif

1495: #if defined(PETSC_USE_DEBUG)
1496:   PetscObjectTypeCompare((PetscObject)X,MATIS,&ismatis);
1497:   if (!ismatis) SETERRQ(PetscObjectComm((PetscObject)Y),PETSC_ERR_SUP,"Cannot call MatAXPY(Y,a,X,str) with X not of type MATIS");
1498: #endif
1499:   x = (Mat_IS*)X->data;
1500:   MatAXPY(y->A,a,x->A,str);
1501:   return(0);
1502: }

/* Build a lightweight MATIS "local reference" submatrix for the rows/columns
   selected by the local index sets row/col.  New l2g maps send the submatrix's
   local indices into the parent's local numbering (entries beyond the selection
   are padded with invalid indices: nrg in debug builds so misuse trips the
   range check, -1 otherwise so they are silently masked).  The submatrix
   SHARES the parent's local matrix, and most operations are stripped from its
   ops table — it only supports local value insertion and assembly. */
1506: static PetscErrorCode MatGetLocalSubMatrix_IS(Mat A,IS row,IS col,Mat *submat)
1507: {
1508:   Mat                    lA;
1509:   Mat_IS                 *matis;
1510:   ISLocalToGlobalMapping rl2g,cl2g;
1511:   IS                     is;
1512:   const PetscInt         *rg,*rl;
1513:   PetscInt               nrg;
1514:   PetscInt               N,M,nrl,i,*idxs;
1515:   PetscErrorCode         ierr;

        /* NOTE(review): rg is fetched and restored but never read below */
1518:   ISLocalToGlobalMappingGetIndices(A->rmap->mapping,&rg);
1519:   ISGetLocalSize(row,&nrl);
1520:   ISGetIndices(row,&rl);
1521:   ISLocalToGlobalMappingGetSize(A->rmap->mapping,&nrg);
1522: #if defined(PETSC_USE_DEBUG)
1523:   for (i=0;i<nrl;i++) if (rl[i]>=nrg) SETERRQ3(PETSC_COMM_SELF,PETSC_ERR_ARG_WRONG,"Local row index %D -> %D greater than maximum possible %D",i,rl[i],nrg);
1524: #endif
1525:   PetscMalloc1(nrg,&idxs);
1526:   /* map from [0,nrl) to row */
1527:   for (i=0;i<nrl;i++) idxs[i] = rl[i];
        /* pad the unused tail with invalid indices (see header comment) */
1528: #if defined(PETSC_USE_DEBUG)
1529:   for (i=nrl;i<nrg;i++) idxs[i] = nrg;
1530: #else
1531:   for (i=nrl;i<nrg;i++) idxs[i] = -1;
1532: #endif
1533:   ISRestoreIndices(row,&rl);
1534:   ISLocalToGlobalMappingRestoreIndices(A->rmap->mapping,&rg);
1535:   ISCreateGeneral(PetscObjectComm((PetscObject)A),nrg,idxs,PETSC_OWN_POINTER,&is);
1536:   ISLocalToGlobalMappingCreateIS(is,&rl2g);
1537:   ISDestroy(&is);
1538:   /* compute new l2g map for columns */
1539:   if (col != row || A->rmap->mapping != A->cmap->mapping) {
1540:     const PetscInt *cg,*cl;
1541:     PetscInt       ncg;
1542:     PetscInt       ncl;

1544:     ISLocalToGlobalMappingGetIndices(A->cmap->mapping,&cg);
1545:     ISGetLocalSize(col,&ncl);
1546:     ISGetIndices(col,&cl);
1547:     ISLocalToGlobalMappingGetSize(A->cmap->mapping,&ncg);
1548: #if defined(PETSC_USE_DEBUG)
1549:     for (i=0;i<ncl;i++) if (cl[i]>=ncg) SETERRQ3(PETSC_COMM_SELF,PETSC_ERR_ARG_WRONG,"Local column index %D -> %D greater than maximum possible %D",i,cl[i],ncg);
1550: #endif
1551:     PetscMalloc1(ncg,&idxs);
1552:     /* map from [0,ncl) to col */
1553:     for (i=0;i<ncl;i++) idxs[i] = cl[i];
1554: #if defined(PETSC_USE_DEBUG)
1555:     for (i=ncl;i<ncg;i++) idxs[i] = ncg;
1556: #else
1557:     for (i=ncl;i<ncg;i++) idxs[i] = -1;
1558: #endif
1559:     ISRestoreIndices(col,&cl);
1560:     ISLocalToGlobalMappingRestoreIndices(A->cmap->mapping,&cg);
1561:     ISCreateGeneral(PetscObjectComm((PetscObject)A),ncg,idxs,PETSC_OWN_POINTER,&is);
1562:     ISLocalToGlobalMappingCreateIS(is,&cl2g);
1563:     ISDestroy(&is);
1564:   } else {
        /* symmetric selection: share the row map for the columns */
1565:     PetscObjectReference((PetscObject)rl2g);
1566:     cl2g = rl2g;
1567:   }
1568:   /* create the MATIS submatrix */
1569:   MatGetSize(A,&M,&N);
1570:   MatCreate(PetscObjectComm((PetscObject)A),submat);
1571:   MatSetSizes(*submat,PETSC_DECIDE,PETSC_DECIDE,M,N);
1572:   MatSetType(*submat,MATIS);
1573:   matis = (Mat_IS*)((*submat)->data);
        /* mark as local reference BEFORE setting the maps, so
           MatSetLocalToGlobalMapping_IS skips building scatters/work vectors */
1574:   matis->islocalref = PETSC_TRUE;
1575:   MatSetLocalToGlobalMapping(*submat,rl2g,cl2g);
1576:   MatISGetLocalMat(A,&lA);
1577:   MatISSetLocalMat(*submat,lA);
1578:   MatSetUp(*submat);
1579:   MatAssemblyBegin(*submat,MAT_FINAL_ASSEMBLY);
1580:   MatAssemblyEnd(*submat,MAT_FINAL_ASSEMBLY);
1581:   ISLocalToGlobalMappingDestroy(&rl2g);
1582:   ISLocalToGlobalMappingDestroy(&cl2g);
1583:   /* remove unsupported ops */
1584:   PetscMemzero((*submat)->ops,sizeof(struct _MatOps));
1585:   (*submat)->ops->destroy               = MatDestroy_IS;
1586:   (*submat)->ops->setvalueslocal        = MatSetValuesLocal_SubMat_IS;
1587:   (*submat)->ops->setvaluesblockedlocal = MatSetValuesBlockedLocal_SubMat_IS;
1588:   (*submat)->ops->assemblybegin         = MatAssemblyBegin_IS;
1589:   (*submat)->ops->assemblyend           = MatAssemblyEnd_IS;
1590:   return(0);
1591: }

1595: /*@
1596:     MatCreateIS - Creates a "process" unassembled matrix, assembled on each
1597:        process but not across processes.

1599:    Input Parameters:
1600: +     comm    - MPI communicator that will share the matrix
1601: .     bs      - block size of the matrix
1602: .     m,n,M,N - local and/or global sizes of the left and right vector used in matrix vector products
1603: .     rmap    - local to global map for rows
1604: -     cmap    - local to global map for cols

1606:    Output Parameter:
1607: .    A - the resulting matrix

1609:    Level: advanced

1611:    Notes: See MATIS for more details.
1612:           m and n are NOT related to the size of the map; they represent the size of the local parts of the vectors
1613:           used in MatMult operations. The sizes of rmap and cmap define the size of the local matrices.
1614:           If either rmap or cmap are NULL, then the matrix is assumed to be square.

1616: .seealso: MATIS, MatSetLocalToGlobalMapping()
1617: @*/
1618: PetscErrorCode  MatCreateIS(MPI_Comm comm,PetscInt bs,PetscInt m,PetscInt n,PetscInt M,PetscInt N,ISLocalToGlobalMapping rmap,ISLocalToGlobalMapping cmap,Mat *A)
1619: {

1623:   if (!rmap && !cmap) SETERRQ(comm,PETSC_ERR_USER,"You need to provide at least one of the mappings");
1624:   MatCreate(comm,A);
        /* bs <= 0 means "use the default block size" */
1625:   if (bs > 0) {
1626:     MatSetBlockSize(*A,bs);
1627:   }
1628:   MatSetSizes(*A,m,n,M,N);
1629:   MatSetType(*A,MATIS);
        /* a missing map implies a square matrix: reuse the other map for both sides */
1630:   if (rmap && cmap) {
1631:     MatSetLocalToGlobalMapping(*A,rmap,cmap);
1632:   } else if (!rmap) {
1633:     MatSetLocalToGlobalMapping(*A,cmap,cmap);
1634:   } else {
1635:     MatSetLocalToGlobalMapping(*A,rmap,rmap);
1636:   }
1637:   return(0);
1638: }

1640: /*MC
1641:    MATIS - MATIS = "is" - A matrix type to be used with non-overlapping domain decomposition methods (e.g. PCBDDC or KSPFETIDP).
1642:    This stores the matrices in globally unassembled form. Each processor
1643:    assembles only its local Neumann problem and the parallel matrix vector
1644:    product is handled "implicitly".

1646:    Operations Provided:
1647: +  MatMult()
1648: .  MatMultAdd()
1649: .  MatMultTranspose()
1650: .  MatMultTransposeAdd()
1651: .  MatZeroEntries()
1652: .  MatSetOption()
1653: .  MatZeroRows()
1654: .  MatSetValues()
1655: .  MatSetValuesBlocked()
1656: .  MatSetValuesLocal()
1657: .  MatSetValuesBlockedLocal()
1658: .  MatScale()
1659: .  MatGetDiagonal()
1660: .  MatMissingDiagonal()
1661: .  MatDuplicate()
1662: .  MatCopy()
1663: .  MatAXPY()
1664: .  MatGetSubMatrix()
1665: .  MatGetLocalSubMatrix()
1666: .  MatTranspose()
1667: -  MatSetLocalToGlobalMapping()

1669:    Options Database Keys:
1670: . -mat_type is - sets the matrix type to "is" during a call to MatSetFromOptions()

1672:    Notes: Options prefix for the inner matrix are given by -is_mat_xxx

1674:           You must call MatSetLocalToGlobalMapping() before using this matrix type.

1676:           You can do matrix preallocation on the local matrix after you obtain it with
1677:           MatISGetLocalMat(); otherwise, you could use MatISSetPreallocation()

1679:   Level: advanced

1681: .seealso: Mat, MatISGetLocalMat(), MatSetLocalToGlobalMapping(), MatISSetPreallocation(), MatCreateIS(), PCBDDC, KSPFETIDP

1683: M*/

/* Type constructor for MATIS: allocate the Mat_IS data, install the ops table
   (zeroed first so unimplemented operations stay NULL), compose the
   MATIS-specific methods, and set the type name. */
1687: PETSC_EXTERN PetscErrorCode MatCreate_IS(Mat A)
1688: {
1690:   Mat_IS         *b;

1693:   PetscNewLog(A,&b);
1694:   A->data = (void*)b;

1696:   /* matrix ops */
1697:   PetscMemzero(A->ops,sizeof(struct _MatOps));
1698:   A->ops->mult                    = MatMult_IS;
1699:   A->ops->multadd                 = MatMultAdd_IS;
1700:   A->ops->multtranspose           = MatMultTranspose_IS;
1701:   A->ops->multtransposeadd        = MatMultTransposeAdd_IS;
1702:   A->ops->destroy                 = MatDestroy_IS;
1703:   A->ops->setlocaltoglobalmapping = MatSetLocalToGlobalMapping_IS;
1704:   A->ops->setvalues               = MatSetValues_IS;
1705:   A->ops->setvaluesblocked        = MatSetValuesBlocked_IS;
1706:   A->ops->setvalueslocal          = MatSetValuesLocal_IS;
1707:   A->ops->setvaluesblockedlocal   = MatSetValuesBlockedLocal_IS;
1708:   A->ops->zerorows                = MatZeroRows_IS;
1709:   A->ops->zerorowscolumns         = MatZeroRowsColumns_IS;
1710:   A->ops->assemblybegin           = MatAssemblyBegin_IS;
1711:   A->ops->assemblyend             = MatAssemblyEnd_IS;
1712:   A->ops->view                    = MatView_IS;
1713:   A->ops->zeroentries             = MatZeroEntries_IS;
1714:   A->ops->scale                   = MatScale_IS;
1715:   A->ops->getdiagonal             = MatGetDiagonal_IS;
1716:   A->ops->setoption               = MatSetOption_IS;
1717:   A->ops->ishermitian             = MatIsHermitian_IS;
1718:   A->ops->issymmetric             = MatIsSymmetric_IS;
1719:   A->ops->duplicate               = MatDuplicate_IS;
1720:   A->ops->missingdiagonal         = MatMissingDiagonal_IS;
1721:   A->ops->copy                    = MatCopy_IS;
1722:   A->ops->getlocalsubmatrix       = MatGetLocalSubMatrix_IS;
1723:   A->ops->getsubmatrix            = MatGetSubMatrix_IS;
1724:   A->ops->axpy                    = MatAXPY_IS;
1725:   A->ops->diagonalset             = MatDiagonalSet_IS;
1726:   A->ops->shift                   = MatShift_IS;
1727:   A->ops->transpose               = MatTranspose_IS;

1729:   /* special MATIS functions */
1730:   PetscObjectComposeFunction((PetscObject)A,"MatISGetLocalMat_C",MatISGetLocalMat_IS);
1731:   PetscObjectComposeFunction((PetscObject)A,"MatISSetLocalMat_C",MatISSetLocalMat_IS);
1732:   PetscObjectComposeFunction((PetscObject)A,"MatISGetMPIXAIJ_C",MatISGetMPIXAIJ_IS);
1733:   PetscObjectComposeFunction((PetscObject)A,"MatISSetPreallocation_C",MatISSetPreallocation_IS);
1734:   PetscObjectChangeTypeName((PetscObject)A,MATIS);
1735:   return(0);
1736: }