Actual source code: matis.c

petsc-3.6.0 2015-06-09
/*
   Creates a matrix class for using the Neumann-Neumann type preconditioners.
   This stores the matrices in globally unassembled form. Each processor
   assembles only its local Neumann problem and the parallel matrix-vector
   product is handled "implicitly".

   We provide:
       MatMult()

   Currently this allows for only one subdomain per processor.
*/

#include <../src/mat/impls/is/matis.h>      /*I "petscmat.h" I*/

static PetscErrorCode MatISComputeSF_Private(Mat);

static PetscErrorCode MatISComputeSF_Private(Mat B)
{
  Mat_IS         *matis = (Mat_IS*)(B->data);
  const PetscInt *gidxs;

  MatGetSize(matis->A,&matis->sf_nleaves,NULL);
  MatGetLocalSize(B,&matis->sf_nroots,NULL);
  PetscSFCreate(PetscObjectComm((PetscObject)B),&matis->sf);
  ISLocalToGlobalMappingGetIndices(matis->mapping,&gidxs);
  /* PETSC_OWN_POINTER refers to ilocal which is NULL */
  PetscSFSetGraphLayout(matis->sf,B->rmap,matis->sf_nleaves,NULL,PETSC_OWN_POINTER,gidxs);
  ISLocalToGlobalMappingRestoreIndices(matis->mapping,&gidxs);
  PetscMalloc2(matis->sf_nroots,&matis->sf_rootdata,matis->sf_nleaves,&matis->sf_leafdata);
  return(0);
}

/*@
   MatISSetPreallocation - Preallocates memory for a MATIS parallel matrix.

   Collective on MPI_Comm

   Input Parameters:
+  B - the matrix
.  d_nz  - number of nonzeros per row in DIAGONAL portion of local submatrix
           (same value is used for all local rows)
.  d_nnz - array containing the number of nonzeros in the various rows of the
           DIAGONAL portion of the local submatrix (possibly different for each row)
           or NULL, if d_nz is used to specify the nonzero structure.
           The size of this array is equal to the number of local rows, i.e. 'm'.
           For matrices that will be factored, you must leave room for (and set)
           the diagonal entry even if it is zero.
.  o_nz  - number of nonzeros per row in the OFF-DIAGONAL portion of local
           submatrix (same value is used for all local rows).
-  o_nnz - array containing the number of nonzeros in the various rows of the
           OFF-DIAGONAL portion of the local submatrix (possibly different for
           each row) or NULL, if o_nz is used to specify the nonzero
           structure. The size of this array is equal to the number
           of local rows, i.e. 'm'.

   If the *_nnz parameter is given then the *_nz parameter is ignored.

   Level: intermediate

   Notes: This function has the same interface as the MPIAIJ preallocation routine in order to simplify the transition
          from the assembled format to the unassembled one. It overestimates the preallocation of MATIS local
          matrices; for exact preallocation, the user should set the preallocation directly on local matrix objects.

.keywords: matrix

.seealso: MatCreate(), MatCreateIS(), MatMPIAIJSetPreallocation(), MatISGetLocalMat()
@*/
PetscErrorCode  MatISSetPreallocation(Mat B,PetscInt d_nz,const PetscInt d_nnz[],PetscInt o_nz,const PetscInt o_nnz[])
{

  PetscTryMethod(B,"MatISSetPreallocation_C",(Mat,PetscInt,const PetscInt[],PetscInt,const PetscInt[]),(B,d_nz,d_nnz,o_nz,o_nnz));
  return(0);
}
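
/*
   A minimal usage sketch (editor's example, not from the original source):
   'map' is a previously created ISLocalToGlobalMapping and 'N' the global
   size, both assumptions of this sketch. The d_nz/o_nz arguments have the
   same meaning as in MatMPIAIJSetPreallocation().

     Mat A;
     MatCreateIS(PETSC_COMM_WORLD,1,PETSC_DECIDE,PETSC_DECIDE,N,N,map,&A);
     MatISSetPreallocation(A,5,NULL,2,NULL);
*/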

PetscErrorCode  MatISSetPreallocation_IS(Mat B,PetscInt d_nz,const PetscInt d_nnz[],PetscInt o_nz,const PetscInt o_nnz[])
{
  Mat_IS         *matis = (Mat_IS*)(B->data);
  PetscInt       bs,i,nlocalcols;

  if (!matis->A) {
    SETERRQ(PetscObjectComm((PetscObject)B),PETSC_ERR_SUP,"You should first call MatSetLocalToGlobalMapping");
  }
  if (!matis->sf) { /* setup SF if not yet created and allocate rootdata and leafdata */
    MatISComputeSF_Private(B);
  }
  if (!d_nnz) {
    for (i=0;i<matis->sf_nroots;i++) matis->sf_rootdata[i] = d_nz;
  } else {
    for (i=0;i<matis->sf_nroots;i++) matis->sf_rootdata[i] = d_nnz[i];
  }
  if (!o_nnz) {
    for (i=0;i<matis->sf_nroots;i++) matis->sf_rootdata[i] += o_nz;
  } else {
    for (i=0;i<matis->sf_nroots;i++) matis->sf_rootdata[i] += o_nnz[i];
  }
  PetscSFBcastBegin(matis->sf,MPIU_INT,matis->sf_rootdata,matis->sf_leafdata);
  MatGetSize(matis->A,NULL,&nlocalcols);
  MatGetBlockSize(matis->A,&bs);
  PetscSFBcastEnd(matis->sf,MPIU_INT,matis->sf_rootdata,matis->sf_leafdata);
  for (i=0;i<matis->sf_nleaves;i++) {
    matis->sf_leafdata[i] = PetscMin(matis->sf_leafdata[i],nlocalcols);
  }
  MatSeqAIJSetPreallocation(matis->A,0,matis->sf_leafdata);
  /* adapt the row-based counts to block rows for BAIJ */
  for (i=0;i<matis->sf_nleaves/bs;i++) {
    matis->sf_leafdata[i] = matis->sf_leafdata[i*bs]/bs;
  }
  MatSeqBAIJSetPreallocation(matis->A,bs,0,matis->sf_leafdata);
  /* roughly discard the lower triangular part for SBAIJ */
  for (i=0;i<matis->sf_nleaves/bs;i++) {
    matis->sf_leafdata[i] = matis->sf_leafdata[i]-i;
  }
  MatSeqSBAIJSetPreallocation(matis->A,bs,0,matis->sf_leafdata);
  return(0);
}

PETSC_EXTERN PetscErrorCode MatISSetMPIXAIJPreallocation_Private(Mat A, Mat B, PetscBool maxreduce)
{
  Mat_IS         *matis = (Mat_IS*)(A->data);
  PetscInt       *my_dnz,*my_onz,*dnz,*onz,*mat_ranges,*row_ownership;
  const PetscInt *global_indices;
  PetscInt       i,j,bs,rows,cols;
  PetscInt       lrows,lcols;
  PetscInt       local_rows,local_cols;
  PetscMPIInt    nsubdomains;
  PetscBool      isdense,issbaij;

  MPI_Comm_size(PetscObjectComm((PetscObject)A),&nsubdomains);
  MatGetSize(A,&rows,&cols);
  MatGetBlockSize(A,&bs);
  MatGetSize(matis->A,&local_rows,&local_cols);
  PetscObjectTypeCompare((PetscObject)matis->A,MATSEQDENSE,&isdense);
  PetscObjectTypeCompare((PetscObject)matis->A,MATSEQSBAIJ,&issbaij);
  ISLocalToGlobalMappingGetIndices(matis->mapping,&global_indices);
  if (issbaij) {
    MatGetRowUpperTriangular(matis->A);
  }
  /*
     An SF reduce is needed to sum up properly on shared interface dofs.
     Note that, in general, preallocation is not exact, since it overestimates nonzeros
  */
  if (!matis->sf) { /* setup SF if not yet created and allocate rootdata and leafdata */
    MatISComputeSF_Private(A);
  }
  MatGetLocalSize(A,&lrows,&lcols);
  MatPreallocateInitialize(PetscObjectComm((PetscObject)A),lrows,lcols,dnz,onz);
  /* All processes need to compute entire row ownership */
  PetscMalloc1(rows,&row_ownership);
  MatGetOwnershipRanges(A,(const PetscInt**)&mat_ranges);
  for (i=0;i<nsubdomains;i++) {
    for (j=mat_ranges[i];j<mat_ranges[i+1];j++) {
      row_ownership[j]=i;
    }
  }

  /*
     my_dnz and my_onz contain the exact contribution to the preallocation from each local mat;
     they are then summed up properly. This way, preallocation is always sufficient
  */
  PetscCalloc2(local_rows,&my_dnz,local_rows,&my_onz);
  /* preallocation as a MATAIJ */
  if (isdense) { /* special case for dense local matrices */
    for (i=0;i<local_rows;i++) {
      PetscInt index_row = global_indices[i];
      for (j=i;j<local_rows;j++) {
        PetscInt owner = row_ownership[index_row];
        PetscInt index_col = global_indices[j];
        if (index_col > mat_ranges[owner]-1 && index_col < mat_ranges[owner+1] ) { /* diag block */
          my_dnz[i] += 1;
        } else { /* offdiag block */
          my_onz[i] += 1;
        }
        /* same as before, interchanging rows and cols */
        if (i != j) {
          owner = row_ownership[index_col];
          if (index_row > mat_ranges[owner]-1 && index_row < mat_ranges[owner+1] ) {
            my_dnz[j] += 1;
          } else {
            my_onz[j] += 1;
          }
        }
      }
    }
  } else {
    for (i=0;i<local_rows;i++) {
      const PetscInt *cols;
      PetscInt       ncols,index_row = global_indices[i];
      MatGetRow(matis->A,i,&ncols,&cols,NULL);
      for (j=0;j<ncols;j++) {
        PetscInt owner = row_ownership[index_row];
        PetscInt index_col = global_indices[cols[j]];
        if (index_col > mat_ranges[owner]-1 && index_col < mat_ranges[owner+1] ) { /* diag block */
          my_dnz[i] += 1;
        } else { /* offdiag block */
          my_onz[i] += 1;
        }
        /* same as before, interchanging rows and cols */
        if (issbaij && index_col != index_row) {
          owner = row_ownership[index_col];
          if (index_row > mat_ranges[owner]-1 && index_row < mat_ranges[owner+1] ) {
            my_dnz[cols[j]] += 1;
          } else {
            my_onz[cols[j]] += 1;
          }
        }
      }
      MatRestoreRow(matis->A,i,&ncols,&cols,NULL);
    }
  }
  ISLocalToGlobalMappingRestoreIndices(matis->mapping,&global_indices);
  PetscFree(row_ownership);
  if (maxreduce) {
    PetscSFReduceBegin(matis->sf,MPIU_INT,my_dnz,dnz,MPI_MAX);
    PetscSFReduceEnd(matis->sf,MPIU_INT,my_dnz,dnz,MPI_MAX);
    PetscSFReduceBegin(matis->sf,MPIU_INT,my_onz,onz,MPI_MAX);
    PetscSFReduceEnd(matis->sf,MPIU_INT,my_onz,onz,MPI_MAX);
  } else {
    PetscSFReduceBegin(matis->sf,MPIU_INT,my_dnz,dnz,MPI_SUM);
    PetscSFReduceEnd(matis->sf,MPIU_INT,my_dnz,dnz,MPI_SUM);
    PetscSFReduceBegin(matis->sf,MPIU_INT,my_onz,onz,MPI_SUM);
    PetscSFReduceEnd(matis->sf,MPIU_INT,my_onz,onz,MPI_SUM);
  }
  PetscFree2(my_dnz,my_onz);

  /* Resize preallocation if overestimated */
  for (i=0;i<lrows;i++) {
    dnz[i] = PetscMin(dnz[i],lcols);
    onz[i] = PetscMin(onz[i],cols-lcols);
  }
  /* set preallocation */
  MatMPIAIJSetPreallocation(B,0,dnz,0,onz);
  for (i=0;i<lrows/bs;i++) {
    dnz[i] = dnz[i*bs]/bs;
    onz[i] = onz[i*bs]/bs;
  }
  MatMPIBAIJSetPreallocation(B,bs,0,dnz,0,onz);
  MatMPISBAIJSetPreallocation(B,bs,0,dnz,0,onz);
  MatPreallocateFinalize(dnz,onz);
  if (issbaij) {
    MatRestoreRowUpperTriangular(matis->A);
  }
  return(0);
}

PetscErrorCode MatISGetMPIXAIJ_IS(Mat mat, MatReuse reuse, Mat *M)
{
  Mat_IS         *matis = (Mat_IS*)(mat->data);
  Mat            local_mat;
  /* info on mat */
  PetscInt       bs,rows,cols,lrows,lcols;
  PetscInt       local_rows,local_cols;
  PetscBool      isdense,issbaij,isseqaij;
  const PetscInt *global_indices_rows;
  PetscMPIInt    nsubdomains;
  /* values insertion */
  PetscScalar    *array;
  /* work */

  /* MISSING CHECKS
    - rectangular case not covered (it is not allowed by MATIS)
  */
  /* get info from mat */
  MPI_Comm_size(PetscObjectComm((PetscObject)mat),&nsubdomains);
  if (nsubdomains == 1) {
    if (reuse == MAT_INITIAL_MATRIX) {
      MatDuplicate(matis->A,MAT_COPY_VALUES,&(*M));
    } else {
      MatCopy(matis->A,*M,SAME_NONZERO_PATTERN);
    }
    return(0);
  }
  MatGetSize(mat,&rows,&cols);
  MatGetBlockSize(mat,&bs);
  MatGetLocalSize(mat,&lrows,&lcols);
  MatGetSize(matis->A,&local_rows,&local_cols);
  PetscObjectTypeCompare((PetscObject)matis->A,MATSEQDENSE,&isdense);
  PetscObjectTypeCompare((PetscObject)matis->A,MATSEQAIJ,&isseqaij);
  PetscObjectTypeCompare((PetscObject)matis->A,MATSEQSBAIJ,&issbaij);

  /* map indices of local mat rows to global values */
  ISLocalToGlobalMappingGetIndices(matis->mapping,&global_indices_rows);

  if (reuse == MAT_INITIAL_MATRIX) {
    MatType     new_mat_type;
    PetscBool   issbaij_red;

    /* determining new matrix type */
    MPI_Allreduce(&issbaij,&issbaij_red,1,MPIU_BOOL,MPI_LAND,PetscObjectComm((PetscObject)mat));
    if (issbaij_red) {
      new_mat_type = MATSBAIJ;
    } else {
      if (bs>1) {
        new_mat_type = MATBAIJ;
      } else {
        new_mat_type = MATAIJ;
      }
    }

    MatCreate(PetscObjectComm((PetscObject)mat),M);
    MatSetSizes(*M,lrows,lcols,rows,cols);
    MatSetBlockSize(*M,bs);
    MatSetType(*M,new_mat_type);
    MatISSetMPIXAIJPreallocation_Private(mat,*M,PETSC_FALSE);
  } else {
    PetscInt mbs,mrows,mcols,mlrows,mlcols;
    /* some checks */
    MatGetBlockSize(*M,&mbs);
    MatGetSize(*M,&mrows,&mcols);
    MatGetLocalSize(*M,&mlrows,&mlcols);
    if (mrows != rows) {
      SETERRQ2(PetscObjectComm((PetscObject)mat),PETSC_ERR_SUP,"Cannot reuse matrix. Wrong number of rows (%d != %d)",rows,mrows);
    }
    if (mcols != cols) {
      SETERRQ2(PetscObjectComm((PetscObject)mat),PETSC_ERR_SUP,"Cannot reuse matrix. Wrong number of cols (%d != %d)",cols,mcols);
    }
    if (mlrows != lrows) {
      SETERRQ2(PetscObjectComm((PetscObject)mat),PETSC_ERR_SUP,"Cannot reuse matrix. Wrong number of local rows (%d != %d)",lrows,mlrows);
    }
    if (mlcols != lcols) {
      SETERRQ2(PetscObjectComm((PetscObject)mat),PETSC_ERR_SUP,"Cannot reuse matrix. Wrong number of local cols (%d != %d)",lcols,mlcols);
    }
    if (mbs != bs) {
      SETERRQ2(PetscObjectComm((PetscObject)mat),PETSC_ERR_SUP,"Cannot reuse matrix. Wrong block size (%d != %d)",bs,mbs);
    }
    MatZeroEntries(*M);
  }

  if (issbaij) {
    MatConvert(matis->A,MATSEQBAIJ,MAT_INITIAL_MATRIX,&local_mat);
  } else {
    PetscObjectReference((PetscObject)matis->A);
    local_mat = matis->A;
  }

  /* Set values */
  if (isdense) { /* special case for dense local matrices */
    MatSetOption(*M,MAT_ROW_ORIENTED,PETSC_FALSE);
    MatDenseGetArray(local_mat,&array);
    MatSetValues(*M,local_rows,global_indices_rows,local_cols,global_indices_rows,array,ADD_VALUES);
    MatDenseRestoreArray(local_mat,&array);
  } else if (isseqaij) {
    PetscInt  i,nvtxs,*xadj,*adjncy,*global_indices_cols;
    PetscBool done;

    MatGetRowIJ(local_mat,0,PETSC_FALSE,PETSC_FALSE,&nvtxs,(const PetscInt**)&xadj,(const PetscInt**)&adjncy,&done);
    if (!done) {
      SETERRQ1(PetscObjectComm((PetscObject)local_mat),PETSC_ERR_PLIB,"Error in MatGetRowIJ called in %s\n",__FUNCT__);
    }
    MatSeqAIJGetArray(local_mat,&array);
    PetscMalloc1(xadj[nvtxs],&global_indices_cols);
    ISLocalToGlobalMappingApply(matis->mapping,xadj[nvtxs],adjncy,global_indices_cols);
    for (i=0;i<nvtxs;i++) {
      PetscInt    row,*cols,ncols;
      PetscScalar *mat_vals;

      row = global_indices_rows[i];
      ncols = xadj[i+1]-xadj[i];
      cols = global_indices_cols+xadj[i];
      mat_vals = array+xadj[i];
      MatSetValues(*M,1,&row,ncols,cols,mat_vals,ADD_VALUES);
    }
    MatRestoreRowIJ(local_mat,0,PETSC_FALSE,PETSC_FALSE,&nvtxs,(const PetscInt**)&xadj,(const PetscInt**)&adjncy,&done);
    if (!done) {
      SETERRQ1(PetscObjectComm((PetscObject)local_mat),PETSC_ERR_PLIB,"Error in MatRestoreRowIJ called in %s\n",__FUNCT__);
    }
    MatSeqAIJRestoreArray(local_mat,&array);
    PetscFree(global_indices_cols);
  } else { /* very basic values insertion for all other matrix types */
    PetscInt i,*global_indices_cols;

    PetscMalloc1(local_cols,&global_indices_cols);
    for (i=0;i<local_rows;i++) {
      PetscInt       j;
      const PetscInt *local_indices;

      MatGetRow(local_mat,i,&j,&local_indices,(const PetscScalar**)&array);
      ISLocalToGlobalMappingApply(matis->mapping,j,local_indices,global_indices_cols);
      MatSetValues(*M,1,&global_indices_rows[i],j,global_indices_cols,array,ADD_VALUES);
      MatRestoreRow(local_mat,i,&j,&local_indices,(const PetscScalar**)&array);
    }
    PetscFree(global_indices_cols);
  }
  MatAssemblyBegin(*M,MAT_FINAL_ASSEMBLY);
  ISLocalToGlobalMappingRestoreIndices(matis->mapping,&global_indices_rows);
  MatDestroy(&local_mat);
  MatAssemblyEnd(*M,MAT_FINAL_ASSEMBLY);
  if (isdense) {
    MatSetOption(*M,MAT_ROW_ORIENTED,PETSC_TRUE);
  }
  return(0);
}

/*@
    MatISGetMPIXAIJ - Converts a MATIS matrix into a parallel AIJ format

  Input Parameters:
+  mat - the matrix (should be of type MATIS)
-  reuse - either MAT_INITIAL_MATRIX or MAT_REUSE_MATRIX

  Output Parameter:
.  newmat - the matrix in AIJ format

  Level: developer

  Notes: mat and *newmat cannot be the same object when MAT_REUSE_MATRIX is requested.

.seealso: MATIS
@*/
PetscErrorCode MatISGetMPIXAIJ(Mat mat, MatReuse reuse, Mat *newmat)
{

  if (reuse != MAT_INITIAL_MATRIX) {
    if (mat == *newmat) {
      SETERRQ(PetscObjectComm((PetscObject)mat),PETSC_ERR_SUP,"Cannot reuse the same matrix");
    }
  }
  PetscUseMethod(mat,"MatISGetMPIXAIJ_C",(Mat,MatReuse,Mat*),(mat,reuse,newmat));
  return(0);
}
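
/*
   A minimal usage sketch (editor's example, not from the original source):
   assembling a MATIS matrix 'A' into a parallel AIJ matrix, then reusing
   the result after the values of 'A' have changed.

     Mat B;
     MatISGetMPIXAIJ(A,MAT_INITIAL_MATRIX,&B);
     ...
     MatISGetMPIXAIJ(A,MAT_REUSE_MATRIX,&B);
     MatDestroy(&B);
*/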

PetscErrorCode MatDuplicate_IS(Mat mat,MatDuplicateOption op,Mat *newmat)
{
  Mat_IS         *matis = (Mat_IS*)(mat->data);
  PetscInt       bs,m,n,M,N;
  Mat            B,localmat;

  MatGetBlockSize(mat,&bs);
  MatGetSize(mat,&M,&N);
  MatGetLocalSize(mat,&m,&n);
  MatCreateIS(PetscObjectComm((PetscObject)mat),bs,m,n,M,N,matis->mapping,&B);
  MatDuplicate(matis->A,op,&localmat);
  MatISSetLocalMat(B,localmat);
  MatDestroy(&localmat);
  MatAssemblyBegin(B,MAT_FINAL_ASSEMBLY);
  MatAssemblyEnd(B,MAT_FINAL_ASSEMBLY);
  *newmat = B;
  return(0);
}

PetscErrorCode MatIsHermitian_IS(Mat A,PetscReal tol,PetscBool  *flg)
{
  Mat_IS         *matis = (Mat_IS*)A->data;
  PetscBool      local_sym;

  MatIsHermitian(matis->A,tol,&local_sym);
  MPI_Allreduce(&local_sym,flg,1,MPIU_BOOL,MPI_LAND,PetscObjectComm((PetscObject)A));
  return(0);
}

PetscErrorCode MatIsSymmetric_IS(Mat A,PetscReal tol,PetscBool  *flg)
{
  Mat_IS         *matis = (Mat_IS*)A->data;
  PetscBool      local_sym;

  MatIsSymmetric(matis->A,tol,&local_sym);
  MPI_Allreduce(&local_sym,flg,1,MPIU_BOOL,MPI_LAND,PetscObjectComm((PetscObject)A));
  return(0);
}

PetscErrorCode MatDestroy_IS(Mat A)
{
  Mat_IS         *b = (Mat_IS*)A->data;

  MatDestroy(&b->A);
  VecScatterDestroy(&b->ctx);
  VecDestroy(&b->x);
  VecDestroy(&b->y);
  ISLocalToGlobalMappingDestroy(&b->mapping);
  PetscSFDestroy(&b->sf);
  PetscFree2(b->sf_rootdata,b->sf_leafdata);
  PetscFree(A->data);
  PetscObjectChangeTypeName((PetscObject)A,0);
  PetscObjectComposeFunction((PetscObject)A,"MatISGetLocalMat_C",NULL);
  PetscObjectComposeFunction((PetscObject)A,"MatISSetLocalMat_C",NULL);
  PetscObjectComposeFunction((PetscObject)A,"MatISGetMPIXAIJ_C",NULL);
  PetscObjectComposeFunction((PetscObject)A,"MatISSetPreallocation_C",NULL);
  return(0);
}

PetscErrorCode MatMult_IS(Mat A,Vec x,Vec y)
{
  Mat_IS         *is  = (Mat_IS*)A->data;
  PetscScalar    zero = 0.0;

  /* scatter the global vector x into the local work vector */
  VecScatterBegin(is->ctx,x,is->x,INSERT_VALUES,SCATTER_FORWARD);
  VecScatterEnd(is->ctx,x,is->x,INSERT_VALUES,SCATTER_FORWARD);

  /* multiply the local matrix */
  MatMult(is->A,is->x,is->y);

  /* scatter product back into global vector */
  VecSet(y,zero);
  VecScatterBegin(is->ctx,is->y,y,ADD_VALUES,SCATTER_REVERSE);
  VecScatterEnd(is->ctx,is->y,y,ADD_VALUES,SCATTER_REVERSE);
  return(0);
}

PetscErrorCode MatMultAdd_IS(Mat A,Vec v1,Vec v2,Vec v3)
{
  Vec            temp_vec;

  if (v3 != v2) {
    MatMult(A,v1,v3);
    VecAXPY(v3,1.0,v2);
  } else {
    VecDuplicate(v2,&temp_vec);
    MatMult(A,v1,temp_vec);
    VecAXPY(temp_vec,1.0,v2);
    VecCopy(temp_vec,v3);
    VecDestroy(&temp_vec);
  }
  return(0);
}

PetscErrorCode MatMultTranspose_IS(Mat A,Vec x,Vec y)
{
  Mat_IS         *is = (Mat_IS*)A->data;

  /* scatter the global vector x into the local work vector */
  VecScatterBegin(is->ctx,x,is->x,INSERT_VALUES,SCATTER_FORWARD);
  VecScatterEnd(is->ctx,x,is->x,INSERT_VALUES,SCATTER_FORWARD);

  /* multiply the local matrix */
  MatMultTranspose(is->A,is->x,is->y);

  /* scatter product back into global vector */
  VecSet(y,0);
  VecScatterBegin(is->ctx,is->y,y,ADD_VALUES,SCATTER_REVERSE);
  VecScatterEnd(is->ctx,is->y,y,ADD_VALUES,SCATTER_REVERSE);
  return(0);
}

PetscErrorCode MatMultTransposeAdd_IS(Mat A,Vec v1,Vec v2,Vec v3)
{
  Vec            temp_vec;

  if (v3 != v2) {
    MatMultTranspose(A,v1,v3);
    VecAXPY(v3,1.0,v2);
  } else {
    VecDuplicate(v2,&temp_vec);
    MatMultTranspose(A,v1,temp_vec);
    VecAXPY(temp_vec,1.0,v2);
    VecCopy(temp_vec,v3);
    VecDestroy(&temp_vec);
  }
  return(0);
}

PetscErrorCode MatView_IS(Mat A,PetscViewer viewer)
{
  Mat_IS         *a = (Mat_IS*)A->data;
  PetscViewer    sviewer;

  PetscViewerASCIISynchronizedAllow(viewer,PETSC_TRUE);
  PetscViewerGetSingleton(viewer,&sviewer);
  MatView(a->A,sviewer);
  PetscViewerRestoreSingleton(viewer,&sviewer);
  return(0);
}

PetscErrorCode MatSetLocalToGlobalMapping_IS(Mat A,ISLocalToGlobalMapping rmapping,ISLocalToGlobalMapping cmapping)
{
  PetscInt       n,bs;
  Mat_IS         *is = (Mat_IS*)A->data;
  IS             from,to;
  Vec            global;

  if (rmapping != cmapping) SETERRQ(PetscObjectComm((PetscObject)A),PETSC_ERR_ARG_INCOMP,"MATIS requires the row and column mappings to be identical");
  if (is->mapping) { /* Currently destroys the objects that will be created by this routine. Is there anything else that should be checked? */
    ISLocalToGlobalMappingDestroy(&is->mapping);
    VecDestroy(&is->x);
    VecDestroy(&is->y);
    VecScatterDestroy(&is->ctx);
    MatDestroy(&is->A);
    PetscSFDestroy(&is->sf);
    PetscFree2(is->sf_rootdata,is->sf_leafdata);
  }
  PetscObjectReference((PetscObject)rmapping);
  ISLocalToGlobalMappingDestroy(&is->mapping);
  is->mapping = rmapping;
/*
  PetscLayoutSetISLocalToGlobalMapping(A->rmap,rmapping);
  PetscLayoutSetISLocalToGlobalMapping(A->cmap,cmapping);
*/

  /* Create the local matrix A */
  ISLocalToGlobalMappingGetSize(rmapping,&n);
  ISLocalToGlobalMappingGetBlockSize(rmapping,&bs);
  MatCreate(PETSC_COMM_SELF,&is->A);
  if (bs > 1) {
    MatSetType(is->A,MATSEQBAIJ);
  } else {
    MatSetType(is->A,MATSEQAIJ);
  }
  MatSetSizes(is->A,n,n,n,n);
  MatSetBlockSize(is->A,bs);
  MatSetOptionsPrefix(is->A,((PetscObject)A)->prefix);
  MatAppendOptionsPrefix(is->A,"is_");
  MatSetFromOptions(is->A);

  /* Create the local work vectors */
  VecCreate(PETSC_COMM_SELF,&is->x);
  VecSetBlockSize(is->x,bs);
  VecSetSizes(is->x,n,n);
  VecSetOptionsPrefix(is->x,((PetscObject)A)->prefix);
  VecAppendOptionsPrefix(is->x,"is_");
  VecSetFromOptions(is->x);
  VecDuplicate(is->x,&is->y);

  /* setup the global to local scatter */
  ISCreateStride(PETSC_COMM_SELF,n,0,1,&to);
  ISLocalToGlobalMappingApplyIS(rmapping,to,&from);
  MatCreateVecs(A,&global,NULL);
  VecScatterCreate(global,from,is->x,to,&is->ctx);
  VecDestroy(&global);
  ISDestroy(&to);
  ISDestroy(&from);
  return(0);
}
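
/*
   A minimal setup sketch (editor's example, not from the original source):
   creating a MATIS matrix by hand instead of through MatCreateIS(). The
   mapping 'map' and the sizes are assumptions of this sketch.

     Mat A;
     MatCreate(PETSC_COMM_WORLD,&A);
     MatSetSizes(A,m,n,M,N);
     MatSetType(A,MATIS);
     MatSetLocalToGlobalMapping(A,map,map);
*/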

PetscErrorCode MatSetValues_IS(Mat mat, PetscInt m,const PetscInt *rows, PetscInt n,const PetscInt *cols, const PetscScalar *values, InsertMode addv)
{
  Mat_IS         *is = (Mat_IS*)mat->data;
  PetscInt       rows_l[2048],cols_l[2048];

#if defined(PETSC_USE_DEBUG)
  if (m > 2048 || n > 2048) SETERRQ2(PETSC_COMM_SELF,PETSC_ERR_SUP,"Number of row/column indices must be <= 2048: they are %D %D",m,n);
#endif
  /* ISG2LMapApply() is a helper macro, defined earlier in the original source
     (not shown here), that maps global indices to local ones */
  ISG2LMapApply(is->mapping,m,rows,rows_l);
  ISG2LMapApply(is->mapping,n,cols,cols_l);
  MatSetValues(is->A,m,rows_l,n,cols_l,values,addv);
  return(0);
}

#undef ISG2LMapSetUp
#undef ISG2LMapApply

PetscErrorCode MatSetValuesLocal_IS(Mat A,PetscInt m,const PetscInt *rows, PetscInt n,const PetscInt *cols,const PetscScalar *values,InsertMode addv)
{
  Mat_IS         *is = (Mat_IS*)A->data;

  MatSetValues(is->A,m,rows,n,cols,values,addv);
  return(0);
}

PetscErrorCode MatSetValuesBlockedLocal_IS(Mat A,PetscInt m,const PetscInt *rows, PetscInt n,const PetscInt *cols,const PetscScalar *values,InsertMode addv)
{
  Mat_IS         *is = (Mat_IS*)A->data;

  MatSetValuesBlocked(is->A,m,rows,n,cols,values,addv);
  return(0);
}

PetscErrorCode MatZeroRows_IS(Mat A,PetscInt n,const PetscInt rows[],PetscScalar diag,Vec x,Vec b)
{
  Mat_IS         *is = (Mat_IS*)A->data;
  PetscInt       n_l = 0, *rows_l = NULL;

  if (x && b) SETERRQ(PetscObjectComm((PetscObject)A),PETSC_ERR_SUP,"No support");
  if (n) {
    PetscMalloc1(n,&rows_l);
    ISGlobalToLocalMappingApply(is->mapping,IS_GTOLM_DROP,n,rows,&n_l,rows_l);
  }
  MatZeroRowsLocal(A,n_l,rows_l,diag,x,b);
  PetscFree(rows_l);
  return(0);
}

PetscErrorCode MatZeroRowsLocal_IS(Mat A,PetscInt n,const PetscInt rows[],PetscScalar diag,Vec x,Vec b)
{
  Mat_IS         *is = (Mat_IS*)A->data;
  PetscInt       i;
  PetscScalar    *array;

  if (x && b) SETERRQ(PetscObjectComm((PetscObject)A),PETSC_ERR_SUP,"No support");
  {
    /*
       Set up is->x as a "counting vector": this is needed for MatMult_IS
       to work properly on the interface nodes.
    */
    Vec         counter;
    PetscScalar one=1.0, zero=0.0;
    MatCreateVecs(A,&counter,NULL);
    VecSet(counter,zero);
    VecSet(is->x,one);
    VecScatterBegin(is->ctx,is->x,counter,ADD_VALUES,SCATTER_REVERSE);
    VecScatterEnd  (is->ctx,is->x,counter,ADD_VALUES,SCATTER_REVERSE);
    VecScatterBegin(is->ctx,counter,is->x,INSERT_VALUES,SCATTER_FORWARD);
    VecScatterEnd  (is->ctx,counter,is->x,INSERT_VALUES,SCATTER_FORWARD);
    VecDestroy(&counter);
  }
  if (!n) {
    is->pure_neumann = PETSC_TRUE;
  } else {
    is->pure_neumann = PETSC_FALSE;

    VecGetArray(is->x,&array);
    MatZeroRows(is->A,n,rows,diag,0,0);
    for (i=0; i<n; i++) {
      MatSetValue(is->A,rows[i],rows[i],diag/(array[rows[i]]),INSERT_VALUES);
    }
    MatAssemblyBegin(is->A,MAT_FINAL_ASSEMBLY);
    MatAssemblyEnd  (is->A,MAT_FINAL_ASSEMBLY);
    VecRestoreArray(is->x,&array);
  }
  return(0);
}

PetscErrorCode MatAssemblyBegin_IS(Mat A,MatAssemblyType type)
{
  Mat_IS         *is = (Mat_IS*)A->data;

  MatAssemblyBegin(is->A,type);
  return(0);
}

PetscErrorCode MatAssemblyEnd_IS(Mat A,MatAssemblyType type)
{
  Mat_IS         *is = (Mat_IS*)A->data;

  MatAssemblyEnd(is->A,type);
  return(0);
}

PetscErrorCode MatISGetLocalMat_IS(Mat mat,Mat *local)
{
  Mat_IS *is = (Mat_IS*)mat->data;

  *local = is->A;
  return(0);
}

/*@
    MatISGetLocalMat - Gets the local matrix stored inside a MATIS matrix.

  Input Parameter:
.  mat - the matrix

  Output Parameter:
.  local - the local matrix

  Level: advanced

  Notes:
    This can be called if you have precomputed the nonzero structure of the
  matrix and want to provide it to the inner matrix object to improve the performance
  of the MatSetValues() operation.

.seealso: MATIS
@*/
PetscErrorCode MatISGetLocalMat(Mat mat,Mat *local)
{

  PetscUseMethod(mat,"MatISGetLocalMat_C",(Mat,Mat*),(mat,local));
  return(0);
}
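
/*
   A minimal usage sketch (editor's example, not from the original source):
   preallocating the inner local matrix directly, as suggested in the notes
   above. 'A' is an assumed MATIS matrix whose local-to-global mapping has
   already been set.

     Mat localA;
     MatISGetLocalMat(A,&localA);
     MatSeqAIJSetPreallocation(localA,5,NULL);
*/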

PetscErrorCode MatISSetLocalMat_IS(Mat mat,Mat local)
{
  Mat_IS         *is = (Mat_IS*)mat->data;
  PetscInt       nrows,ncols,orows,ocols;

  if (is->A) {
    MatGetSize(is->A,&orows,&ocols);
    MatGetSize(local,&nrows,&ncols);
    if (orows != nrows || ocols != ncols) SETERRQ4(PETSC_COMM_SELF,PETSC_ERR_ARG_SIZ,"Local MATIS matrix should be of size %dx%d (you passed a %dx%d matrix)\n",orows,ocols,nrows,ncols);
  }
  PetscObjectReference((PetscObject)local);
  MatDestroy(&is->A);
  is->A = local;
  return(0);
}

/*@
    MatISSetLocalMat - Replaces the local matrix stored inside a MATIS object.

  Input Parameters:
+  mat - the matrix
-  local - the local matrix

  Level: advanced

  Notes:
    This can be called if you have precomputed the local matrix and
  want to provide it to the MATIS matrix object.

.seealso: MATIS
@*/
PetscErrorCode MatISSetLocalMat(Mat mat,Mat local)
{

  PetscUseMethod(mat,"MatISSetLocalMat_C",(Mat,Mat),(mat,local));
  return(0);
}
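
/*
   A minimal usage sketch (editor's example, not from the original source):
   providing a precomputed sequential matrix 'localA' (an assumption of this
   sketch) as the local matrix of a MATIS object. MATIS takes its own
   reference, so the caller can drop theirs afterwards.

     MatISSetLocalMat(A,localA);
     MatDestroy(&localA);
*/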

PetscErrorCode MatZeroEntries_IS(Mat A)
{
  Mat_IS         *a = (Mat_IS*)A->data;

  MatZeroEntries(a->A);
  return(0);
}

PetscErrorCode MatScale_IS(Mat A,PetscScalar a)
{
  Mat_IS         *is = (Mat_IS*)A->data;

  MatScale(is->A,a);
  return(0);
}

PetscErrorCode MatGetDiagonal_IS(Mat A, Vec v)
{
  Mat_IS         *is = (Mat_IS*)A->data;

  /* get diagonal of the local matrix */
  MatGetDiagonal(is->A,is->x);

  /* scatter diagonal back into global vector */
  VecSet(v,0);
  VecScatterBegin(is->ctx,is->x,v,ADD_VALUES,SCATTER_REVERSE);
  VecScatterEnd  (is->ctx,is->x,v,ADD_VALUES,SCATTER_REVERSE);
  return(0);
}

PetscErrorCode MatSetOption_IS(Mat A,MatOption op,PetscBool flg)
{
  Mat_IS         *a = (Mat_IS*)A->data;

  MatSetOption(a->A,op,flg);
  return(0);
}

/*@
    MatCreateIS - Creates a "process" unassembled matrix: it is assembled on each
       process but not across processes.

   Input Parameters:
+     comm - MPI communicator that will share the matrix
.     bs - local and global block size of the matrix
.     m,n,M,N - local and/or global sizes of the left and right vector used in matrix-vector products
-     map - mapping that defines the global number for each local number

   Output Parameter:
.    A - the resulting matrix

   Level: advanced

   Notes: See MATIS for more details.
          m and n are NOT related to the size of the map; they are the size of the part of the vector owned
          by that process. m + nghosts (or n + nghosts) is the length of map, since map maps all local points
          plus the ghost points to global indices.

.seealso: MATIS, MatSetLocalToGlobalMapping()
@*/
PetscErrorCode  MatCreateIS(MPI_Comm comm,PetscInt bs,PetscInt m,PetscInt n,PetscInt M,PetscInt N,ISLocalToGlobalMapping map,Mat *A)
{

  MatCreate(comm,A);
  MatSetBlockSize(*A,bs);
  MatSetSizes(*A,m,n,M,N);
  MatSetType(*A,MATIS);
  MatSetUp(*A);
  MatSetLocalToGlobalMapping(*A,map,map);
  return(0);
}
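
/*
   A minimal usage sketch (editor's example, not from the original source):
   creating a MATIS matrix of global size N with one subdomain per process.
   The array 'local_to_global' (global indices of the local plus ghost dofs,
   of length nlocal) is an assumption of this sketch.

     ISLocalToGlobalMapping map;
     Mat                    A;
     ISLocalToGlobalMappingCreate(PETSC_COMM_WORLD,1,nlocal,local_to_global,PETSC_COPY_VALUES,&map);
     MatCreateIS(PETSC_COMM_WORLD,1,PETSC_DECIDE,PETSC_DECIDE,N,N,map,&A);
     ISLocalToGlobalMappingDestroy(&map);
*/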

/*MC
   MATIS - MATIS = "is" - A matrix type to be used with non-overlapping domain decomposition preconditioners (e.g. PCBDDC).
   This stores the matrices in globally unassembled form. Each processor
   assembles only its local Neumann problem and the parallel matrix-vector
   product is handled "implicitly".

   Operations Provided:
+  MatMult()
.  MatMultAdd()
.  MatMultTranspose()
.  MatMultTransposeAdd()
.  MatZeroEntries()
.  MatSetOption()
.  MatZeroRows()
.  MatZeroRowsLocal()
.  MatSetValues()
.  MatSetValuesLocal()
.  MatScale()
.  MatGetDiagonal()
-  MatSetLocalToGlobalMapping()

   Options Database Keys:
. -mat_type is - sets the matrix type to "is" during a call to MatSetFromOptions()

   Notes: Options for the inner matrix are given by the prefix -is_, e.g. -is_mat_xxx.

          You must call MatSetLocalToGlobalMapping() before using this matrix type.

          You can do matrix preallocation on the local matrix after you obtain it with
          MatISGetLocalMat(); alternatively, you can use MatISSetPreallocation().

  Level: advanced

.seealso: Mat, MatISGetLocalMat(), MatSetLocalToGlobalMapping(), MatISSetPreallocation(), PCBDDC

M*/
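
/*
   A command-line sketch (editor's example, not from the original source):
   selects MATIS for an application matrix configured with MatSetFromOptions()
   and a symmetric format for the inner local matrix. The program name './app'
   is a placeholder.

     ./app -mat_type is -is_mat_type seqsbaij
*/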

PETSC_EXTERN PetscErrorCode MatCreate_IS(Mat A)
{
  Mat_IS         *b;

  PetscNewLog(A,&b);
  A->data = (void*)b;
  PetscMemzero(A->ops,sizeof(struct _MatOps));

  A->ops->mult                    = MatMult_IS;
  A->ops->multadd                 = MatMultAdd_IS;
  A->ops->multtranspose           = MatMultTranspose_IS;
  A->ops->multtransposeadd        = MatMultTransposeAdd_IS;
  A->ops->destroy                 = MatDestroy_IS;
  A->ops->setlocaltoglobalmapping = MatSetLocalToGlobalMapping_IS;
  A->ops->setvalues               = MatSetValues_IS;
  A->ops->setvalueslocal          = MatSetValuesLocal_IS;
  A->ops->setvaluesblockedlocal   = MatSetValuesBlockedLocal_IS;
  A->ops->zerorows                = MatZeroRows_IS;
  A->ops->zerorowslocal           = MatZeroRowsLocal_IS;
  A->ops->assemblybegin           = MatAssemblyBegin_IS;
  A->ops->assemblyend             = MatAssemblyEnd_IS;
  A->ops->view                    = MatView_IS;
  A->ops->zeroentries             = MatZeroEntries_IS;
  A->ops->scale                   = MatScale_IS;
  A->ops->getdiagonal             = MatGetDiagonal_IS;
  A->ops->setoption               = MatSetOption_IS;
  A->ops->ishermitian             = MatIsHermitian_IS;
  A->ops->issymmetric             = MatIsSymmetric_IS;
  A->ops->duplicate               = MatDuplicate_IS;

  PetscLayoutSetUp(A->rmap);
  PetscLayoutSetUp(A->cmap);

  /* zero out pointers */
  b->A           = 0;
  b->ctx         = 0;
  b->x           = 0;
  b->y           = 0;
  b->mapping     = 0;
  b->sf          = 0;
  b->sf_rootdata = 0;
  b->sf_leafdata = 0;

  /* special MATIS functions */
  PetscObjectComposeFunction((PetscObject)A,"MatISGetLocalMat_C",MatISGetLocalMat_IS);
  PetscObjectComposeFunction((PetscObject)A,"MatISSetLocalMat_C",MatISSetLocalMat_IS);
  PetscObjectComposeFunction((PetscObject)A,"MatISGetMPIXAIJ_C",MatISGetMPIXAIJ_IS);
  PetscObjectComposeFunction((PetscObject)A,"MatISSetPreallocation_C",MatISSetPreallocation_IS);
  PetscObjectChangeTypeName((PetscObject)A,MATIS);
  return(0);
}