/*
   matis.c — PETSc source listing (petsc-master snapshot, 2014-10-19).
*/
  2: /*
  3:     Creates a matrix class for using the Neumann-Neumann type preconditioners.
  4:    This stores the matrices in globally unassembled form. Each processor
  5:    assembles only its local Neumann problem and the parallel matrix vector
  6:    product is handled "implicitly".

  8:      We provide:
  9:          MatMult()

 11:     Currently this allows for only one subdomain per processor.

 13: */

 15: #include <../src/mat/impls/is/matis.h>      /*I "petscmat.h" I*/
 16: #include <petscsf.h>

/*@
 21:    MatISSetPreallocation - Preallocates memory for a MATIS parallel matrix.

 23:    Collective on MPI_Comm

 25:    Input Parameters:
 26: +  B - the matrix
 27: .  d_nz  - number of nonzeros per row in DIAGONAL portion of local submatrix
 28:            (same value is used for all local rows)
 29: .  d_nnz - array containing the number of nonzeros in the various rows of the
 30:            DIAGONAL portion of the local submatrix (possibly different for each row)
 31:            or NULL, if d_nz is used to specify the nonzero structure.
 32:            The size of this array is equal to the number of local rows, i.e 'm'.
 33:            For matrices that will be factored, you must leave room for (and set)
 34:            the diagonal entry even if it is zero.
 35: .  o_nz  - number of nonzeros per row in the OFF-DIAGONAL portion of local
 36:            submatrix (same value is used for all local rows).
 37: -  o_nnz - array containing the number of nonzeros in the various rows of the
 38:            OFF-DIAGONAL portion of the local submatrix (possibly different for
 39:            each row) or NULL, if o_nz is used to specify the nonzero
 40:            structure. The size of this array is equal to the number
 41:            of local rows, i.e 'm'.

 43:    If the *_nnz parameter is given then the *_nz parameter is ignored

 45:    Level: intermediate

 47:    Notes: This function has the same interface as the MPIAIJ preallocation routine in order to simplify the transition
 48:           from the asssembled format to the unassembled one. It overestimates the preallocation of MATIS local
 49:           matrices; for exact preallocation, the user should set the preallocation directly on local matrix objects.

 51: .keywords: matrix

 53: .seealso: MatCreate(), MatCreateIS(), MatMPIAIJSetPreallocation(), MatISGetLocalMat()
 54: @*/
PetscErrorCode  MatISSetPreallocation(Mat B,PetscInt d_nz,const PetscInt d_nnz[],PetscInt o_nz,const PetscInt o_nnz[])
{
  /* Dispatch to the type-specific implementation composed on B
     (MatISSetPreallocation_IS for MATIS); PetscTryMethod makes this a
     silent no-op when the method is not composed (non-MATIS matrices). */
  PetscTryMethod(B,"MatISSetPreallocation_C",(Mat,PetscInt,const PetscInt[],PetscInt,const PetscInt[]),(B,d_nz,d_nnz,o_nz,o_nnz));
  return(0);
}

/* Implementation of MatISSetPreallocation for MATIS: translates MPIAIJ-style
   (d_nz/d_nnz, o_nz/o_nnz) estimates, given per owned global row, into
   per-local-row estimates for the sequential local matrix via a PetscSF. */
PetscErrorCode  MatISSetPreallocation_IS(Mat B,PetscInt d_nz,const PetscInt d_nnz[],PetscInt o_nz,const PetscInt o_nnz[])
{
  Mat_IS         *matis = (Mat_IS*)(B->data);
  PetscSF        sf;
  PetscInt       bs,i,nroots,*rootdata,nleaves,*leafdata,nlocalcols;
  const PetscInt *gidxs;

  /* The local matrix (created by MatSetLocalToGlobalMapping) must exist. */
  if (!matis->A) {
    SETERRQ(PetscObjectComm((PetscObject)B),PETSC_ERR_SUP,"You should first call MatSetLocalToGlobalMapping");
  }
  /* roots = locally owned global rows of B; leaves = rows of the local matrix */
  MatGetLocalSize(B,&nroots,NULL);
  MatGetSize(matis->A,&nleaves,&nlocalcols);
  MatGetBlockSize(matis->A,&bs);
  PetscCalloc2(nroots,&rootdata,nleaves,&leafdata);
  /* SF connects each local row (leaf) to the owner of that global row (root) */
  PetscSFCreate(PetscObjectComm((PetscObject)B),&sf);
  PetscSFSetFromOptions(sf);
  ISLocalToGlobalMappingGetIndices(matis->mapping,&gidxs);
  PetscSFSetGraphLayout(sf,B->rmap,nleaves,NULL,PETSC_COPY_VALUES,gidxs);
  ISLocalToGlobalMappingRestoreIndices(matis->mapping,&gidxs);
  /* accumulate the total per-row estimate (diagonal + off-diagonal) at roots */
  if (!d_nnz) {
    for (i=0;i<nroots;i++) rootdata[i] += d_nz;
  } else {
    for (i=0;i<nroots;i++) rootdata[i] += d_nnz[i];
  }
  if (!o_nnz) {
    for (i=0;i<nroots;i++) rootdata[i] += o_nz;
  } else {
    for (i=0;i<nroots;i++) rootdata[i] += o_nnz[i];
  }
  /* broadcast the (over)estimates from owners to all local rows sharing them */
  PetscSFBcastBegin(sf,MPIU_INT,rootdata,leafdata);
  PetscSFBcastEnd(sf,MPIU_INT,rootdata,leafdata);
  for (i=0;i<nleaves;i++) {
    /* a local row cannot have more nonzeros than local columns */
    leafdata[i] = PetscMin(leafdata[i],nlocalcols);
  }
  MatSeqAIJSetPreallocation(matis->A,0,leafdata);
  /* Only the call matching matis->A's actual type takes effect; leafdata is
     reused in place below. Safe because the write index i never exceeds the
     read index i*bs. */
  for (i=0;i<nleaves/bs;i++) {
    leafdata[i] = leafdata[i*bs]/bs;
  }
  MatSeqBAIJSetPreallocation(matis->A,bs,0,leafdata);
  /* SBAIJ stores only the upper triangle: shrink the estimate by the row
     index (a heuristic, still an overestimate overall) */
  for (i=0;i<nleaves/bs;i++) {
    leafdata[i] = leafdata[i]-i;
  }
  MatSeqSBAIJSetPreallocation(matis->A,bs,0,leafdata);
  PetscSFDestroy(&sf);
  PetscFree2(rootdata,leafdata);
  return(0);
}

120: PetscErrorCode MatISGetMPIXAIJ_IS(Mat mat, MatReuse reuse, Mat *M)
121: {
122:   Mat_IS                 *matis = (Mat_IS*)(mat->data);
123:   /* info on mat */
124:   /* ISLocalToGlobalMapping rmapping,cmapping; */
125:   PetscInt               bs,rows,cols;
126:   PetscInt               lrows,lcols;
127:   PetscInt               local_rows,local_cols;
128:   PetscBool              isdense,issbaij,issbaij_red;
129:   /* values insertion */
130:   PetscScalar            *array;
131:   PetscInt               *local_indices,*global_indices;
132:   /* work */
133:   PetscInt               i,j,index_row;
134:   PetscErrorCode         ierr;

137:   /* MISSING CHECKS
138:     - rectangular case not covered (it is not allowed by MATIS)
139:   */
140:   /* get info from mat */
141:   /* MatGetLocalToGlobalMapping(mat,&rmapping,&cmapping); */
142:   MatGetSize(mat,&rows,&cols);
143:   MatGetBlockSize(mat,&bs);
144:   MatGetSize(matis->A,&local_rows,&local_cols);
145:   PetscObjectTypeCompare((PetscObject)matis->A,MATSEQDENSE,&isdense);
146:   PetscObjectTypeCompare((PetscObject)matis->A,MATSEQSBAIJ,&issbaij);

148:   /* work */
149:   PetscMalloc1(local_rows,&local_indices);
150:   for (i=0;i<local_rows;i++) local_indices[i]=i;
151:   /* map indices of local mat to global values */
152:   PetscMalloc(PetscMax(local_cols,local_rows)*sizeof(*global_indices),&global_indices);
153:   /* ISLocalToGlobalMappingApply(rmapping,local_rows,local_indices,global_indices); */
154:   ISLocalToGlobalMappingApply(matis->mapping,local_rows,local_indices,global_indices);

156:   if (issbaij) {
157:     MatGetRowUpperTriangular(matis->A);
158:   }

160:   if (reuse == MAT_INITIAL_MATRIX) {
161:     Mat         new_mat;
162:     MatType     new_mat_type;
163:     Vec         vec_dnz,vec_onz;
164:     PetscScalar *my_dnz,*my_onz;
165:     PetscInt    *dnz,*onz,*mat_ranges,*row_ownership;
166:     PetscInt    index_col,owner;
167:     PetscMPIInt nsubdomains;

169:     /* determining new matrix type */
170:     MPI_Allreduce(&issbaij,&issbaij_red,1,MPIU_BOOL,MPI_LAND,PetscObjectComm((PetscObject)mat));
171:     if (issbaij_red) {
172:       new_mat_type = MATSBAIJ;
173:     } else {
174:       if (bs>1) {
175:         new_mat_type = MATBAIJ;
176:       } else {
177:         new_mat_type = MATAIJ;
178:       }
179:     }

181:     MPI_Comm_size(PetscObjectComm((PetscObject)mat),&nsubdomains);
182:     MatCreate(PetscObjectComm((PetscObject)mat),&new_mat);
183:     MatSetSizes(new_mat,PETSC_DECIDE,PETSC_DECIDE,rows,cols);
184:     MatSetBlockSize(new_mat,bs);
185:     MatSetType(new_mat,new_mat_type);
186:     MatSetUp(new_mat);
187:     MatGetLocalSize(new_mat,&lrows,&lcols);

189:     /*
190:       preallocation
191:     */

193:     MatPreallocateInitialize(PetscObjectComm((PetscObject)new_mat),lrows,lcols,dnz,onz);
194:     /*
195:        Some vectors are needed to sum up properly on shared interface dofs.
196:        Preallocation macros cannot do the job.
197:        Note that preallocation is not exact, since it overestimates nonzeros
198:     */
199:     MatCreateVecs(new_mat,NULL,&vec_dnz);
200:     /* VecSetLocalToGlobalMapping(vec_dnz,rmapping); */
201:     VecSetLocalToGlobalMapping(vec_dnz,matis->mapping);
202:     VecDuplicate(vec_dnz,&vec_onz);
203:     /* All processes need to compute entire row ownership */
204:     PetscMalloc1(rows,&row_ownership);
205:     MatGetOwnershipRanges(new_mat,(const PetscInt**)&mat_ranges);
206:     for (i=0;i<nsubdomains;i++) {
207:       for (j=mat_ranges[i];j<mat_ranges[i+1];j++) {
208:         row_ownership[j]=i;
209:       }
210:     }

212:     /*
213:        my_dnz and my_onz contains exact contribution to preallocation from each local mat
214:        then, they will be summed up properly. This way, preallocation is always sufficient
215:     */
216:     PetscMalloc1(local_rows,&my_dnz);
217:     PetscMalloc1(local_rows,&my_onz);
218:     PetscMemzero(my_dnz,local_rows*sizeof(*my_dnz));
219:     PetscMemzero(my_onz,local_rows*sizeof(*my_onz));
220:     /* preallocation as a MATAIJ */
221:     if (isdense) { /* special case for dense local matrices */
222:       for (i=0;i<local_rows;i++) {
223:         index_row = global_indices[i];
224:         for (j=i;j<local_rows;j++) {
225:           owner = row_ownership[index_row];
226:           index_col = global_indices[j];
227:           if (index_col > mat_ranges[owner]-1 && index_col < mat_ranges[owner+1] ) { /* diag block */
228:             my_dnz[i] += 1.0;
229:           } else { /* offdiag block */
230:             my_onz[i] += 1.0;
231:           }
232:           /* same as before, interchanging rows and cols */
233:           if (i != j) {
234:             owner = row_ownership[index_col];
235:             if (index_row > mat_ranges[owner]-1 && index_row < mat_ranges[owner+1] ) {
236:               my_dnz[j] += 1.0;
237:             } else {
238:               my_onz[j] += 1.0;
239:             }
240:           }
241:         }
242:       }
243:     } else {
244:       for (i=0;i<local_rows;i++) {
245:         PetscInt ncols;
246:         const PetscInt *cols;
247:         index_row = global_indices[i];
248:         MatGetRow(matis->A,i,&ncols,&cols,NULL);
249:         for (j=0;j<ncols;j++) {
250:           owner = row_ownership[index_row];
251:           index_col = global_indices[cols[j]];
252:           if (index_col > mat_ranges[owner]-1 && index_col < mat_ranges[owner+1] ) { /* diag block */
253:             my_dnz[i] += 1.0;
254:           } else { /* offdiag block */
255:             my_onz[i] += 1.0;
256:           }
257:           /* same as before, interchanging rows and cols */
258:           if (issbaij) {
259:             owner = row_ownership[index_col];
260:             if (index_row > mat_ranges[owner]-1 && index_row < mat_ranges[owner+1] ) {
261:               my_dnz[j] += 1.0;
262:             } else {
263:               my_onz[j] += 1.0;
264:             }
265:           }
266:         }
267:         MatRestoreRow(matis->A,i,&ncols,&cols,NULL);
268:       }
269:     }
270:     VecSet(vec_dnz,0.0);
271:     VecSet(vec_onz,0.0);
272:     if (local_rows) { /* multilevel guard */
273:       VecSetValuesLocal(vec_dnz,local_rows,local_indices,my_dnz,ADD_VALUES);
274:       VecSetValuesLocal(vec_onz,local_rows,local_indices,my_onz,ADD_VALUES);
275:     }
276:     VecAssemblyBegin(vec_dnz);
277:     VecAssemblyBegin(vec_onz);
278:     VecAssemblyEnd(vec_dnz);
279:     VecAssemblyEnd(vec_onz);
280:     PetscFree(my_dnz);
281:     PetscFree(my_onz);
282:     PetscFree(row_ownership);

284:     /* set computed preallocation in dnz and onz */
285:     VecGetArray(vec_dnz,&array);
286:     for (i=0; i<lrows; i++) dnz[i] = (PetscInt)PetscRealPart(array[i]);
287:     VecRestoreArray(vec_dnz,&array);
288:     VecGetArray(vec_onz,&array);
289:     for (i=0;i<lrows;i++) onz[i] = (PetscInt)PetscRealPart(array[i]);
290:     VecRestoreArray(vec_onz,&array);
291:     VecDestroy(&vec_dnz);
292:     VecDestroy(&vec_onz);

294:     /* Resize preallocation if overestimated */
295:     for (i=0;i<lrows;i++) {
296:       dnz[i] = PetscMin(dnz[i],lcols);
297:       onz[i] = PetscMin(onz[i],cols-lcols);
298:     }
299:     /* set preallocation */
300:     MatMPIAIJSetPreallocation(new_mat,0,dnz,0,onz);
301:     for (i=0;i<lrows/bs;i++) {
302:       dnz[i] = dnz[i*bs]/bs;
303:       onz[i] = onz[i*bs]/bs;
304:     }
305:     MatMPIBAIJSetPreallocation(new_mat,bs,0,dnz,0,onz);
306:     for (i=0;i<lrows/bs;i++) {
307:       dnz[i] = dnz[i]-i;
308:     }
309:     MatMPISBAIJSetPreallocation(new_mat,bs,0,dnz,0,onz);
310:     MatPreallocateFinalize(dnz,onz);
311:     *M = new_mat;
312:   } else {
313:     PetscInt mbs,mrows,mcols;
314:     /* some checks */
315:     MatGetBlockSize(*M,&mbs);
316:     MatGetSize(*M,&mrows,&mcols);
317:     if (mrows != rows) {
318:       SETERRQ2(PetscObjectComm((PetscObject)mat),PETSC_ERR_SUP,"Cannot reuse matrix. Wrong number of rows (%d != %d)",rows,mrows);
319:     }
320:     if (mrows != rows) {
321:       SETERRQ2(PetscObjectComm((PetscObject)mat),PETSC_ERR_SUP,"Cannot reuse matrix. Wrong number of cols (%d != %d)",cols,mcols);
322:     }
323:     if (mbs != bs) {
324:       SETERRQ2(PetscObjectComm((PetscObject)mat),PETSC_ERR_SUP,"Cannot reuse matrix. Wrong block size (%d != %d)",bs,mbs);
325:     }
326:     MatZeroEntries(*M);
327:   }
328:   /* set local to global mappings */
329:   /* MatSetLocalToGlobalMapping(*M,rmapping,cmapping); */
330:   /* Set values */
331:   if (isdense) { /* special case for dense local matrices */
332:     MatSetOption(*M,MAT_ROW_ORIENTED,PETSC_FALSE);
333:     MatDenseGetArray(matis->A,&array);
334:     MatSetValues(*M,local_rows,global_indices,local_cols,global_indices,array,ADD_VALUES);
335:     MatDenseRestoreArray(matis->A,&array);
336:     PetscFree(local_indices);
337:     PetscFree(global_indices);
338:   } else { /* very basic values insertion for all other matrix types */
339:     PetscFree(local_indices);
340:     for (i=0;i<local_rows;i++) {
341:       MatGetRow(matis->A,i,&j,(const PetscInt**)&local_indices,(const PetscScalar**)&array);
342:       /* MatSetValuesLocal(*M,1,&i,j,local_indices,array,ADD_VALUES); */
343:       ISLocalToGlobalMappingApply(matis->mapping,j,local_indices,global_indices);
344:       ISLocalToGlobalMappingApply(matis->mapping,1,&i,&index_row);
345:       MatSetValues(*M,1,&index_row,j,global_indices,array,ADD_VALUES);
346:       MatRestoreRow(matis->A,i,&j,(const PetscInt**)&local_indices,(const PetscScalar**)&array);
347:     }
348:     PetscFree(global_indices);
349:   }
350:   MatAssemblyBegin(*M,MAT_FINAL_ASSEMBLY);
351:   MatAssemblyEnd(*M,MAT_FINAL_ASSEMBLY);
352:   if (isdense) {
353:     MatSetOption(*M,MAT_ROW_ORIENTED,PETSC_TRUE);
354:   }
355:   if (issbaij) {
356:     MatRestoreRowUpperTriangular(matis->A);
357:   }
358:   return(0);
359: }

363: /*@
364:     MatISGetMPIXAIJ - Converts MATIS matrix into a parallel AIJ format

  Input Parameters:
367: .  mat - the matrix (should be of type MATIS)
368: .  reuse - either MAT_INITIAL_MATRIX or MAT_REUSE_MATRIX

370:   Output Parameter:
371: .  newmat - the matrix in AIJ format

373:   Level: developer

375:   Notes:

377: .seealso: MATIS
378: @*/
PetscErrorCode MatISGetMPIXAIJ(Mat mat, MatReuse reuse, Mat *newmat)
{
  /* NOTE(review): this branch is empty in this listing — it looks like
     argument validation for the MAT_REUSE_MATRIX case was stripped;
     confirm against the full source. */
  if (reuse != MAT_INITIAL_MATRIX) {
  }
  /* dispatch to the MATIS implementation; PetscUseMethod raises an error
     if the method is not composed on mat (i.e. mat is not a MATIS) */
  PetscUseMethod(mat,"MatISGetMPIXAIJ_C",(Mat,MatReuse,Mat*),(mat,reuse,newmat));
  return(0);
}

397: PetscErrorCode MatDuplicate_IS(Mat mat,MatDuplicateOption op,Mat *newmat)
398: {
400:   Mat_IS         *matis = (Mat_IS*)(mat->data);
401:   PetscInt       bs,m,n,M,N;
402:   Mat            B,localmat;

405:   MatGetBlockSize(mat,&bs);
406:   MatGetSize(mat,&M,&N);
407:   MatGetLocalSize(mat,&m,&n);
408:   MatCreateIS(PetscObjectComm((PetscObject)mat),bs,m,n,M,N,matis->mapping,&B);
409:   MatDuplicate(matis->A,op,&localmat);
410:   MatISSetLocalMat(B,localmat);
411:   MatAssemblyBegin(B,MAT_FINAL_ASSEMBLY);
412:   MatAssemblyEnd(B,MAT_FINAL_ASSEMBLY);
413:   *newmat = B;
414:   return(0);
415: }

419: PetscErrorCode MatIsHermitian_IS(Mat A,PetscReal tol,PetscBool  *flg)
420: {
422:   Mat_IS         *matis = (Mat_IS*)A->data;
423:   PetscBool      local_sym;

426:   MatIsHermitian(matis->A,tol,&local_sym);
427:   MPI_Allreduce(&local_sym,flg,1,MPIU_BOOL,MPI_LAND,PetscObjectComm((PetscObject)A));
428:   return(0);
429: }

433: PetscErrorCode MatIsSymmetric_IS(Mat A,PetscReal tol,PetscBool  *flg)
434: {
436:   Mat_IS         *matis = (Mat_IS*)A->data;
437:   PetscBool      local_sym;

440:   MatIsSymmetric(matis->A,tol,&local_sym);
441:   MPI_Allreduce(&local_sym,flg,1,MPIU_BOOL,MPI_LAND,PetscObjectComm((PetscObject)A));
442:   return(0);
443: }

447: PetscErrorCode MatDestroy_IS(Mat A)
448: {
450:   Mat_IS         *b = (Mat_IS*)A->data;

453:   MatDestroy(&b->A);
454:   VecScatterDestroy(&b->ctx);
455:   VecDestroy(&b->x);
456:   VecDestroy(&b->y);
457:   ISLocalToGlobalMappingDestroy(&b->mapping);
458:   PetscFree(A->data);
459:   PetscObjectChangeTypeName((PetscObject)A,0);
460:   PetscObjectComposeFunction((PetscObject)A,"MatISGetLocalMat_C",NULL);
461:   PetscObjectComposeFunction((PetscObject)A,"MatISSetLocalMat_C",NULL);
462:   PetscObjectComposeFunction((PetscObject)A,"MatISGetMPIXAIJ_C",NULL);
463:   PetscObjectComposeFunction((PetscObject)A,"MatISSetPreallocation_C",NULL);
464:   return(0);
465: }

469: PetscErrorCode MatMult_IS(Mat A,Vec x,Vec y)
470: {
472:   Mat_IS         *is  = (Mat_IS*)A->data;
473:   PetscScalar    zero = 0.0;

476:   /*  scatter the global vector x into the local work vector */
477:   VecScatterBegin(is->ctx,x,is->x,INSERT_VALUES,SCATTER_FORWARD);
478:   VecScatterEnd(is->ctx,x,is->x,INSERT_VALUES,SCATTER_FORWARD);

480:   /* multiply the local matrix */
481:   MatMult(is->A,is->x,is->y);

483:   /* scatter product back into global memory */
484:   VecSet(y,zero);
485:   VecScatterBegin(is->ctx,is->y,y,ADD_VALUES,SCATTER_REVERSE);
486:   VecScatterEnd(is->ctx,is->y,y,ADD_VALUES,SCATTER_REVERSE);
487:   return(0);
488: }

492: PetscErrorCode MatMultAdd_IS(Mat A,Vec v1,Vec v2,Vec v3)
493: {
494:   Vec            temp_vec;

498:   if (v3 != v2) {
499:     MatMult(A,v1,v3);
500:     VecAXPY(v3,1.0,v2);
501:   } else {
502:     VecDuplicate(v2,&temp_vec);
503:     MatMult(A,v1,temp_vec);
504:     VecAXPY(temp_vec,1.0,v2);
505:     VecCopy(temp_vec,v3);
506:     VecDestroy(&temp_vec);
507:   }
508:   return(0);
509: }

513: PetscErrorCode MatMultTranspose_IS(Mat A,Vec x,Vec y)
514: {
515:   Mat_IS         *is = (Mat_IS*)A->data;

519:   /*  scatter the global vector x into the local work vector */
520:   VecScatterBegin(is->ctx,x,is->x,INSERT_VALUES,SCATTER_FORWARD);
521:   VecScatterEnd(is->ctx,x,is->x,INSERT_VALUES,SCATTER_FORWARD);

523:   /* multiply the local matrix */
524:   MatMultTranspose(is->A,is->x,is->y);

526:   /* scatter product back into global vector */
527:   VecSet(y,0);
528:   VecScatterBegin(is->ctx,is->y,y,ADD_VALUES,SCATTER_REVERSE);
529:   VecScatterEnd(is->ctx,is->y,y,ADD_VALUES,SCATTER_REVERSE);
530:   return(0);
531: }

535: PetscErrorCode MatMultTransposeAdd_IS(Mat A,Vec v1,Vec v2,Vec v3)
536: {
537:   Vec            temp_vec;

541:   if (v3 != v2) {
542:     MatMultTranspose(A,v1,v3);
543:     VecAXPY(v3,1.0,v2);
544:   } else {
545:     VecDuplicate(v2,&temp_vec);
546:     MatMultTranspose(A,v1,temp_vec);
547:     VecAXPY(temp_vec,1.0,v2);
548:     VecCopy(temp_vec,v3);
549:     VecDestroy(&temp_vec);
550:   }
551:   return(0);
552: }

556: PetscErrorCode MatView_IS(Mat A,PetscViewer viewer)
557: {
558:   Mat_IS         *a = (Mat_IS*)A->data;
560:   PetscViewer    sviewer;

563:   PetscViewerASCIISynchronizedAllow(viewer,PETSC_TRUE);
564:   PetscViewerGetSingleton(viewer,&sviewer);
565:   MatView(a->A,sviewer);
566:   PetscViewerRestoreSingleton(viewer,&sviewer);
567:   return(0);
568: }

/* Install the local-to-global mapping on a MATIS and build everything that
   depends on it: the sequential local matrix, the local work vectors, and
   the global-to-local scatter used by MatMult/MatMultTranspose. */
PetscErrorCode MatSetLocalToGlobalMapping_IS(Mat A,ISLocalToGlobalMapping rmapping,ISLocalToGlobalMapping cmapping)
{
  PetscInt       n,bs;
  Mat_IS         *is = (Mat_IS*)A->data;
  IS             from,to;
  Vec            global;

  /* MATIS supports only square operators: one mapping serves rows and cols */
  if (rmapping != cmapping) SETERRQ(PetscObjectComm((PetscObject)A),PETSC_ERR_ARG_INCOMP,"MATIS requires the row and column mappings to be identical");
  if (is->mapping) { /* Currenly destroys the objects that will be created by this routine. Is there anything else that should be checked? */
    ISLocalToGlobalMappingDestroy(&is->mapping);
    VecDestroy(&is->x);
    VecDestroy(&is->y);
    VecScatterDestroy(&is->ctx);
    MatDestroy(&is->A);
  }
  /* take our own reference before storing; the Destroy below is a no-op
     because is->mapping is NULL at this point (nulled above or at create) */
  PetscObjectReference((PetscObject)rmapping);
  ISLocalToGlobalMappingDestroy(&is->mapping);
  is->mapping = rmapping;
/*
  PetscLayoutSetISLocalToGlobalMapping(A->rmap,rmapping);
  PetscLayoutSetISLocalToGlobalMapping(A->cmap,cmapping);
*/

  /* Create the local matrix A: n is the number of local + ghost dofs, i.e.
     the size of the mapping; it is sequential because each process owns
     exactly one subdomain */
  ISLocalToGlobalMappingGetSize(rmapping,&n);
  ISLocalToGlobalMappingGetBlockSize(rmapping,&bs);
  MatCreate(PETSC_COMM_SELF,&is->A);
  if (bs > 1) {
    MatSetType(is->A,MATSEQBAIJ);
  } else {
    MatSetType(is->A,MATSEQAIJ);
  }
  MatSetSizes(is->A,n,n,n,n);
  MatSetBlockSize(is->A,bs);
  /* inner matrix options use the "is_" suffix on A's prefix */
  MatSetOptionsPrefix(is->A,((PetscObject)A)->prefix);
  MatAppendOptionsPrefix(is->A,"is_");
  MatSetFromOptions(is->A);

  /* Create the local work vectors */
  VecCreate(PETSC_COMM_SELF,&is->x);
  VecSetBlockSize(is->x,bs);
  VecSetSizes(is->x,n,n);
  VecSetOptionsPrefix(is->x,((PetscObject)A)->prefix);
  VecAppendOptionsPrefix(is->x,"is_");
  VecSetFromOptions(is->x);
  VecDuplicate(is->x,&is->y);

  /* setup the global-to-local scatter: local position i <- global mapping(i) */
  ISCreateStride(PETSC_COMM_SELF,n,0,1,&to);
  ISLocalToGlobalMappingApplyIS(rmapping,to,&from);
  MatCreateVecs(A,&global,NULL);
  VecScatterCreate(global,from,is->x,to,&is->ctx);
  VecDestroy(&global);
  ISDestroy(&to);
  ISDestroy(&from);
  return(0);
}

/* Insert values given with GLOBAL indices: translate them to local indices
   and forward to the sequential local matrix. */
PetscErrorCode MatSetValues_IS(Mat mat, PetscInt m,const PetscInt *rows, PetscInt n,const PetscInt *cols, const PetscScalar *values, InsertMode addv)
{
  Mat_IS         *is = (Mat_IS*)mat->data;
  /* fixed-size stack buffers for the translated indices; limit checked below */
  PetscInt       rows_l[2048],cols_l[2048];

#if defined(PETSC_USE_DEBUG)
  if (m > 2048 || n > 2048) SETERRQ2(PETSC_COMM_SELF,PETSC_ERR_SUP,"Number of row/column indices must be <= 2048: they are %D %D",m,n);
#endif
  /* ISG2LMapApply is a file-local macro (#undef'd right after this function);
     presumably it wraps ISGlobalToLocalMappingApply — its definition is not
     visible in this listing, confirm against the full source */
  ISG2LMapApply(is->mapping,m,rows,rows_l);
  ISG2LMapApply(is->mapping,n,cols,cols_l);
  MatSetValues(is->A,m,rows_l,n,cols_l,values,addv);
  return(0);
}

651: #undef ISG2LMapSetUp
652: #undef ISG2LMapApply

656: PetscErrorCode MatSetValuesLocal_IS(Mat A,PetscInt m,const PetscInt *rows, PetscInt n,const PetscInt *cols,const PetscScalar *values,InsertMode addv)
657: {
659:   Mat_IS         *is = (Mat_IS*)A->data;

662:   MatSetValues(is->A,m,rows,n,cols,values,addv);
663:   return(0);
664: }

668: PetscErrorCode MatSetValuesBlockedLocal_IS(Mat A,PetscInt m,const PetscInt *rows, PetscInt n,const PetscInt *cols,const PetscScalar *values,InsertMode addv)
669: {
671:   Mat_IS         *is = (Mat_IS*)A->data;

674:   MatSetValuesBlocked(is->A,m,rows,n,cols,values,addv);
675:   return(0);
676: }

680: PetscErrorCode MatZeroRows_IS(Mat A,PetscInt n,const PetscInt rows[],PetscScalar diag,Vec x,Vec b)
681: {
682:   Mat_IS         *is = (Mat_IS*)A->data;
683:   PetscInt       n_l = 0, *rows_l = NULL;

687:   if (x && b) SETERRQ(PetscObjectComm((PetscObject)A),PETSC_ERR_SUP,"No support");
688:   if (n) {
689:     PetscMalloc1(n,&rows_l);
690:     ISGlobalToLocalMappingApply(is->mapping,IS_GTOLM_DROP,n,rows,&n_l,rows_l);
691:   }
692:   MatZeroRowsLocal(A,n_l,rows_l,diag,x,b);
693:   PetscFree(rows_l);
694:   return(0);
695: }

/* Zero the given LOCAL rows of the local matrix and set a scaled diagonal so
   that the assembled global diagonal equals 'diag' on shared dofs. */
PetscErrorCode MatZeroRowsLocal_IS(Mat A,PetscInt n,const PetscInt rows[],PetscScalar diag,Vec x,Vec b)
{
  Mat_IS         *is = (Mat_IS*)A->data;
  PetscInt       i;
  PetscScalar    *array;

  if (x && b) SETERRQ(PetscObjectComm((PetscObject)A),PETSC_ERR_SUP,"No support");
  {
    /*
       Set up is->x as a "counting vector". This is in order to MatMult_IS
       work properly in the interface nodes.
    */
    Vec         counter;
    PetscScalar one=1.0, zero=0.0;
    MatCreateVecs(A,&counter,NULL);
    VecSet(counter,zero);
    VecSet(is->x,one);
    /* reverse add-scatter then forward scatter: afterwards is->x[i] holds
       the number of subdomains sharing local dof i (>= 1) */
    VecScatterBegin(is->ctx,is->x,counter,ADD_VALUES,SCATTER_REVERSE);
    VecScatterEnd  (is->ctx,is->x,counter,ADD_VALUES,SCATTER_REVERSE);
    VecScatterBegin(is->ctx,counter,is->x,INSERT_VALUES,SCATTER_FORWARD);
    VecScatterEnd  (is->ctx,counter,is->x,INSERT_VALUES,SCATTER_FORWARD);
    VecDestroy(&counter);
  }
  if (!n) {
    is->pure_neumann = PETSC_TRUE;
  } else {
    is->pure_neumann = PETSC_FALSE;

    VecGetArray(is->x,&array);
    MatZeroRows(is->A,n,rows,diag,0,0);
    for (i=0; i<n; i++) {
      /* divide by the sharing count so contributions from all subdomains
         sum to diag; array[rows[i]] >= 1 by construction above */
      MatSetValue(is->A,rows[i],rows[i],diag/(array[rows[i]]),INSERT_VALUES);
    }
    MatAssemblyBegin(is->A,MAT_FINAL_ASSEMBLY);
    MatAssemblyEnd  (is->A,MAT_FINAL_ASSEMBLY);
    VecRestoreArray(is->x,&array);
  }
  return(0);
}

743: PetscErrorCode MatAssemblyBegin_IS(Mat A,MatAssemblyType type)
744: {
745:   Mat_IS         *is = (Mat_IS*)A->data;

749:   MatAssemblyBegin(is->A,type);
750:   return(0);
751: }

755: PetscErrorCode MatAssemblyEnd_IS(Mat A,MatAssemblyType type)
756: {
757:   Mat_IS         *is = (Mat_IS*)A->data;

761:   MatAssemblyEnd(is->A,type);
762:   return(0);
763: }

767: PetscErrorCode MatISGetLocalMat_IS(Mat mat,Mat *local)
768: {
769:   Mat_IS *is = (Mat_IS*)mat->data;

772:   *local = is->A;
773:   return(0);
774: }

778: /*@
779:     MatISGetLocalMat - Gets the local matrix stored inside a MATIS matrix.

781:   Input Parameter:
782: .  mat - the matrix

784:   Output Parameter:
785: .  local - the local matrix usually MATSEQAIJ

787:   Level: advanced

789:   Notes:
790:     This can be called if you have precomputed the nonzero structure of the
791:   matrix and want to provide it to the inner matrix object to improve the performance
792:   of the MatSetValues() operation.

794: .seealso: MATIS
795: @*/
796: PetscErrorCode MatISGetLocalMat(Mat mat,Mat *local)
797: {

803:   PetscUseMethod(mat,"MatISGetLocalMat_C",(Mat,Mat*),(mat,local));
804:   return(0);
805: }

809: PetscErrorCode MatISSetLocalMat_IS(Mat mat,Mat local)
810: {
811:   Mat_IS         *is = (Mat_IS*)mat->data;
812:   PetscInt       nrows,ncols,orows,ocols;

816:   if (is->A) {
817:     MatGetSize(is->A,&orows,&ocols);
818:     MatGetSize(local,&nrows,&ncols);
819:     if (orows != nrows || ocols != ncols) SETERRQ4(PETSC_COMM_SELF,PETSC_ERR_ARG_SIZ,"Local MATIS matrix should be of size %dx%d (you passed a %dx%d matrix)\n",orows,ocols,nrows,ncols);
820:   }
821:   PetscObjectReference((PetscObject)local);
822:   MatDestroy(&is->A);
823:   is->A = local;
824:   return(0);
825: }

829: /*@
830:     MatISSetLocalMat - Set the local matrix stored inside a MATIS matrix.

832:   Input Parameter:
833: .  mat - the matrix
834: .  local - the local matrix usually MATSEQAIJ

836:   Output Parameter:

838:   Level: advanced

840:   Notes:
841:     This can be called if you have precomputed the local matrix and
842:   want to provide it to the matrix object MATIS.

844: .seealso: MATIS
845: @*/
846: PetscErrorCode MatISSetLocalMat(Mat mat,Mat local)
847: {

853:   PetscUseMethod(mat,"MatISSetLocalMat_C",(Mat,Mat),(mat,local));
854:   return(0);
855: }

859: PetscErrorCode MatZeroEntries_IS(Mat A)
860: {
861:   Mat_IS         *a = (Mat_IS*)A->data;

865:   MatZeroEntries(a->A);
866:   return(0);
867: }

871: PetscErrorCode MatScale_IS(Mat A,PetscScalar a)
872: {
873:   Mat_IS         *is = (Mat_IS*)A->data;

877:   MatScale(is->A,a);
878:   return(0);
879: }

883: PetscErrorCode MatGetDiagonal_IS(Mat A, Vec v)
884: {
885:   Mat_IS         *is = (Mat_IS*)A->data;

889:   /* get diagonal of the local matrix */
890:   MatGetDiagonal(is->A,is->x);

892:   /* scatter diagonal back into global vector */
893:   VecSet(v,0);
894:   VecScatterBegin(is->ctx,is->x,v,ADD_VALUES,SCATTER_REVERSE);
895:   VecScatterEnd  (is->ctx,is->x,v,ADD_VALUES,SCATTER_REVERSE);
896:   return(0);
897: }

901: PetscErrorCode MatSetOption_IS(Mat A,MatOption op,PetscBool flg)
902: {
903:   Mat_IS         *a = (Mat_IS*)A->data;

907:   MatSetOption(a->A,op,flg);
908:   return(0);
909: }

913: /*@
914:     MatCreateIS - Creates a "process" unassmembled matrix, it is assembled on each
915:        process but not across processes.

917:    Input Parameters:
918: +     comm - MPI communicator that will share the matrix
919: .     bs - local and global block size of the matrix
920: .     m,n,M,N - local and/or global sizes of the left and right vector used in matrix vector products
921: -     map - mapping that defines the global number for each local number

923:    Output Parameter:
924: .    A - the resulting matrix

926:    Level: advanced

928:    Notes: See MATIS for more details
929:           m and n are NOT related to the size of the map, they are the size of the part of the vector owned
930:           by that process. m + nghosts (or n + nghosts) is the length of map since map maps all local points
931:           plus the ghost points to global indices.

933: .seealso: MATIS, MatSetLocalToGlobalMapping()
934: @*/
935: PetscErrorCode  MatCreateIS(MPI_Comm comm,PetscInt bs,PetscInt m,PetscInt n,PetscInt M,PetscInt N,ISLocalToGlobalMapping map,Mat *A)
936: {

940:   MatCreate(comm,A);
941:   MatSetBlockSize(*A,bs);
942:   MatSetSizes(*A,m,n,M,N);
943:   MatSetType(*A,MATIS);
944:   MatSetUp(*A);
945:   MatSetLocalToGlobalMapping(*A,map,map);
946:   return(0);
947: }

949: /*MC
950:    MATIS - MATIS = "is" - A matrix type to be used for using the Neumann-Neumann type preconditioners, see PCBDDC.
951:    This stores the matrices in globally unassembled form. Each processor
952:    assembles only its local Neumann problem and the parallel matrix vector
953:    product is handled "implicitly".

955:    Operations Provided:
956: +  MatMult()
957: .  MatMultAdd()
958: .  MatMultTranspose()
959: .  MatMultTransposeAdd()
960: .  MatZeroEntries()
961: .  MatSetOption()
962: .  MatZeroRows()
963: .  MatZeroRowsLocal()
964: .  MatSetValues()
965: .  MatSetValuesLocal()
966: .  MatScale()
967: .  MatGetDiagonal()
968: -  MatSetLocalToGlobalMapping()

970:    Options Database Keys:
971: . -mat_type is - sets the matrix type to "is" during a call to MatSetFromOptions()

973:    Notes: Options prefix for the inner matrix are given by -is_mat_xxx

975:           You must call MatSetLocalToGlobalMapping() before using this matrix type.

977:           You can do matrix preallocation on the local matrix after you obtain it with
978:           MatISGetLocalMat()

980:   Level: advanced

982: .seealso: PC, MatISGetLocalMat(), MatSetLocalToGlobalMapping(), PCBDDC

984: M*/

988: PETSC_EXTERN PetscErrorCode MatCreate_IS(Mat A)
989: {
991:   Mat_IS         *b;

994:   PetscNewLog(A,&b);
995:   A->data = (void*)b;
996:   PetscMemzero(A->ops,sizeof(struct _MatOps));

998:   A->ops->mult                    = MatMult_IS;
999:   A->ops->multadd                 = MatMultAdd_IS;
1000:   A->ops->multtranspose           = MatMultTranspose_IS;
1001:   A->ops->multtransposeadd        = MatMultTransposeAdd_IS;
1002:   A->ops->destroy                 = MatDestroy_IS;
1003:   A->ops->setlocaltoglobalmapping = MatSetLocalToGlobalMapping_IS;
1004:   A->ops->setvalues               = MatSetValues_IS;
1005:   A->ops->setvalueslocal          = MatSetValuesLocal_IS;
1006:   A->ops->setvaluesblockedlocal   = MatSetValuesBlockedLocal_IS;
1007:   A->ops->zerorows                = MatZeroRows_IS;
1008:   A->ops->zerorowslocal           = MatZeroRowsLocal_IS;
1009:   A->ops->assemblybegin           = MatAssemblyBegin_IS;
1010:   A->ops->assemblyend             = MatAssemblyEnd_IS;
1011:   A->ops->view                    = MatView_IS;
1012:   A->ops->zeroentries             = MatZeroEntries_IS;
1013:   A->ops->scale                   = MatScale_IS;
1014:   A->ops->getdiagonal             = MatGetDiagonal_IS;
1015:   A->ops->setoption               = MatSetOption_IS;
1016:   A->ops->ishermitian             = MatIsHermitian_IS;
1017:   A->ops->issymmetric             = MatIsSymmetric_IS;
1018:   A->ops->duplicate               = MatDuplicate_IS;

1020:   PetscLayoutSetUp(A->rmap);
1021:   PetscLayoutSetUp(A->cmap);

1023:   /* zeros pointer */
1024:   b->A       = 0;
1025:   b->ctx     = 0;
1026:   b->x       = 0;
1027:   b->y       = 0;
1028:   b->mapping = 0;

1030:   /* special MATIS functions */
1031:   PetscObjectComposeFunction((PetscObject)A,"MatISGetLocalMat_C",MatISGetLocalMat_IS);
1032:   PetscObjectComposeFunction((PetscObject)A,"MatISSetLocalMat_C",MatISSetLocalMat_IS);
1033:   PetscObjectComposeFunction((PetscObject)A,"MatISGetMPIXAIJ_C",MatISGetMPIXAIJ_IS);
1034:   PetscObjectComposeFunction((PetscObject)A,"MatISSetPreallocation_C",MatISSetPreallocation_IS);
1035:   PetscObjectChangeTypeName((PetscObject)A,MATIS);
1036:   return(0);
1037: }