Actual source code: matis.c

petsc-master 2014-12-18
Report Typos and Errors
  2: /*
  3:     Creates a matrix class for using the Neumann-Neumann type preconditioners.
  4:    This stores the matrices in globally unassembled form. Each processor
  5:    assembles only its local Neumann problem and the parallel matrix vector
  6:    product is handled "implicitly".

  8:      We provide:
  9:          MatMult()

 11:     Currently this allows for only one subdomain per processor.

 13: */

 15: #include <../src/mat/impls/is/matis.h>      /*I "petscmat.h" I*/
 16: #include <petscsf.h>

 20: /*
 21:    MatISSetPreallocation - Preallocates memory for a MATIS parallel matrix.

 23:    Collective on MPI_Comm

 25:    Input Parameters:
 26: +  B - the matrix
 27: .  d_nz  - number of nonzeros per row in DIAGONAL portion of local submatrix
 28:            (same value is used for all local rows)
 29: .  d_nnz - array containing the number of nonzeros in the various rows of the
 30:            DIAGONAL portion of the local submatrix (possibly different for each row)
 31:            or NULL, if d_nz is used to specify the nonzero structure.
 32:            The size of this array is equal to the number of local rows, i.e 'm'.
 33:            For matrices that will be factored, you must leave room for (and set)
 34:            the diagonal entry even if it is zero.
 35: .  o_nz  - number of nonzeros per row in the OFF-DIAGONAL portion of local
 36:            submatrix (same value is used for all local rows).
 37: -  o_nnz - array containing the number of nonzeros in the various rows of the
 38:            OFF-DIAGONAL portion of the local submatrix (possibly different for
 39:            each row) or NULL, if o_nz is used to specify the nonzero
 40:            structure. The size of this array is equal to the number
 41:            of local rows, i.e 'm'.

 43:    If the *_nnz parameter is given then the *_nz parameter is ignored

 45:    Level: intermediate

 47:    Notes: This function has the same interface as the MPIAIJ preallocation routine in order to simplify the transition
 48:           from the assembled format to the unassembled one. It overestimates the preallocation of MATIS local
 49:           matrices; for exact preallocation, the user should set the preallocation directly on local matrix objects.

 51: .keywords: matrix

 53: .seealso: MatCreate(), MatCreateIS(), MatMPIAIJSetPreallocation(), MatISGetLocalMat()
 54: @*/
/* Public entry point: dispatches to the type-specific implementation
   (MatISSetPreallocation_IS) if the matrix provides it; a no-op otherwise. */
PetscErrorCode  MatISSetPreallocation(Mat B,PetscInt d_nz,const PetscInt d_nnz[],PetscInt o_nz,const PetscInt o_nnz[])
{

  /* PetscTryMethod silently does nothing when B is not a MATIS */
  PetscTryMethod(B,"MatISSetPreallocation_C",(Mat,PetscInt,const PetscInt[],PetscInt,const PetscInt[]),(B,d_nz,d_nnz,o_nz,o_nnz));
  return(0);
}

/* Implementation of MatISSetPreallocation for MATIS.

   Takes MPIAIJ-style per-global-row estimates (d_nz/d_nnz, o_nz/o_nnz),
   broadcasts the totals to every local (subdomain) row through a PetscSF,
   and preallocates the local matrix for all of AIJ/BAIJ/SBAIJ (only the
   call matching the actual local type has an effect). The result is an
   overestimate: diag and offdiag counts are simply summed per row. */
PetscErrorCode  MatISSetPreallocation_IS(Mat B,PetscInt d_nz,const PetscInt d_nnz[],PetscInt o_nz,const PetscInt o_nnz[])
{
  Mat_IS         *matis = (Mat_IS*)(B->data);
  PetscSF        sf;
  PetscInt       bs,i,nroots,*rootdata,nleaves,*leafdata,nlocalcols;
  const PetscInt *gidxs;

  /* the local matrix is created by MatSetLocalToGlobalMapping_IS */
  if (!matis->A) {
    SETERRQ(PetscObjectComm((PetscObject)B),PETSC_ERR_SUP,"You should first call MatSetLocalToGlobalMapping");
  }
  /* roots: locally owned global rows; leaves: rows of the local subdomain matrix */
  MatGetLocalSize(B,&nroots,NULL);
  MatGetSize(matis->A,&nleaves,&nlocalcols);
  MatGetBlockSize(matis->A,&bs);
  PetscCalloc2(nroots,&rootdata,nleaves,&leafdata);
  /* star forest connecting each local row to the owner of the corresponding global row */
  PetscSFCreate(PetscObjectComm((PetscObject)B),&sf);
  PetscSFSetFromOptions(sf);
  ISLocalToGlobalMappingGetIndices(matis->mapping,&gidxs);
  PetscSFSetGraphLayout(sf,B->rmap,nleaves,NULL,PETSC_COPY_VALUES,gidxs);
  ISLocalToGlobalMappingRestoreIndices(matis->mapping,&gidxs);
  /* total per-row estimate = diag part + offdiag part (overestimate by design) */
  if (!d_nnz) {
    for (i=0;i<nroots;i++) rootdata[i] += d_nz;
  } else {
    for (i=0;i<nroots;i++) rootdata[i] += d_nnz[i];
  }
  if (!o_nnz) {
    for (i=0;i<nroots;i++) rootdata[i] += o_nz;
  } else {
    for (i=0;i<nroots;i++) rootdata[i] += o_nnz[i];
  }
  /* push the owners' estimates out to every subdomain row referencing them */
  PetscSFBcastBegin(sf,MPIU_INT,rootdata,leafdata);
  PetscSFBcastEnd(sf,MPIU_INT,rootdata,leafdata);
  /* a local row cannot hold more nonzeros than there are local columns */
  for (i=0;i<nleaves;i++) {
    leafdata[i] = PetscMin(leafdata[i],nlocalcols);
  }
  MatSeqAIJSetPreallocation(matis->A,0,leafdata);
  /* compress to block rows for BAIJ: use the first row of each block as the estimate */
  for (i=0;i<nleaves/bs;i++) {
    leafdata[i] = leafdata[i*bs]/bs;
  }
  MatSeqBAIJSetPreallocation(matis->A,bs,0,leafdata);
  /* NOTE(review): SBAIJ stores only the upper triangle; subtracting the block-row
     index i discounts the at-most-i blocks in the strict lower triangle of that
     row. This is a heuristic that can under-estimate for general sparsity —
     verify against upstream before relying on it. */
  for (i=0;i<nleaves/bs;i++) {
    leafdata[i] = leafdata[i]-i;
  }
  MatSeqSBAIJSetPreallocation(matis->A,bs,0,leafdata);
  PetscSFDestroy(&sf);
  PetscFree2(rootdata,leafdata);
  return(0);
}

120: PetscErrorCode MatISGetMPIXAIJ_IS(Mat mat, MatReuse reuse, Mat *M)
121: {
122:   Mat_IS                 *matis = (Mat_IS*)(mat->data);
123:   /* info on mat */
124:   /* ISLocalToGlobalMapping rmapping,cmapping; */
125:   PetscInt               bs,rows,cols;
126:   PetscInt               lrows,lcols;
127:   PetscInt               local_rows,local_cols;
128:   PetscBool              isdense,issbaij,issbaij_red;
129:   PetscMPIInt            nsubdomains;
130:   /* values insertion */
131:   PetscScalar            *array;
132:   PetscInt               *local_indices,*global_indices;
133:   /* work */
134:   PetscInt               i,j,index_row;
135:   PetscErrorCode         ierr;

138:   /* MISSING CHECKS
139:     - rectangular case not covered (it is not allowed by MATIS)
140:   */
141:   /* get info from mat */
142:   MPI_Comm_size(PetscObjectComm((PetscObject)mat),&nsubdomains);
143:   if (nsubdomains == 1) {
144:     if (reuse == MAT_INITIAL_MATRIX) {
145:       MatDuplicate(matis->A,MAT_COPY_VALUES,&(*M));
146:     } else {
147:       MatCopy(matis->A,*M,SAME_NONZERO_PATTERN);
148:     }
149:     return(0);
150:   }
151:   /* MatGetLocalToGlobalMapping(mat,&rmapping,&cmapping); */
152:   MatGetSize(mat,&rows,&cols);
153:   MatGetBlockSize(mat,&bs);
154:   MatGetSize(matis->A,&local_rows,&local_cols);
155:   PetscObjectTypeCompare((PetscObject)matis->A,MATSEQDENSE,&isdense);
156:   PetscObjectTypeCompare((PetscObject)matis->A,MATSEQSBAIJ,&issbaij);

158:   /* work */
159:   PetscMalloc1(local_rows,&local_indices);
160:   for (i=0;i<local_rows;i++) local_indices[i]=i;
161:   /* map indices of local mat to global values */
162:   PetscMalloc1(PetscMax(local_cols,local_rows),&global_indices);
163:   /* ISLocalToGlobalMappingApply(rmapping,local_rows,local_indices,global_indices); */
164:   ISLocalToGlobalMappingApply(matis->mapping,local_rows,local_indices,global_indices);

166:   if (issbaij) {
167:     MatGetRowUpperTriangular(matis->A);
168:   }

170:   if (reuse == MAT_INITIAL_MATRIX) {
171:     Mat         new_mat;
172:     MatType     new_mat_type;
173:     Vec         vec_dnz,vec_onz;
174:     PetscScalar *my_dnz,*my_onz;
175:     PetscInt    *dnz,*onz,*mat_ranges,*row_ownership;
176:     PetscInt    index_col,owner;

178:     /* determining new matrix type */
179:     MPI_Allreduce(&issbaij,&issbaij_red,1,MPIU_BOOL,MPI_LAND,PetscObjectComm((PetscObject)mat));
180:     if (issbaij_red) {
181:       new_mat_type = MATSBAIJ;
182:     } else {
183:       if (bs>1) {
184:         new_mat_type = MATBAIJ;
185:       } else {
186:         new_mat_type = MATAIJ;
187:       }
188:     }

190:     MatCreate(PetscObjectComm((PetscObject)mat),&new_mat);
191:     MatSetSizes(new_mat,PETSC_DECIDE,PETSC_DECIDE,rows,cols);
192:     MatSetBlockSize(new_mat,bs);
193:     MatSetType(new_mat,new_mat_type);
194:     MatSetUp(new_mat);
195:     MatGetLocalSize(new_mat,&lrows,&lcols);

197:     /*
198:       preallocation
199:     */

201:     MatPreallocateInitialize(PetscObjectComm((PetscObject)new_mat),lrows,lcols,dnz,onz);
202:     /*
203:        Some vectors are needed to sum up properly on shared interface dofs.
204:        Preallocation macros cannot do the job.
205:        Note that preallocation is not exact, since it overestimates nonzeros
206:     */
207:     MatCreateVecs(new_mat,NULL,&vec_dnz);
208:     /* VecSetLocalToGlobalMapping(vec_dnz,rmapping); */
209:     VecSetLocalToGlobalMapping(vec_dnz,matis->mapping);
210:     VecDuplicate(vec_dnz,&vec_onz);
211:     /* All processes need to compute entire row ownership */
212:     PetscMalloc1(rows,&row_ownership);
213:     MatGetOwnershipRanges(new_mat,(const PetscInt**)&mat_ranges);
214:     for (i=0;i<nsubdomains;i++) {
215:       for (j=mat_ranges[i];j<mat_ranges[i+1];j++) {
216:         row_ownership[j]=i;
217:       }
218:     }

220:     /*
221:        my_dnz and my_onz contains exact contribution to preallocation from each local mat
222:        then, they will be summed up properly. This way, preallocation is always sufficient
223:     */
224:     PetscMalloc1(local_rows,&my_dnz);
225:     PetscMalloc1(local_rows,&my_onz);
226:     PetscMemzero(my_dnz,local_rows*sizeof(*my_dnz));
227:     PetscMemzero(my_onz,local_rows*sizeof(*my_onz));
228:     /* preallocation as a MATAIJ */
229:     if (isdense) { /* special case for dense local matrices */
230:       for (i=0;i<local_rows;i++) {
231:         index_row = global_indices[i];
232:         for (j=i;j<local_rows;j++) {
233:           owner = row_ownership[index_row];
234:           index_col = global_indices[j];
235:           if (index_col > mat_ranges[owner]-1 && index_col < mat_ranges[owner+1] ) { /* diag block */
236:             my_dnz[i] += 1.0;
237:           } else { /* offdiag block */
238:             my_onz[i] += 1.0;
239:           }
240:           /* same as before, interchanging rows and cols */
241:           if (i != j) {
242:             owner = row_ownership[index_col];
243:             if (index_row > mat_ranges[owner]-1 && index_row < mat_ranges[owner+1] ) {
244:               my_dnz[j] += 1.0;
245:             } else {
246:               my_onz[j] += 1.0;
247:             }
248:           }
249:         }
250:       }
251:     } else {
252:       for (i=0;i<local_rows;i++) {
253:         PetscInt ncols;
254:         const PetscInt *cols;
255:         index_row = global_indices[i];
256:         MatGetRow(matis->A,i,&ncols,&cols,NULL);
257:         for (j=0;j<ncols;j++) {
258:           owner = row_ownership[index_row];
259:           index_col = global_indices[cols[j]];
260:           if (index_col > mat_ranges[owner]-1 && index_col < mat_ranges[owner+1] ) { /* diag block */
261:             my_dnz[i] += 1.0;
262:           } else { /* offdiag block */
263:             my_onz[i] += 1.0;
264:           }
265:           /* same as before, interchanging rows and cols */
266:           if (issbaij) {
267:             owner = row_ownership[index_col];
268:             if (index_row > mat_ranges[owner]-1 && index_row < mat_ranges[owner+1] ) {
269:               my_dnz[j] += 1.0;
270:             } else {
271:               my_onz[j] += 1.0;
272:             }
273:           }
274:         }
275:         MatRestoreRow(matis->A,i,&ncols,&cols,NULL);
276:       }
277:     }
278:     VecSet(vec_dnz,0.0);
279:     VecSet(vec_onz,0.0);
280:     if (local_rows) { /* multilevel guard */
281:       VecSetValuesLocal(vec_dnz,local_rows,local_indices,my_dnz,ADD_VALUES);
282:       VecSetValuesLocal(vec_onz,local_rows,local_indices,my_onz,ADD_VALUES);
283:     }
284:     VecAssemblyBegin(vec_dnz);
285:     VecAssemblyBegin(vec_onz);
286:     VecAssemblyEnd(vec_dnz);
287:     VecAssemblyEnd(vec_onz);
288:     PetscFree(my_dnz);
289:     PetscFree(my_onz);
290:     PetscFree(row_ownership);

292:     /* set computed preallocation in dnz and onz */
293:     VecGetArray(vec_dnz,&array);
294:     for (i=0; i<lrows; i++) dnz[i] = (PetscInt)PetscRealPart(array[i]);
295:     VecRestoreArray(vec_dnz,&array);
296:     VecGetArray(vec_onz,&array);
297:     for (i=0;i<lrows;i++) onz[i] = (PetscInt)PetscRealPart(array[i]);
298:     VecRestoreArray(vec_onz,&array);
299:     VecDestroy(&vec_dnz);
300:     VecDestroy(&vec_onz);

302:     /* Resize preallocation if overestimated */
303:     for (i=0;i<lrows;i++) {
304:       dnz[i] = PetscMin(dnz[i],lcols);
305:       onz[i] = PetscMin(onz[i],cols-lcols);
306:     }
307:     /* set preallocation */
308:     MatMPIAIJSetPreallocation(new_mat,0,dnz,0,onz);
309:     for (i=0;i<lrows/bs;i++) {
310:       dnz[i] = dnz[i*bs]/bs;
311:       onz[i] = onz[i*bs]/bs;
312:     }
313:     MatMPIBAIJSetPreallocation(new_mat,bs,0,dnz,0,onz);
314:     for (i=0;i<lrows/bs;i++) {
315:       dnz[i] = dnz[i]-i;
316:     }
317:     MatMPISBAIJSetPreallocation(new_mat,bs,0,dnz,0,onz);
318:     MatPreallocateFinalize(dnz,onz);
319:     *M = new_mat;
320:   } else {
321:     PetscInt mbs,mrows,mcols;
322:     /* some checks */
323:     MatGetBlockSize(*M,&mbs);
324:     MatGetSize(*M,&mrows,&mcols);
325:     if (mrows != rows) {
326:       SETERRQ2(PetscObjectComm((PetscObject)mat),PETSC_ERR_SUP,"Cannot reuse matrix. Wrong number of rows (%d != %d)",rows,mrows);
327:     }
328:     if (mrows != rows) {
329:       SETERRQ2(PetscObjectComm((PetscObject)mat),PETSC_ERR_SUP,"Cannot reuse matrix. Wrong number of cols (%d != %d)",cols,mcols);
330:     }
331:     if (mbs != bs) {
332:       SETERRQ2(PetscObjectComm((PetscObject)mat),PETSC_ERR_SUP,"Cannot reuse matrix. Wrong block size (%d != %d)",bs,mbs);
333:     }
334:     MatZeroEntries(*M);
335:   }
336:   /* set local to global mappings */
337:   /* MatSetLocalToGlobalMapping(*M,rmapping,cmapping); */
338:   /* Set values */
339:   if (isdense) { /* special case for dense local matrices */
340:     MatSetOption(*M,MAT_ROW_ORIENTED,PETSC_FALSE);
341:     MatDenseGetArray(matis->A,&array);
342:     MatSetValues(*M,local_rows,global_indices,local_cols,global_indices,array,ADD_VALUES);
343:     MatDenseRestoreArray(matis->A,&array);
344:     PetscFree(local_indices);
345:     PetscFree(global_indices);
346:   } else { /* very basic values insertion for all other matrix types */
347:     PetscFree(local_indices);
348:     for (i=0;i<local_rows;i++) {
349:       MatGetRow(matis->A,i,&j,(const PetscInt**)&local_indices,(const PetscScalar**)&array);
350:       /* MatSetValuesLocal(*M,1,&i,j,local_indices,array,ADD_VALUES); */
351:       ISLocalToGlobalMappingApply(matis->mapping,j,local_indices,global_indices);
352:       ISLocalToGlobalMappingApply(matis->mapping,1,&i,&index_row);
353:       MatSetValues(*M,1,&index_row,j,global_indices,array,ADD_VALUES);
354:       MatRestoreRow(matis->A,i,&j,(const PetscInt**)&local_indices,(const PetscScalar**)&array);
355:     }
356:     PetscFree(global_indices);
357:   }
358:   MatAssemblyBegin(*M,MAT_FINAL_ASSEMBLY);
359:   MatAssemblyEnd(*M,MAT_FINAL_ASSEMBLY);
360:   if (isdense) {
361:     MatSetOption(*M,MAT_ROW_ORIENTED,PETSC_TRUE);
362:   }
363:   if (issbaij) {
364:     MatRestoreRowUpperTriangular(matis->A);
365:   }
366:   return(0);
367: }

371: /*@
372:     MatISGetMPIXAIJ - Converts MATIS matrix into a parallel AIJ format

374:   Input Parameter:
375: .  mat - the matrix (should be of type MATIS)
376: .  reuse - either MAT_INITIAL_MATRIX or MAT_REUSE_MATRIX

378:   Output Parameter:
379: .  newmat - the matrix in AIJ format

381:   Level: developer

383:   Notes:

385: .seealso: MATIS
386: @*/
PetscErrorCode MatISGetMPIXAIJ(Mat mat, MatReuse reuse, Mat *newmat)
{

  if (reuse != MAT_INITIAL_MATRIX) {
    /* NOTE(review): the validation performed for MAT_REUSE_MATRIX (checking
       *newmat) appears to be elided in this rendering of the source — confirm
       against the upstream file before relying on this path. */
  }
  /* dispatch to the type-specific implementation; errors if mat is not a MATIS */
  PetscUseMethod(mat,"MatISGetMPIXAIJ_C",(Mat,MatReuse,Mat*),(mat,reuse,newmat));
  return(0);
}

405: PetscErrorCode MatDuplicate_IS(Mat mat,MatDuplicateOption op,Mat *newmat)
406: {
408:   Mat_IS         *matis = (Mat_IS*)(mat->data);
409:   PetscInt       bs,m,n,M,N;
410:   Mat            B,localmat;

413:   MatGetBlockSize(mat,&bs);
414:   MatGetSize(mat,&M,&N);
415:   MatGetLocalSize(mat,&m,&n);
416:   MatCreateIS(PetscObjectComm((PetscObject)mat),bs,m,n,M,N,matis->mapping,&B);
417:   MatDuplicate(matis->A,op,&localmat);
418:   MatISSetLocalMat(B,localmat);
419:   MatDestroy(&localmat);
420:   MatAssemblyBegin(B,MAT_FINAL_ASSEMBLY);
421:   MatAssemblyEnd(B,MAT_FINAL_ASSEMBLY);
422:   *newmat = B;
423:   return(0);
424: }

428: PetscErrorCode MatIsHermitian_IS(Mat A,PetscReal tol,PetscBool  *flg)
429: {
431:   Mat_IS         *matis = (Mat_IS*)A->data;
432:   PetscBool      local_sym;

435:   MatIsHermitian(matis->A,tol,&local_sym);
436:   MPI_Allreduce(&local_sym,flg,1,MPIU_BOOL,MPI_LAND,PetscObjectComm((PetscObject)A));
437:   return(0);
438: }

442: PetscErrorCode MatIsSymmetric_IS(Mat A,PetscReal tol,PetscBool  *flg)
443: {
445:   Mat_IS         *matis = (Mat_IS*)A->data;
446:   PetscBool      local_sym;

449:   MatIsSymmetric(matis->A,tol,&local_sym);
450:   MPI_Allreduce(&local_sym,flg,1,MPIU_BOOL,MPI_LAND,PetscObjectComm((PetscObject)A));
451:   return(0);
452: }

/* Destroys a MATIS: releases all owned sub-objects, frees the private data,
   and unregisters the composed type-specific methods. */
PetscErrorCode MatDestroy_IS(Mat A)
{
  Mat_IS         *b = (Mat_IS*)A->data;

  /* release owned sub-objects (each Destroy is a no-op on NULL) */
  MatDestroy(&b->A);
  VecScatterDestroy(&b->ctx);
  VecDestroy(&b->x);
  VecDestroy(&b->y);
  ISLocalToGlobalMappingDestroy(&b->mapping);
  PetscFree(A->data);
  PetscObjectChangeTypeName((PetscObject)A,0);
  /* clear composed functions registered in MatCreate_IS */
  PetscObjectComposeFunction((PetscObject)A,"MatISGetLocalMat_C",NULL);
  PetscObjectComposeFunction((PetscObject)A,"MatISSetLocalMat_C",NULL);
  PetscObjectComposeFunction((PetscObject)A,"MatISGetMPIXAIJ_C",NULL);
  PetscObjectComposeFunction((PetscObject)A,"MatISSetPreallocation_C",NULL);
  return(0);
}

478: PetscErrorCode MatMult_IS(Mat A,Vec x,Vec y)
479: {
481:   Mat_IS         *is  = (Mat_IS*)A->data;
482:   PetscScalar    zero = 0.0;

485:   /*  scatter the global vector x into the local work vector */
486:   VecScatterBegin(is->ctx,x,is->x,INSERT_VALUES,SCATTER_FORWARD);
487:   VecScatterEnd(is->ctx,x,is->x,INSERT_VALUES,SCATTER_FORWARD);

489:   /* multiply the local matrix */
490:   MatMult(is->A,is->x,is->y);

492:   /* scatter product back into global memory */
493:   VecSet(y,zero);
494:   VecScatterBegin(is->ctx,is->y,y,ADD_VALUES,SCATTER_REVERSE);
495:   VecScatterEnd(is->ctx,is->y,y,ADD_VALUES,SCATTER_REVERSE);
496:   return(0);
497: }

501: PetscErrorCode MatMultAdd_IS(Mat A,Vec v1,Vec v2,Vec v3)
502: {
503:   Vec            temp_vec;

507:   if (v3 != v2) {
508:     MatMult(A,v1,v3);
509:     VecAXPY(v3,1.0,v2);
510:   } else {
511:     VecDuplicate(v2,&temp_vec);
512:     MatMult(A,v1,temp_vec);
513:     VecAXPY(temp_vec,1.0,v2);
514:     VecCopy(temp_vec,v3);
515:     VecDestroy(&temp_vec);
516:   }
517:   return(0);
518: }

522: PetscErrorCode MatMultTranspose_IS(Mat A,Vec x,Vec y)
523: {
524:   Mat_IS         *is = (Mat_IS*)A->data;

528:   /*  scatter the global vector x into the local work vector */
529:   VecScatterBegin(is->ctx,x,is->x,INSERT_VALUES,SCATTER_FORWARD);
530:   VecScatterEnd(is->ctx,x,is->x,INSERT_VALUES,SCATTER_FORWARD);

532:   /* multiply the local matrix */
533:   MatMultTranspose(is->A,is->x,is->y);

535:   /* scatter product back into global vector */
536:   VecSet(y,0);
537:   VecScatterBegin(is->ctx,is->y,y,ADD_VALUES,SCATTER_REVERSE);
538:   VecScatterEnd(is->ctx,is->y,y,ADD_VALUES,SCATTER_REVERSE);
539:   return(0);
540: }

544: PetscErrorCode MatMultTransposeAdd_IS(Mat A,Vec v1,Vec v2,Vec v3)
545: {
546:   Vec            temp_vec;

550:   if (v3 != v2) {
551:     MatMultTranspose(A,v1,v3);
552:     VecAXPY(v3,1.0,v2);
553:   } else {
554:     VecDuplicate(v2,&temp_vec);
555:     MatMultTranspose(A,v1,temp_vec);
556:     VecAXPY(temp_vec,1.0,v2);
557:     VecCopy(temp_vec,v3);
558:     VecDestroy(&temp_vec);
559:   }
560:   return(0);
561: }

/* Views a MATIS by viewing each process's local matrix through a singleton
   (per-process) sub-viewer. */
PetscErrorCode MatView_IS(Mat A,PetscViewer viewer)
{
  Mat_IS         *a = (Mat_IS*)A->data;
  PetscViewer    sviewer;

  /* NOTE(review): PetscViewerASCIISynchronizedAllow/GetSingleton are the APIs
     of this PETSc era; later releases renamed them — verify when upgrading. */
  PetscViewerASCIISynchronizedAllow(viewer,PETSC_TRUE);
  PetscViewerGetSingleton(viewer,&sviewer);
  MatView(a->A,sviewer);
  PetscViewerRestoreSingleton(viewer,&sviewer);
  return(0);
}

/* Installs the local-to-global mapping on a MATIS and builds everything that
   depends on it: the sequential local matrix, the local work vectors, and the
   global-to-local scatter used by the multiplication routines. Calling it a
   second time rebuilds all of these from scratch. */
PetscErrorCode MatSetLocalToGlobalMapping_IS(Mat A,ISLocalToGlobalMapping rmapping,ISLocalToGlobalMapping cmapping)
{
  PetscInt       n,bs;
  Mat_IS         *is = (Mat_IS*)A->data;
  IS             from,to;
  Vec            global;

  /* MATIS is square per process: a single mapping serves rows and columns */
  if (rmapping != cmapping) SETERRQ(PetscObjectComm((PetscObject)A),PETSC_ERR_ARG_INCOMP,"MATIS requires the row and column mappings to be identical");
  if (is->mapping) { /* Currently destroys the objects that will be created by this routine. Is there anything else that should be checked? */
    ISLocalToGlobalMappingDestroy(&is->mapping);
    VecDestroy(&is->x);
    VecDestroy(&is->y);
    VecScatterDestroy(&is->ctx);
    MatDestroy(&is->A);
  }
  /* take a reference on the caller's mapping; the Destroy below is a no-op
     (is->mapping is NULL at this point on every path) */
  PetscObjectReference((PetscObject)rmapping);
  ISLocalToGlobalMappingDestroy(&is->mapping);
  is->mapping = rmapping;
/*
  PetscLayoutSetISLocalToGlobalMapping(A->rmap,rmapping);
  PetscLayoutSetISLocalToGlobalMapping(A->cmap,cmapping);
*/

  /* Create the local matrix A: n = number of local dofs (owned + ghosts) */
  ISLocalToGlobalMappingGetSize(rmapping,&n);
  ISLocalToGlobalMappingGetBlockSize(rmapping,&bs);
  MatCreate(PETSC_COMM_SELF,&is->A);
  if (bs > 1) {
    MatSetType(is->A,MATSEQBAIJ);
  } else {
    MatSetType(is->A,MATSEQAIJ);
  }
  MatSetSizes(is->A,n,n,n,n);
  MatSetBlockSize(is->A,bs);
  /* local objects are configurable via the -is_ options prefix */
  MatSetOptionsPrefix(is->A,((PetscObject)A)->prefix);
  MatAppendOptionsPrefix(is->A,"is_");
  MatSetFromOptions(is->A);

  /* Create the local work vectors */
  VecCreate(PETSC_COMM_SELF,&is->x);
  VecSetBlockSize(is->x,bs);
  VecSetSizes(is->x,n,n);
  VecSetOptionsPrefix(is->x,((PetscObject)A)->prefix);
  VecAppendOptionsPrefix(is->x,"is_");
  VecSetFromOptions(is->x);
  VecDuplicate(is->x,&is->y);

  /* setup the global to local scatter: local index i <- global index map(i) */
  ISCreateStride(PETSC_COMM_SELF,n,0,1,&to);
  ISLocalToGlobalMappingApplyIS(rmapping,to,&from);
  MatCreateVecs(A,&global,NULL);
  VecScatterCreate(global,from,is->x,to,&is->ctx);
  VecDestroy(&global);
  ISDestroy(&to);
  ISDestroy(&from);
  return(0);
}

/* Sets values given GLOBAL indices: translates them to local indices via the
   ISG2LMap* helper macros (defined earlier in the file, outside this excerpt)
   and forwards to the local matrix. */
PetscErrorCode MatSetValues_IS(Mat mat, PetscInt m,const PetscInt *rows, PetscInt n,const PetscInt *cols, const PetscScalar *values, InsertMode addv)
{
  Mat_IS         *is = (Mat_IS*)mat->data;
  /* fixed-size stack buffers for the translated indices */
  PetscInt       rows_l[2048],cols_l[2048];

  /* NOTE(review): the 2048-entry bound is only enforced in debug builds; in
     optimized builds larger m/n would overflow the stack buffers silently. */
#if defined(PETSC_USE_DEBUG)
  if (m > 2048 || n > 2048) SETERRQ2(PETSC_COMM_SELF,PETSC_ERR_SUP,"Number of row/column indices must be <= 2048: they are %D %D",m,n);
#endif
  ISG2LMapApply(is->mapping,m,rows,rows_l);
  ISG2LMapApply(is->mapping,n,cols,cols_l);
  MatSetValues(is->A,m,rows_l,n,cols_l,values,addv);
  return(0);
}

660: #undef ISG2LMapSetUp
661: #undef ISG2LMapApply

665: PetscErrorCode MatSetValuesLocal_IS(Mat A,PetscInt m,const PetscInt *rows, PetscInt n,const PetscInt *cols,const PetscScalar *values,InsertMode addv)
666: {
668:   Mat_IS         *is = (Mat_IS*)A->data;

671:   MatSetValues(is->A,m,rows,n,cols,values,addv);
672:   return(0);
673: }

677: PetscErrorCode MatSetValuesBlockedLocal_IS(Mat A,PetscInt m,const PetscInt *rows, PetscInt n,const PetscInt *cols,const PetscScalar *values,InsertMode addv)
678: {
680:   Mat_IS         *is = (Mat_IS*)A->data;

683:   MatSetValuesBlocked(is->A,m,rows,n,cols,values,addv);
684:   return(0);
685: }

689: PetscErrorCode MatZeroRows_IS(Mat A,PetscInt n,const PetscInt rows[],PetscScalar diag,Vec x,Vec b)
690: {
691:   Mat_IS         *is = (Mat_IS*)A->data;
692:   PetscInt       n_l = 0, *rows_l = NULL;

696:   if (x && b) SETERRQ(PetscObjectComm((PetscObject)A),PETSC_ERR_SUP,"No support");
697:   if (n) {
698:     PetscMalloc1(n,&rows_l);
699:     ISGlobalToLocalMappingApply(is->mapping,IS_GTOLM_DROP,n,rows,&n_l,rows_l);
700:   }
701:   MatZeroRowsLocal(A,n_l,rows_l,diag,x,b);
702:   PetscFree(rows_l);
703:   return(0);
704: }

/* Zeroes local rows of the subdomain matrix, placing diag on the diagonal.
   Because shared (interface) dofs are duplicated on several processes, the
   diagonal entry is scaled by the dof multiplicity so that the implicitly
   assembled global matrix ends up with exactly diag on those rows. */
PetscErrorCode MatZeroRowsLocal_IS(Mat A,PetscInt n,const PetscInt rows[],PetscScalar diag,Vec x,Vec b)
{
  Mat_IS         *is = (Mat_IS*)A->data;
  PetscInt       i;
  PetscScalar    *array;

  if (x && b) SETERRQ(PetscObjectComm((PetscObject)A),PETSC_ERR_SUP,"No support");
  {
    /*
       Set up is->x as a "counting vector": after the two scatters each local
       entry holds the number of subdomains sharing that dof, so that
       MatMult_IS works properly on the interface nodes.
    */
    Vec         counter;
    PetscScalar one=1.0, zero=0.0;
    MatCreateVecs(A,&counter,NULL);
    VecSet(counter,zero);
    VecSet(is->x,one);
    /* reverse scatter sums the per-subdomain ones into the global counter */
    VecScatterBegin(is->ctx,is->x,counter,ADD_VALUES,SCATTER_REVERSE);
    VecScatterEnd  (is->ctx,is->x,counter,ADD_VALUES,SCATTER_REVERSE);
    /* forward scatter brings each dof's multiplicity back to the local vector */
    VecScatterBegin(is->ctx,counter,is->x,INSERT_VALUES,SCATTER_FORWARD);
    VecScatterEnd  (is->ctx,counter,is->x,INSERT_VALUES,SCATTER_FORWARD);
    VecDestroy(&counter);
  }
  if (!n) {
    is->pure_neumann = PETSC_TRUE;
  } else {
    is->pure_neumann = PETSC_FALSE;

    VecGetArray(is->x,&array);
    MatZeroRows(is->A,n,rows,diag,0,0);
    for (i=0; i<n; i++) {
      /* divide by the multiplicity so contributions sum to exactly diag;
         array[rows[i]] >= 1 since every local dof maps to itself */
      MatSetValue(is->A,rows[i],rows[i],diag/(array[rows[i]]),INSERT_VALUES);
    }
    MatAssemblyBegin(is->A,MAT_FINAL_ASSEMBLY);
    MatAssemblyEnd  (is->A,MAT_FINAL_ASSEMBLY);
    VecRestoreArray(is->x,&array);
  }
  return(0);
}

752: PetscErrorCode MatAssemblyBegin_IS(Mat A,MatAssemblyType type)
753: {
754:   Mat_IS         *is = (Mat_IS*)A->data;

758:   MatAssemblyBegin(is->A,type);
759:   return(0);
760: }

764: PetscErrorCode MatAssemblyEnd_IS(Mat A,MatAssemblyType type)
765: {
766:   Mat_IS         *is = (Mat_IS*)A->data;

770:   MatAssemblyEnd(is->A,type);
771:   return(0);
772: }

776: PetscErrorCode MatISGetLocalMat_IS(Mat mat,Mat *local)
777: {
778:   Mat_IS *is = (Mat_IS*)mat->data;

781:   *local = is->A;
782:   return(0);
783: }

787: /*@
788:     MatISGetLocalMat - Gets the local matrix stored inside a MATIS matrix.

790:   Input Parameter:
791: .  mat - the matrix

793:   Output Parameter:
794: .  local - the local matrix usually MATSEQAIJ

796:   Level: advanced

798:   Notes:
799:     This can be called if you have precomputed the nonzero structure of the
800:   matrix and want to provide it to the inner matrix object to improve the performance
801:   of the MatSetValues() operation.

803: .seealso: MATIS
804: @*/
PetscErrorCode MatISGetLocalMat(Mat mat,Mat *local)
{

  /* dispatch to MatISGetLocalMat_IS; errors if mat is not a MATIS */
  PetscUseMethod(mat,"MatISGetLocalMat_C",(Mat,Mat*),(mat,local));
  return(0);
}

818: PetscErrorCode MatISSetLocalMat_IS(Mat mat,Mat local)
819: {
820:   Mat_IS         *is = (Mat_IS*)mat->data;
821:   PetscInt       nrows,ncols,orows,ocols;

825:   if (is->A) {
826:     MatGetSize(is->A,&orows,&ocols);
827:     MatGetSize(local,&nrows,&ncols);
828:     if (orows != nrows || ocols != ncols) SETERRQ4(PETSC_COMM_SELF,PETSC_ERR_ARG_SIZ,"Local MATIS matrix should be of size %dx%d (you passed a %dx%d matrix)\n",orows,ocols,nrows,ncols);
829:   }
830:   PetscObjectReference((PetscObject)local);
831:   MatDestroy(&is->A);
832:   is->A = local;
833:   return(0);
834: }

838: /*@
839:     MatISSetLocalMat - Set the local matrix stored inside a MATIS matrix.

841:   Input Parameter:
842: .  mat - the matrix
843: .  local - the local matrix usually MATSEQAIJ

845:   Output Parameter:

847:   Level: advanced

849:   Notes:
850:     This can be called if you have precomputed the local matrix and
851:   want to provide it to the matrix object MATIS.

853: .seealso: MATIS
854: @*/
PetscErrorCode MatISSetLocalMat(Mat mat,Mat local)
{

  /* dispatch to MatISSetLocalMat_IS; errors if mat is not a MATIS */
  PetscUseMethod(mat,"MatISSetLocalMat_C",(Mat,Mat),(mat,local));
  return(0);
}

868: PetscErrorCode MatZeroEntries_IS(Mat A)
869: {
870:   Mat_IS         *a = (Mat_IS*)A->data;

874:   MatZeroEntries(a->A);
875:   return(0);
876: }

880: PetscErrorCode MatScale_IS(Mat A,PetscScalar a)
881: {
882:   Mat_IS         *is = (Mat_IS*)A->data;

886:   MatScale(is->A,a);
887:   return(0);
888: }

892: PetscErrorCode MatGetDiagonal_IS(Mat A, Vec v)
893: {
894:   Mat_IS         *is = (Mat_IS*)A->data;

898:   /* get diagonal of the local matrix */
899:   MatGetDiagonal(is->A,is->x);

901:   /* scatter diagonal back into global vector */
902:   VecSet(v,0);
903:   VecScatterBegin(is->ctx,is->x,v,ADD_VALUES,SCATTER_REVERSE);
904:   VecScatterEnd  (is->ctx,is->x,v,ADD_VALUES,SCATTER_REVERSE);
905:   return(0);
906: }

910: PetscErrorCode MatSetOption_IS(Mat A,MatOption op,PetscBool flg)
911: {
912:   Mat_IS         *a = (Mat_IS*)A->data;

916:   MatSetOption(a->A,op,flg);
917:   return(0);
918: }

922: /*@
 923:     MatCreateIS - Creates a "process" unassembled matrix, it is assembled on each
924:        process but not across processes.

926:    Input Parameters:
927: +     comm - MPI communicator that will share the matrix
928: .     bs - local and global block size of the matrix
929: .     m,n,M,N - local and/or global sizes of the left and right vector used in matrix vector products
930: -     map - mapping that defines the global number for each local number

932:    Output Parameter:
933: .    A - the resulting matrix

935:    Level: advanced

937:    Notes: See MATIS for more details
938:           m and n are NOT related to the size of the map, they are the size of the part of the vector owned
939:           by that process. m + nghosts (or n + nghosts) is the length of map since map maps all local points
940:           plus the ghost points to global indices.

942: .seealso: MATIS, MatSetLocalToGlobalMapping()
943: @*/
PetscErrorCode  MatCreateIS(MPI_Comm comm,PetscInt bs,PetscInt m,PetscInt n,PetscInt M,PetscInt N,ISLocalToGlobalMapping map,Mat *A)
{

  MatCreate(comm,A);
  /* block size must be set before the sizes for the layout to honor it */
  MatSetBlockSize(*A,bs);
  MatSetSizes(*A,m,n,M,N);
  MatSetType(*A,MATIS);
  MatSetUp(*A);
  /* MATIS requires identical row and column mappings (enforced by the type) */
  MatSetLocalToGlobalMapping(*A,map,map);
  return(0);
}

958: /*MC
959:    MATIS - MATIS = "is" - A matrix type to be used for using the Neumann-Neumann type preconditioners, see PCBDDC.
960:    This stores the matrices in globally unassembled form. Each processor
961:    assembles only its local Neumann problem and the parallel matrix vector
962:    product is handled "implicitly".

964:    Operations Provided:
965: +  MatMult()
966: .  MatMultAdd()
967: .  MatMultTranspose()
968: .  MatMultTransposeAdd()
969: .  MatZeroEntries()
970: .  MatSetOption()
971: .  MatZeroRows()
972: .  MatZeroRowsLocal()
973: .  MatSetValues()
974: .  MatSetValuesLocal()
975: .  MatScale()
976: .  MatGetDiagonal()
977: -  MatSetLocalToGlobalMapping()

979:    Options Database Keys:
980: . -mat_type is - sets the matrix type to "is" during a call to MatSetFromOptions()

982:    Notes: Options prefix for the inner matrix are given by -is_mat_xxx

984:           You must call MatSetLocalToGlobalMapping() before using this matrix type.

986:           You can do matrix preallocation on the local matrix after you obtain it with
987:           MatISGetLocalMat()

989:   Level: advanced

991: .seealso: PC, MatISGetLocalMat(), MatSetLocalToGlobalMapping(), PCBDDC

993: M*/

/* Type constructor for MATIS: allocates the private data, installs the
   operation table, and composes the type-specific query/set functions. */
PETSC_EXTERN PetscErrorCode MatCreate_IS(Mat A)
{
  Mat_IS         *b;

  PetscNewLog(A,&b);
  A->data = (void*)b;
  /* start from an empty ops table, then fill in what MATIS supports */
  PetscMemzero(A->ops,sizeof(struct _MatOps));

  A->ops->mult                    = MatMult_IS;
  A->ops->multadd                 = MatMultAdd_IS;
  A->ops->multtranspose           = MatMultTranspose_IS;
  A->ops->multtransposeadd        = MatMultTransposeAdd_IS;
  A->ops->destroy                 = MatDestroy_IS;
  A->ops->setlocaltoglobalmapping = MatSetLocalToGlobalMapping_IS;
  A->ops->setvalues               = MatSetValues_IS;
  A->ops->setvalueslocal          = MatSetValuesLocal_IS;
  A->ops->setvaluesblockedlocal   = MatSetValuesBlockedLocal_IS;
  A->ops->zerorows                = MatZeroRows_IS;
  A->ops->zerorowslocal           = MatZeroRowsLocal_IS;
  A->ops->assemblybegin           = MatAssemblyBegin_IS;
  A->ops->assemblyend             = MatAssemblyEnd_IS;
  A->ops->view                    = MatView_IS;
  A->ops->zeroentries             = MatZeroEntries_IS;
  A->ops->scale                   = MatScale_IS;
  A->ops->getdiagonal             = MatGetDiagonal_IS;
  A->ops->setoption               = MatSetOption_IS;
  A->ops->ishermitian             = MatIsHermitian_IS;
  A->ops->issymmetric             = MatIsSymmetric_IS;
  A->ops->duplicate               = MatDuplicate_IS;

  PetscLayoutSetUp(A->rmap);
  PetscLayoutSetUp(A->cmap);

  /* zeros pointer: sub-objects are created later by MatSetLocalToGlobalMapping_IS */
  b->A       = 0;
  b->ctx     = 0;
  b->x       = 0;
  b->y       = 0;
  b->mapping = 0;

  /* special MATIS functions */
  PetscObjectComposeFunction((PetscObject)A,"MatISGetLocalMat_C",MatISGetLocalMat_IS);
  PetscObjectComposeFunction((PetscObject)A,"MatISSetLocalMat_C",MatISSetLocalMat_IS);
  PetscObjectComposeFunction((PetscObject)A,"MatISGetMPIXAIJ_C",MatISGetMPIXAIJ_IS);
  PetscObjectComposeFunction((PetscObject)A,"MatISSetPreallocation_C",MatISSetPreallocation_IS);
  PetscObjectChangeTypeName((PetscObject)A,MATIS);
  return(0);
}