Actual source code: mpiadj.c

petsc-master 2016-07-25
  2: /*
  3:     Defines the basic matrix operations for the ADJ adjacency list matrix data-structure.
  4: */
  5: #include <../src/mat/impls/adj/mpi/mpiadj.h>    /*I "petscmat.h" I*/
  6: #include <petscsf.h>
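/* Editorial note, not part of the original mpiadj.c: the ADJ format stores the locally
   owned rows in CSR-like arrays, which the routines below manipulate directly.  For
   example, a rank owning three rows with
       row 0 -> {1}, row 1 -> {0,2}, row 2 -> {1}
   would hold
       i = {0,1,3,4}    (row offsets, length nlocalrows+1, i[0] = 0)
       j = {1,0,2,1}    (concatenated sorted column indices, diagonal excluded)
   plus an optional values[] array of the same length as j[] carrying edge weights. */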

 10: /*
 11:  * The interface should be easy to use for both MatGetSubMatrix (parallel sub-matrix) and MatGetSubMatrices (sequential sub-matrices).
 12:  */
 13: static PetscErrorCode MatGetSubMatrix_MPIAdj_data(Mat adj,IS irows, IS icols, PetscInt **sadj_xadj,PetscInt **sadj_adjncy,PetscInt **sadj_values)
 14: {
 15:   PetscInt           nlrows_is,icols_n,i,j,nroots,nleaves,owner,rlocalindex,*ncols_send,*ncols_recv;
 16:   PetscInt           nlrows_mat,*adjncy_recv,Ncols_recv,Ncols_send,*xadj_recv,*values_recv;
 17:   PetscInt          *ncols_recv_offsets,loc,rnclos,*sadjncy,*sxadj,*svalues,isvalue;
 18:   const PetscInt    *irows_indices,*icols_indices,*xadj, *adjncy;
 19:   Mat_MPIAdj        *a = (Mat_MPIAdj*)adj->data;
 20:   PetscLayout        rmap;
 21:   MPI_Comm           comm;
 22:   PetscSF            sf;
 23:   PetscSFNode       *iremote;
 24:   PetscBool          done;
 25:   PetscErrorCode     ierr;

 28:   PetscObjectGetComm((PetscObject)adj,&comm);
 29:   MatGetLayouts(adj,&rmap,PETSC_NULL);
 30:   ISGetLocalSize(irows,&nlrows_is);
 31:   ISGetIndices(irows,&irows_indices);
 32:   PetscCalloc1(nlrows_is,&iremote);
 33:   /* construct sf graph*/
 34:   nleaves = nlrows_is;
 35:   for (i=0; i<nlrows_is; i++){
 36:     owner = -1;
 37:     rlocalindex = -1;
 38:     PetscLayoutFindOwnerIndex(rmap,irows_indices[i],&owner,&rlocalindex);
 39:     iremote[i].rank  = owner;
 40:     iremote[i].index = rlocalindex;
 41:   }
 42:   MatGetRowIJ(adj,0,PETSC_FALSE,PETSC_FALSE,&nlrows_mat,&xadj,&adjncy,&done);
 43:   PetscCalloc4(nlrows_mat,&ncols_send,nlrows_is,&xadj_recv,nlrows_is+1,&ncols_recv_offsets,nlrows_is,&ncols_recv);
 44:   nroots = nlrows_mat;
 45:   for (i=0; i<nlrows_mat; i++){
 46:     ncols_send[i] = xadj[i+1]-xadj[i];
 47:   }
 48:   PetscSFCreate(comm,&sf);
 49:   PetscSFSetGraph(sf,nroots,nleaves,PETSC_NULL,PETSC_OWN_POINTER,iremote,PETSC_OWN_POINTER);
 50:   PetscSFSetType(sf,PETSCSFBASIC);
 51:   PetscSFSetFromOptions(sf);
 52:   PetscSFBcastBegin(sf,MPIU_INT,ncols_send,ncols_recv);
 53:   PetscSFBcastEnd(sf,MPIU_INT,ncols_send,ncols_recv);
 54:   PetscSFBcastBegin(sf,MPIU_INT,xadj,xadj_recv);
 55:   PetscSFBcastEnd(sf,MPIU_INT,xadj,xadj_recv);
 56:   PetscSFDestroy(&sf);
 57:   Ncols_recv =0;
 58:   for (i=0; i<nlrows_is; i++){
 59:     Ncols_recv             += ncols_recv[i];
 60:     ncols_recv_offsets[i+1] = ncols_recv[i]+ncols_recv_offsets[i];
 61:   }
 62:   Ncols_send = 0;
 63:   for(i=0; i<nlrows_mat; i++){
 64:     Ncols_send += ncols_send[i];
 65:   }
 66:   PetscCalloc1(Ncols_recv,&iremote);
 67:   PetscCalloc1(Ncols_recv,&adjncy_recv);
 68:   nleaves = Ncols_recv;
 69:   Ncols_recv = 0;
 70:   for (i=0; i<nlrows_is; i++){
 71:     PetscLayoutFindOwner(rmap,irows_indices[i],&owner);
 72:     for (j=0; j<ncols_recv[i]; j++){
 73:       iremote[Ncols_recv].rank    = owner;
 74:       iremote[Ncols_recv++].index = xadj_recv[i]+j;
 75:     }
 76:   }
 77:   ISRestoreIndices(irows,&irows_indices);
 78:   /* do we need to deal with edge weights? */
 79:   if (a->values){isvalue=1;} else {isvalue=0;}
 80:   /* checking this consistently across all processes would involve a global communication: */
 81:   /* MPIU_Allreduce(&isvalue,&isvalue,1,MPIU_INT,MPI_SUM,comm); */
 82:   if (isvalue){PetscCalloc1(Ncols_recv,&values_recv);}
 83:   nroots = Ncols_send;
 84:   PetscSFCreate(comm,&sf);
 85:   PetscSFSetGraph(sf,nroots,nleaves,PETSC_NULL,PETSC_OWN_POINTER,iremote,PETSC_OWN_POINTER);
 86:   PetscSFSetType(sf,PETSCSFBASIC);
 87:   PetscSFSetFromOptions(sf);
 88:   PetscSFBcastBegin(sf,MPIU_INT,adjncy,adjncy_recv);
 89:   PetscSFBcastEnd(sf,MPIU_INT,adjncy,adjncy_recv);
 90:   if (isvalue){
 91:     PetscSFBcastBegin(sf,MPIU_INT,a->values,values_recv);
 92:     PetscSFBcastEnd(sf,MPIU_INT,a->values,values_recv);
 93:   }
 94:   PetscSFDestroy(&sf);
 95:   MatRestoreRowIJ(adj,0,PETSC_FALSE,PETSC_FALSE,&nlrows_mat,&xadj,&adjncy,&done);
 96:   ISGetLocalSize(icols,&icols_n);
 97:   ISGetIndices(icols,&icols_indices);
 98:   rnclos = 0;
 99:   for (i=0; i<nlrows_is; i++){
100:     for (j=ncols_recv_offsets[i]; j<ncols_recv_offsets[i+1]; j++){
101:       PetscFindInt(adjncy_recv[j], icols_n, icols_indices, &loc);
102:       if (loc<0){
103:         adjncy_recv[j] = -1;
104:         if (isvalue) values_recv[j] = -1;
105:         ncols_recv[i]--;
106:       } else {
107:         rnclos++;
108:       }
109:     }
110:   }
111:   ISRestoreIndices(icols,&icols_indices);
112:   PetscCalloc1(rnclos,&sadjncy);
113:   if (isvalue) {PetscCalloc1(rnclos,&svalues);}
114:   PetscCalloc1(nlrows_is+1,&sxadj);
115:   rnclos = 0;
116:   for(i=0; i<nlrows_is; i++){
117:     for(j=ncols_recv_offsets[i]; j<ncols_recv_offsets[i+1]; j++){
118:       if (adjncy_recv[j]<0) continue;
119:       sadjncy[rnclos] = adjncy_recv[j];
120:       if (isvalue) svalues[rnclos] = values_recv[j];
121:       rnclos++;
122:     }
123:   }
124:   for(i=0; i<nlrows_is; i++){
125:     sxadj[i+1] = sxadj[i]+ncols_recv[i];
126:   }
127:   if (sadj_xadj)  { *sadj_xadj = sxadj;} else    { PetscFree(sxadj);}
128:   if (sadj_adjncy){ *sadj_adjncy = sadjncy;} else { PetscFree(sadjncy);}
129:   if (sadj_values){
130:     if (isvalue) *sadj_values = svalues; else *sadj_values=0;
131:   } else {
132:     if (isvalue) {PetscFree(svalues);}
133:   }
134:   PetscFree4(ncols_send,xadj_recv,ncols_recv_offsets,ncols_recv);
135:   PetscFree(adjncy_recv);
136:   if (isvalue) {PetscFree(values_recv);}
137:   return(0);
138: }
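The routine above uses PetscSF twice: first to fetch the row lengths and row offsets of the requested rows, then to fetch their column indices (and optional weights). The fragment below is a minimal, illustrative sketch of that broadcast pattern; it is not part of the original source, the function name and parameters are placeholders, and it relies on the headers already included at the top of this file.

/* Sketch: copy PetscInt data from nroots local root entries to nleaves leaves, where
   remote[k] gives the (rank,index) of the root feeding leaf k.  With PETSC_OWN_POINTER
   the SF takes ownership of remote and frees it in PetscSFDestroy(). */
static PetscErrorCode BcastRootsToLeaves_Sketch(MPI_Comm comm,PetscInt nroots,PetscInt nleaves,PetscSFNode *remote,const PetscInt *rootdata,PetscInt *leafdata)
{
  PetscSF sf;
  PetscSFCreate(comm,&sf);
  PetscSFSetGraph(sf,nroots,nleaves,NULL,PETSC_OWN_POINTER,remote,PETSC_OWN_POINTER);
  PetscSFSetFromOptions(sf);
  PetscSFBcastBegin(sf,MPIU_INT,rootdata,leafdata);  /* root -> leaf copy */
  PetscSFBcastEnd(sf,MPIU_INT,rootdata,leafdata);
  PetscSFDestroy(&sf);
  return(0);
}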

142: static PetscErrorCode MatGetSubMatrices_MPIAdj_Private(Mat mat,PetscInt n,const IS irow[],const IS icol[],PetscBool subcomm,MatReuse scall,Mat *submat[])
143: {
144:   PetscInt           i,irow_n,icol_n,*sxadj,*sadjncy,*svalues;
145:   PetscInt          *indices,nindx,j,k,loc;
146:   PetscMPIInt        issame;
147:   const PetscInt    *irow_indices,*icol_indices;
148:   MPI_Comm           scomm_row,scomm_col,scomm_mat;
149:   PetscErrorCode     ierr;

152:   nindx = 0;
153:   /*
 154:    * Estimate an upper bound on the workspace needed for the merged row and column indices
155:    */
156:   for(i=0; i<n; i++){
157:     ISGetLocalSize(irow[i],&irow_n);
158:     ISGetLocalSize(icol[i],&icol_n);
159:     nindx = nindx>(irow_n+icol_n)? nindx:(irow_n+icol_n);
160:   }
161:   PetscCalloc1(nindx,&indices);
162:   /* construct a submat */
163:   for(i=0; i<n; i++){
164:     if (subcomm){
165:       PetscObjectGetComm((PetscObject)irow[i],&scomm_row);
166:       PetscObjectGetComm((PetscObject)icol[i],&scomm_col);
167:       MPI_Comm_compare(scomm_row,scomm_col,&issame);
 168:       if (issame != MPI_IDENT) SETERRQ(PETSC_COMM_SELF,PETSC_ERR_ARG_INCOMP,"row index set must have the same comm as the col index set\n");
 169:       MPI_Comm_compare(scomm_row,PETSC_COMM_SELF,&issame);
 170:       if (issame == MPI_IDENT) SETERRQ(PETSC_COMM_SELF,PETSC_ERR_ARG_INCOMP,"cannot use PETSC_COMM_SELF as comm when extracting a parallel submatrix\n");
171:     } else {
172:       scomm_row = PETSC_COMM_SELF;
173:     }
174:     /*get sub-matrix data*/
175:     sxadj=0; sadjncy=0; svalues=0;
176:     MatGetSubMatrix_MPIAdj_data(mat,irow[i],icol[i],&sxadj,&sadjncy,&svalues);
177:     ISGetLocalSize(irow[i],&irow_n);
178:     ISGetLocalSize(icol[i],&icol_n);
179:     ISGetIndices(irow[i],&irow_indices);
180:     PetscMemcpy(indices,irow_indices,sizeof(PetscInt)*irow_n);
181:     ISRestoreIndices(irow[i],&irow_indices);
182:     ISGetIndices(icol[i],&icol_indices);
183:     PetscMemcpy(indices+irow_n,icol_indices,sizeof(PetscInt)*icol_n);
184:     ISRestoreIndices(icol[i],&icol_indices);
185:     nindx = irow_n+icol_n;
186:     PetscSortRemoveDupsInt(&nindx,indices);
187:     /* renumber columns */
188:     for(j=0; j<irow_n; j++){
189:       for(k=sxadj[j]; k<sxadj[j+1]; k++){
190:         PetscFindInt(sadjncy[k],nindx,indices,&loc);
 191: #if defined(PETSC_USE_DEBUG)
 192:         if (loc<0) SETERRQ1(PETSC_COMM_SELF,PETSC_ERR_ARG_WRONG,"cannot find column %D\n",sadjncy[k]);
193: #endif
194:         sadjncy[k] = loc;
195:       }
196:     }
197:     if (scall==MAT_INITIAL_MATRIX){
198:       MatCreateMPIAdj(scomm_row,irow_n,icol_n,sxadj,sadjncy,svalues,submat[i]);
199:     } else {
200:        Mat                sadj = *(submat[i]);
201:        Mat_MPIAdj         *sa  = (Mat_MPIAdj*)((sadj)->data);
202:        PetscObjectGetComm((PetscObject)sadj,&scomm_mat);
203:        MPI_Comm_compare(scomm_row,scomm_mat,&issame);
 204:        if (issame != MPI_IDENT) SETERRQ(PETSC_COMM_SELF,PETSC_ERR_ARG_INCOMP,"submatrix must have the same comm as the row index set\n");
205:        PetscMemcpy(sa->i,sxadj,sizeof(PetscInt)*(irow_n+1));
206:        PetscMemcpy(sa->j,sadjncy,sizeof(PetscInt)*sxadj[irow_n]);
207:        if (svalues){PetscMemcpy(sa->values,svalues,sizeof(PetscInt)*sxadj[irow_n]);}
208:        PetscFree(sxadj);
209:        PetscFree(sadjncy);
210:        if (svalues) {PetscFree(svalues);}
211:     }
212:   }
213:   PetscFree(indices);
214:   return(0);
215: }

219: static PetscErrorCode MatGetSubMatricesMPI_MPIAdj(Mat mat,PetscInt n, const IS irow[],const IS icol[],MatReuse scall,Mat *submat[])
220: {
221:   PetscErrorCode     ierr;
222:   /*get sub-matrices across a sub communicator */
224:   MatGetSubMatrices_MPIAdj_Private(mat,n,irow,icol,PETSC_TRUE,scall,submat);
225:   return(0);
226: }

230: static PetscErrorCode MatGetSubMatrices_MPIAdj(Mat mat,PetscInt n,const IS irow[],const IS icol[],MatReuse scall,Mat *submat[])
231: {
232:   PetscErrorCode     ierr;

235:   /*get sub-matrices based on PETSC_COMM_SELF */
236:   MatGetSubMatrices_MPIAdj_Private(mat,n,irow,icol,PETSC_FALSE,scall,submat);
237:   return(0);
238: }

242: static PetscErrorCode MatView_MPIAdj_ASCII(Mat A,PetscViewer viewer)
243: {
244:   Mat_MPIAdj        *a = (Mat_MPIAdj*)A->data;
245:   PetscErrorCode    ierr;
246:   PetscInt          i,j,m = A->rmap->n;
247:   const char        *name;
248:   PetscViewerFormat format;

251:   PetscObjectGetName((PetscObject)A,&name);
252:   PetscViewerGetFormat(viewer,&format);
253:   if (format == PETSC_VIEWER_ASCII_INFO) {
254:     return(0);
255:   } else if (format == PETSC_VIEWER_ASCII_MATLAB) SETERRQ(PetscObjectComm((PetscObject)A),PETSC_ERR_SUP,"MATLAB format not supported");
256:   else {
257:     PetscViewerASCIIUseTabs(viewer,PETSC_FALSE);
258:     PetscViewerASCIIPushSynchronized(viewer);
259:     for (i=0; i<m; i++) {
260:       PetscViewerASCIISynchronizedPrintf(viewer,"row %D:",i+A->rmap->rstart);
261:       for (j=a->i[i]; j<a->i[i+1]; j++) {
262:         PetscViewerASCIISynchronizedPrintf(viewer," %D ",a->j[j]);
263:       }
264:       PetscViewerASCIISynchronizedPrintf(viewer,"\n");
265:     }
266:     PetscViewerASCIIUseTabs(viewer,PETSC_TRUE);
267:     PetscViewerFlush(viewer);
268:     PetscViewerASCIIPopSynchronized(viewer);
269:   }
270:   return(0);
271: }

275: static PetscErrorCode MatView_MPIAdj(Mat A,PetscViewer viewer)
276: {
278:   PetscBool      iascii;

281:   PetscObjectTypeCompare((PetscObject)viewer,PETSCVIEWERASCII,&iascii);
282:   if (iascii) {
283:     MatView_MPIAdj_ASCII(A,viewer);
284:   }
285:   return(0);
286: }

290: static PetscErrorCode MatDestroy_MPIAdj(Mat mat)
291: {
292:   Mat_MPIAdj     *a = (Mat_MPIAdj*)mat->data;

296: #if defined(PETSC_USE_LOG)
297:   PetscLogObjectState((PetscObject)mat,"Rows=%D, Cols=%D, NZ=%D",mat->rmap->n,mat->cmap->n,a->nz);
298: #endif
299:   PetscFree(a->diag);
300:   if (a->freeaij) {
301:     if (a->freeaijwithfree) {
302:       if (a->i) free(a->i);
303:       if (a->j) free(a->j);
304:     } else {
305:       PetscFree(a->i);
306:       PetscFree(a->j);
307:       PetscFree(a->values);
308:     }
309:   }
310:   PetscFree(a->rowvalues);
311:   PetscFree(mat->data);
312:   PetscObjectChangeTypeName((PetscObject)mat,0);
313:   PetscObjectComposeFunction((PetscObject)mat,"MatMPIAdjSetPreallocation_C",NULL);
314:   PetscObjectComposeFunction((PetscObject)mat,"MatMPIAdjCreateNonemptySubcommMat_C",NULL);
315:   return(0);
316: }

320: static PetscErrorCode MatSetOption_MPIAdj(Mat A,MatOption op,PetscBool flg)
321: {
322:   Mat_MPIAdj     *a = (Mat_MPIAdj*)A->data;

326:   switch (op) {
327:   case MAT_SYMMETRIC:
328:   case MAT_STRUCTURALLY_SYMMETRIC:
329:   case MAT_HERMITIAN:
330:     a->symmetric = flg;
331:     break;
332:   case MAT_SYMMETRY_ETERNAL:
333:     break;
334:   default:
335:     PetscInfo1(A,"Option %s ignored\n",MatOptions[op]);
336:     break;
337:   }
338:   return(0);
339: }

343: static PetscErrorCode MatGetRow_MPIAdj(Mat A,PetscInt row,PetscInt *nz,PetscInt **idx,PetscScalar **v)
344: {
345:   Mat_MPIAdj *a = (Mat_MPIAdj*)A->data;

349:   row -= A->rmap->rstart;
350:   if (row < 0 || row >= A->rmap->n) SETERRQ(PETSC_COMM_SELF,PETSC_ERR_ARG_OUTOFRANGE,"Row out of range");
351:   *nz = a->i[row+1] - a->i[row];
352:   if (v) {
353:     PetscInt j;
354:     if (a->rowvalues_alloc < *nz) {
355:       PetscFree(a->rowvalues);
356:       a->rowvalues_alloc = PetscMax(a->rowvalues_alloc*2, *nz);
357:       PetscMalloc1(a->rowvalues_alloc,&a->rowvalues);
358:     }
359:     for (j=0; j<*nz; j++){
360:       a->rowvalues[j] = a->values ? a->values[a->i[row]+j]:1.0;
361:     }
362:     *v = (*nz) ? a->rowvalues : NULL;
363:   }
364:   if (idx) *idx = (*nz) ? a->j + a->i[row] : NULL;
365:   return(0);
366: }

370: static PetscErrorCode MatRestoreRow_MPIAdj(Mat A,PetscInt row,PetscInt *nz,PetscInt **idx,PetscScalar **v)
371: {
373:   return(0);
374: }

378: static PetscErrorCode MatEqual_MPIAdj(Mat A,Mat B,PetscBool * flg)
379: {
380:   Mat_MPIAdj     *a = (Mat_MPIAdj*)A->data,*b = (Mat_MPIAdj*)B->data;
382:   PetscBool      flag;

 385:   /* if the matrix dimensions or the numbers of nonzeros are not equal, the matrices differ */
 386:   if ((A->rmap->n != B->rmap->n) ||(a->nz != b->nz)) {
 387:     flag = PETSC_FALSE;
 388:   } else {

 390:     /* check whether the row offsets a->i are the same */
 391:     PetscMemcmp(a->i,b->i,(A->rmap->n+1)*sizeof(PetscInt),&flag);

 393:     /* and, only if they are, whether the column indices a->j are the same */
 394:     if (flag) {PetscMemcmp(a->j,b->j,(a->nz)*sizeof(PetscInt),&flag);}
 395:   }

396:   MPIU_Allreduce(&flag,flg,1,MPIU_BOOL,MPI_LAND,PetscObjectComm((PetscObject)A));
397:   return(0);
398: }

402: static PetscErrorCode MatGetRowIJ_MPIAdj(Mat A,PetscInt oshift,PetscBool symmetric,PetscBool blockcompressed,PetscInt *m,const PetscInt *inia[],const PetscInt *inja[],PetscBool  *done)
403: {
404:   PetscInt   i;
405:   Mat_MPIAdj *a   = (Mat_MPIAdj*)A->data;
406:   PetscInt   **ia = (PetscInt**)inia,**ja = (PetscInt**)inja;

409:   *m    = A->rmap->n;
410:   *ia   = a->i;
411:   *ja   = a->j;
412:   *done = PETSC_TRUE;
413:   if (oshift) {
414:     for (i=0; i<(*ia)[*m]; i++) {
415:       (*ja)[i]++;
416:     }
417:     for (i=0; i<=(*m); i++) (*ia)[i]++;
418:   }
419:   return(0);
420: }

424: static PetscErrorCode MatRestoreRowIJ_MPIAdj(Mat A,PetscInt oshift,PetscBool symmetric,PetscBool blockcompressed,PetscInt *m,const PetscInt *inia[],const PetscInt *inja[],PetscBool  *done)
425: {
426:   PetscInt   i;
427:   Mat_MPIAdj *a   = (Mat_MPIAdj*)A->data;
428:   PetscInt   **ia = (PetscInt**)inia,**ja = (PetscInt**)inja;

431:   if (ia && a->i != *ia) SETERRQ(PetscObjectComm((PetscObject)A),PETSC_ERR_ARG_WRONGSTATE,"ia passed back is not one obtained with MatGetRowIJ()");
432:   if (ja && a->j != *ja) SETERRQ(PetscObjectComm((PetscObject)A),PETSC_ERR_ARG_WRONGSTATE,"ja passed back is not one obtained with MatGetRowIJ()");
433:   if (oshift) {
 434:     if (!ia) SETERRQ(PetscObjectComm((PetscObject)A),PETSC_ERR_ARG_WRONGSTATE,"If oshift is nonzero you must pass in the inia[] argument");
 435:     if (!ja) SETERRQ(PetscObjectComm((PetscObject)A),PETSC_ERR_ARG_WRONGSTATE,"If oshift is nonzero you must pass in the inja[] argument");
436:     for (i=0; i<=(*m); i++) (*ia)[i]--;
437:     for (i=0; i<(*ia)[*m]; i++) {
438:       (*ja)[i]--;
439:     }
440:   }
441:   return(0);
442: }

446: PetscErrorCode  MatConvertFrom_MPIAdj(Mat A,MatType type,MatReuse reuse,Mat *newmat)
447: {
448:   Mat               B;
449:   PetscErrorCode    ierr;
450:   PetscInt          i,m,N,nzeros = 0,*ia,*ja,len,rstart,cnt,j,*a;
451:   const PetscInt    *rj;
452:   const PetscScalar *ra;
453:   MPI_Comm          comm;

456:   MatGetSize(A,NULL,&N);
457:   MatGetLocalSize(A,&m,NULL);
458:   MatGetOwnershipRange(A,&rstart,NULL);

460:   /* count the number of nonzeros per row */
461:   for (i=0; i<m; i++) {
462:     MatGetRow(A,i+rstart,&len,&rj,NULL);
463:     for (j=0; j<len; j++) {
464:       if (rj[j] == i+rstart) {len--; break;}    /* don't count diagonal */
465:     }
466:     nzeros += len;
467:     MatRestoreRow(A,i+rstart,&len,&rj,NULL);
468:   }

470:   /* malloc space for nonzeros */
471:   PetscMalloc1(nzeros+1,&a);
472:   PetscMalloc1(N+1,&ia);
473:   PetscMalloc1(nzeros+1,&ja);

475:   nzeros = 0;
476:   ia[0]  = 0;
477:   for (i=0; i<m; i++) {
478:     MatGetRow(A,i+rstart,&len,&rj,&ra);
479:     cnt  = 0;
480:     for (j=0; j<len; j++) {
481:       if (rj[j] != i+rstart) { /* if not diagonal */
482:         a[nzeros+cnt]    = (PetscInt) PetscAbsScalar(ra[j]);
483:         ja[nzeros+cnt++] = rj[j];
484:       }
485:     }
486:     MatRestoreRow(A,i+rstart,&len,&rj,&ra);
487:     nzeros += cnt;
488:     ia[i+1] = nzeros;
489:   }

491:   PetscObjectGetComm((PetscObject)A,&comm);
492:   MatCreate(comm,&B);
493:   MatSetSizes(B,m,PETSC_DETERMINE,PETSC_DETERMINE,N);
494:   MatSetType(B,type);
495:   MatMPIAdjSetPreallocation(B,ia,ja,a);

497:   if (reuse == MAT_INPLACE_MATRIX) {
498:     MatHeaderReplace(A,&B);
499:   } else {
500:     *newmat = B;
501:   }
502:   return(0);
503: }

505: /* -------------------------------------------------------------------*/
506: static struct _MatOps MatOps_Values = {0,
507:                                        MatGetRow_MPIAdj,
508:                                        MatRestoreRow_MPIAdj,
509:                                        0,
510:                                 /* 4*/ 0,
511:                                        0,
512:                                        0,
513:                                        0,
514:                                        0,
515:                                        0,
516:                                 /*10*/ 0,
517:                                        0,
518:                                        0,
519:                                        0,
520:                                        0,
521:                                 /*15*/ 0,
522:                                        MatEqual_MPIAdj,
523:                                        0,
524:                                        0,
525:                                        0,
526:                                 /*20*/ 0,
527:                                        0,
528:                                        MatSetOption_MPIAdj,
529:                                        0,
530:                                 /*24*/ 0,
531:                                        0,
532:                                        0,
533:                                        0,
534:                                        0,
535:                                 /*29*/ 0,
536:                                        0,
537:                                        0,
538:                                        0,
539:                                        0,
540:                                 /*34*/ 0,
541:                                        0,
542:                                        0,
543:                                        0,
544:                                        0,
545:                                 /*39*/ 0,
546:                                        MatGetSubMatrices_MPIAdj,
547:                                        0,
548:                                        0,
549:                                        0,
550:                                 /*44*/ 0,
551:                                        0,
552:                                        MatShift_Basic,
553:                                        0,
554:                                        0,
555:                                 /*49*/ 0,
556:                                        MatGetRowIJ_MPIAdj,
557:                                        MatRestoreRowIJ_MPIAdj,
558:                                        0,
559:                                        0,
560:                                 /*54*/ 0,
561:                                        0,
562:                                        0,
563:                                        0,
564:                                        0,
565:                                 /*59*/ 0,
566:                                        MatDestroy_MPIAdj,
567:                                        MatView_MPIAdj,
568:                                        MatConvertFrom_MPIAdj,
569:                                        0,
570:                                 /*64*/ 0,
571:                                        0,
572:                                        0,
573:                                        0,
574:                                        0,
575:                                 /*69*/ 0,
576:                                        0,
577:                                        0,
578:                                        0,
579:                                        0,
580:                                 /*74*/ 0,
581:                                        0,
582:                                        0,
583:                                        0,
584:                                        0,
585:                                 /*79*/ 0,
586:                                        0,
587:                                        0,
588:                                        0,
589:                                        0,
590:                                 /*84*/ 0,
591:                                        0,
592:                                        0,
593:                                        0,
594:                                        0,
595:                                 /*89*/ 0,
596:                                        0,
597:                                        0,
598:                                        0,
599:                                        0,
600:                                 /*94*/ 0,
601:                                        0,
602:                                        0,
603:                                        0,
604:                                        0,
605:                                 /*99*/ 0,
606:                                        0,
607:                                        0,
608:                                        0,
609:                                        0,
610:                                /*104*/ 0,
611:                                        0,
612:                                        0,
613:                                        0,
614:                                        0,
615:                                /*109*/ 0,
616:                                        0,
617:                                        0,
618:                                        0,
619:                                        0,
620:                                /*114*/ 0,
621:                                        0,
622:                                        0,
623:                                        0,
624:                                        0,
625:                                /*119*/ 0,
626:                                        0,
627:                                        0,
628:                                        0,
629:                                        0,
630:                                /*124*/ 0,
631:                                        0,
632:                                        0,
633:                                        0,
634:                                        MatGetSubMatricesMPI_MPIAdj,
635:                                /*129*/ 0,
636:                                        0,
637:                                        0,
638:                                        0,
639:                                        0,
640:                                /*134*/ 0,
641:                                        0,
642:                                        0,
643:                                        0,
644:                                        0,
645:                                /*139*/ 0,
646:                                        0,
647:                                        0
648: };

652: static PetscErrorCode  MatMPIAdjSetPreallocation_MPIAdj(Mat B,PetscInt *i,PetscInt *j,PetscInt *values)
653: {
654:   Mat_MPIAdj     *b = (Mat_MPIAdj*)B->data;
656: #if defined(PETSC_USE_DEBUG)
657:   PetscInt ii;
658: #endif

661:   PetscLayoutSetUp(B->rmap);
662:   PetscLayoutSetUp(B->cmap);

664: #if defined(PETSC_USE_DEBUG)
665:   if (i[0] != 0) SETERRQ1(PETSC_COMM_SELF,PETSC_ERR_ARG_OUTOFRANGE,"First i[] index must be zero, instead it is %D\n",i[0]);
666:   for (ii=1; ii<B->rmap->n; ii++) {
667:     if (i[ii] < 0 || i[ii] < i[ii-1]) SETERRQ4(PETSC_COMM_SELF,PETSC_ERR_ARG_OUTOFRANGE,"i[%D]=%D index is out of range: i[%D]=%D",ii,i[ii],ii-1,i[ii-1]);
668:   }
669:   for (ii=0; ii<i[B->rmap->n]; ii++) {
 670:     if (j[ii] < 0 || j[ii] >= B->cmap->N) SETERRQ2(PETSC_COMM_SELF,PETSC_ERR_ARG_OUTOFRANGE,"Column index j[%D]=%D is out of range\n",ii,j[ii]);
671:   }
672: #endif
673:   B->preallocated = PETSC_TRUE;

675:   b->j      = j;
676:   b->i      = i;
677:   b->values = values;

679:   b->nz        = i[B->rmap->n];
680:   b->diag      = 0;
681:   b->symmetric = PETSC_FALSE;
682:   b->freeaij   = PETSC_TRUE;

684:   MatAssemblyBegin(B,MAT_FINAL_ASSEMBLY);
685:   MatAssemblyEnd(B,MAT_FINAL_ASSEMBLY);
686:   return(0);
687: }

691: static PetscErrorCode MatMPIAdjCreateNonemptySubcommMat_MPIAdj(Mat A,Mat *B)
692: {
693:   Mat_MPIAdj     *a = (Mat_MPIAdj*)A->data;
695:   const PetscInt *ranges;
696:   MPI_Comm       acomm,bcomm;
697:   MPI_Group      agroup,bgroup;
698:   PetscMPIInt    i,rank,size,nranks,*ranks;

701:   *B    = NULL;
702:   PetscObjectGetComm((PetscObject)A,&acomm);
703:   MPI_Comm_size(acomm,&size);
 704:   MPI_Comm_rank(acomm,&rank);
705:   MatGetOwnershipRanges(A,&ranges);
706:   for (i=0,nranks=0; i<size; i++) {
707:     if (ranges[i+1] - ranges[i] > 0) nranks++;
708:   }
709:   if (nranks == size) {         /* All ranks have a positive number of rows, so we do not need to create a subcomm; */
710:     PetscObjectReference((PetscObject)A);
711:     *B   = A;
712:     return(0);
713:   }

715:   PetscMalloc1(nranks,&ranks);
716:   for (i=0,nranks=0; i<size; i++) {
717:     if (ranges[i+1] - ranges[i] > 0) ranks[nranks++] = i;
718:   }
719:   MPI_Comm_group(acomm,&agroup);
720:   MPI_Group_incl(agroup,nranks,ranks,&bgroup);
721:   PetscFree(ranks);
722:   MPI_Comm_create(acomm,bgroup,&bcomm);
723:   MPI_Group_free(&agroup);
724:   MPI_Group_free(&bgroup);
725:   if (bcomm != MPI_COMM_NULL) {
726:     PetscInt   m,N;
727:     Mat_MPIAdj *b;
728:     MatGetLocalSize(A,&m,NULL);
729:     MatGetSize(A,NULL,&N);
730:     MatCreateMPIAdj(bcomm,m,N,a->i,a->j,a->values,B);
731:     b          = (Mat_MPIAdj*)(*B)->data;
732:     b->freeaij = PETSC_FALSE;
733:     MPI_Comm_free(&bcomm);
734:   }
735:   return(0);
736: }

740: /*@
741:    MatMPIAdjCreateNonemptySubcommMat - create the same MPIAdj matrix on a subcommunicator containing only processes owning a positive number of rows

743:    Collective

745:    Input Arguments:
746: .  A - original MPIAdj matrix

748:    Output Arguments:
749: .  B - matrix on subcommunicator, NULL on ranks that owned zero rows of A

751:    Level: developer

753:    Note:
754:    This function is mostly useful for internal use by mesh partitioning packages that require that every process owns at least one row.

756:    The matrix B should be destroyed with MatDestroy(). The arrays are not copied, so B should be destroyed before A is destroyed.

758: .seealso: MatCreateMPIAdj()
759: @*/
760: PetscErrorCode MatMPIAdjCreateNonemptySubcommMat(Mat A,Mat *B)
761: {

766:   PetscUseMethod(A,"MatMPIAdjCreateNonemptySubcommMat_C",(Mat,Mat*),(A,B));
767:   return(0);
768: }
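A minimal usage sketch follows; it is not part of the original source, and the call sequence (and the assumption that A is an assembled MATMPIADJ matrix) is illustrative only.

static PetscErrorCode UseNonemptySubcommMat_Sketch(Mat A)
{
  Mat B;
  MatMPIAdjCreateNonemptySubcommMat(A,&B);
  if (B) {
    /* only ranks that own rows of A receive a matrix; hand B to the partitioner here */
    MatDestroy(&B);   /* destroy B before A, since the i/j/values arrays are shared */
  }
  return(0);
}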

770: /*MC
 771:    MATMPIADJ - MATMPIADJ = "mpiadj" - A matrix type to be used for distributed adjacency matrices,
 772:    intended for use in constructing orderings and partitionings.

774:   Level: beginner

776: .seealso: MatCreateMPIAdj
777: M*/
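As the notes of MatCreateMPIAdj() below also point out, an existing matrix can be converted to this type with MatConvert(); a short illustrative sketch (not part of the original source, with the downstream use elided):

static PetscErrorCode ConvertToAdj_Sketch(Mat A)
{
  Mat adj;
  MatConvert(A,MATMPIADJ,MAT_INITIAL_MATRIX,&adj);   /* build the adjacency graph of A */
  /* ... use adj with MatPartitioning or MatGetOrdering ... */
  MatDestroy(&adj);
  return(0);
}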

781: PETSC_EXTERN PetscErrorCode MatCreate_MPIAdj(Mat B)
782: {
783:   Mat_MPIAdj     *b;

787:   PetscNewLog(B,&b);
788:   B->data      = (void*)b;
789:   PetscMemcpy(B->ops,&MatOps_Values,sizeof(struct _MatOps));
790:   B->assembled = PETSC_FALSE;

792:   PetscObjectComposeFunction((PetscObject)B,"MatMPIAdjSetPreallocation_C",MatMPIAdjSetPreallocation_MPIAdj);
793:   PetscObjectComposeFunction((PetscObject)B,"MatMPIAdjCreateNonemptySubcommMat_C",MatMPIAdjCreateNonemptySubcommMat_MPIAdj);
794:   PetscObjectChangeTypeName((PetscObject)B,MATMPIADJ);
795:   return(0);
796: }

800: /*@C
 801:    MatMPIAdjSetPreallocation - Sets the arrays used for storing the matrix structure (and optional edge weights)

803:    Logically Collective on MPI_Comm

805:    Input Parameters:
806: +  A - the matrix
807: .  i - the indices into j for the start of each row
808: .  j - the column indices for each row (sorted for each row).
809:        The indices in i and j start with zero (NOT with one).
810: -  values - [optional] edge weights

812:    Level: intermediate

814: .seealso: MatCreate(), MatCreateMPIAdj(), MatSetValues()
815: @*/
816: PetscErrorCode  MatMPIAdjSetPreallocation(Mat B,PetscInt *i,PetscInt *j,PetscInt *values)
817: {

821:   PetscTryMethod(B,"MatMPIAdjSetPreallocation_C",(Mat,PetscInt*,PetscInt*,PetscInt*),(B,i,j,values));
822:   return(0);
823: }
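A minimal sketch of calling this routine, not part of the original source: it assumes B has already been created with MatCreate(), MatSetSizes(), and MatSetType(B,MATMPIADJ), and the two-row graph is a made-up example.

static PetscErrorCode AdjPreallocation_Sketch(Mat B)
{
  PetscInt *i,*j;
  PetscMalloc1(3,&i);                      /* nlocalrows+1 row offsets              */
  PetscMalloc1(4,&j);                      /* column indices, sorted within each row */
  i[0] = 0; i[1] = 2; i[2] = 4;            /* row 0 -> {1,2}, row 1 -> {0,3}        */
  j[0] = 1; j[1] = 2; j[2] = 0; j[3] = 3;
  MatMPIAdjSetPreallocation(B,i,j,NULL);   /* NULL: no edge weights; B now owns i and j */
  return(0);
}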

827: /*@C
828:    MatCreateMPIAdj - Creates a sparse matrix representing an adjacency list.
829:    The matrix does not have numerical values associated with it, but is
830:    intended for ordering (to reduce bandwidth etc) and partitioning.

832:    Collective on MPI_Comm

834:    Input Parameters:
835: +  comm - MPI communicator
836: .  m - number of local rows
837: .  N - number of global columns
838: .  i - the indices into j for the start of each row
839: .  j - the column indices for each row (sorted for each row).
840:        The indices in i and j start with zero (NOT with one).
841: -  values -[optional] edge weights

843:    Output Parameter:
844: .  A - the matrix

846:    Level: intermediate

 848:    Notes: This matrix object does not support most matrix operations, including
 849:    MatSetValues().
 850:    You must NOT free the i, j and values arrays yourself. PETSc will free them
 851:    when the matrix is destroyed; you must allocate them with PetscMalloc(). If you
 852:    call from Fortran you need not create the arrays with PetscMalloc().
 853:    The arrays should not include the matrix diagonal entries.

855:    If you already have a matrix, you can create its adjacency matrix by a call
856:    to MatConvert, specifying a type of MATMPIADJ.

858:    Possible values for MatSetOption() - MAT_STRUCTURALLY_SYMMETRIC

860: .seealso: MatCreate(), MatConvert(), MatGetOrdering()
861: @*/
862: PetscErrorCode  MatCreateMPIAdj(MPI_Comm comm,PetscInt m,PetscInt N,PetscInt *i,PetscInt *j,PetscInt *values,Mat *A)
863: {

867:   MatCreate(comm,A);
868:   MatSetSizes(*A,m,PETSC_DETERMINE,PETSC_DETERMINE,N);
869:   MatSetType(*A,MATMPIADJ);
870:   MatMPIAdjSetPreallocation(*A,i,j,values);
871:   return(0);
872: }
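Finally, a minimal creation sketch (not part of the original source; the sizes and indices are made-up example values):

static PetscErrorCode CreateAdj_Sketch(MPI_Comm comm,Mat *A)
{
  PetscInt *i,*j,m = 2,N = 4;              /* two local rows, four global columns */
  PetscMalloc1(m+1,&i);
  PetscMalloc1(4,&j);
  i[0] = 0; i[1] = 2; i[2] = 4;            /* row 0 -> {1,2}, row 1 -> {0,3}      */
  j[0] = 1; j[1] = 2; j[2] = 0; j[3] = 3;
  MatCreateMPIAdj(comm,m,N,i,j,NULL,A);    /* the new matrix takes ownership of i and j */
  return(0);
}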