Actual source code: mpiadj.c

petsc-master 2019-11-16

/*
    Defines the basic matrix operations for the ADJ adjacency list matrix data-structure.
*/
#include <../src/mat/impls/adj/mpi/mpiadj.h>
#include <petscsf.h>

/*
 * The interface should be easy to use for both MatCreateSubMatrix (parallel sub-matrix) and MatCreateSubMatrices (sequential sub-matrices)
 */
static PetscErrorCode MatCreateSubMatrix_MPIAdj_data(Mat adj,IS irows, IS icols, PetscInt **sadj_xadj,PetscInt **sadj_adjncy,PetscInt **sadj_values)
{
  PetscInt           nlrows_is,icols_n,i,j,nroots,nleaves,rlocalindex,*ncols_send,*ncols_recv;
  PetscInt           nlrows_mat,*adjncy_recv,Ncols_recv,Ncols_send,*xadj_recv,*values_recv;
  PetscInt          *ncols_recv_offsets,loc,rnclos,*sadjncy,*sxadj,*svalues;
  const PetscInt    *irows_indices,*icols_indices,*xadj,*adjncy;
  PetscMPIInt        owner;
  Mat_MPIAdj        *a = (Mat_MPIAdj*)adj->data;
  PetscLayout        rmap;
  MPI_Comm           comm;
  PetscSF            sf;
  PetscSFNode       *iremote;
  PetscBool          done;
  PetscErrorCode     ierr;

  PetscObjectGetComm((PetscObject)adj,&comm);
  MatGetLayouts(adj,&rmap,NULL);
  ISGetLocalSize(irows,&nlrows_is);
  ISGetIndices(irows,&irows_indices);
  PetscMalloc1(nlrows_is,&iremote);
  /* construct the SF graph: leaf i requests row irows_indices[i] from its owning process */
  nleaves = nlrows_is;
  for (i=0; i<nlrows_is; i++) {
    owner = -1;
    rlocalindex = -1;
    PetscLayoutFindOwnerIndex(rmap,irows_indices[i],&owner,&rlocalindex);
    iremote[i].rank  = owner;
    iremote[i].index = rlocalindex;
  }
  MatGetRowIJ(adj,0,PETSC_FALSE,PETSC_FALSE,&nlrows_mat,&xadj,&adjncy,&done);
  PetscCalloc4(nlrows_mat,&ncols_send,nlrows_is,&xadj_recv,nlrows_is+1,&ncols_recv_offsets,nlrows_is,&ncols_recv);
  nroots = nlrows_mat;
  for (i=0; i<nlrows_mat; i++) {
    ncols_send[i] = xadj[i+1]-xadj[i];
  }
  PetscSFCreate(comm,&sf);
  PetscSFSetGraph(sf,nroots,nleaves,NULL,PETSC_OWN_POINTER,iremote,PETSC_OWN_POINTER);
  PetscSFSetType(sf,PETSCSFBASIC);
  PetscSFSetFromOptions(sf);
  /* fetch the number of columns and the xadj offset of every requested row */
  PetscSFBcastBegin(sf,MPIU_INT,ncols_send,ncols_recv);
  PetscSFBcastEnd(sf,MPIU_INT,ncols_send,ncols_recv);
  PetscSFBcastBegin(sf,MPIU_INT,xadj,xadj_recv);
  PetscSFBcastEnd(sf,MPIU_INT,xadj,xadj_recv);
  PetscSFDestroy(&sf);
  Ncols_recv = 0;
  for (i=0; i<nlrows_is; i++) {
    Ncols_recv             += ncols_recv[i];
    ncols_recv_offsets[i+1] = ncols_recv[i]+ncols_recv_offsets[i];
  }
  Ncols_send = 0;
  for (i=0; i<nlrows_mat; i++) {
    Ncols_send += ncols_send[i];
  }
  PetscCalloc1(Ncols_recv,&iremote);
  PetscCalloc1(Ncols_recv,&adjncy_recv);
  /* second SF: one leaf per incoming column entry */
  nleaves = Ncols_recv;
  Ncols_recv = 0;
  for (i=0; i<nlrows_is; i++) {
    PetscLayoutFindOwner(rmap,irows_indices[i],&owner);
    for (j=0; j<ncols_recv[i]; j++) {
      iremote[Ncols_recv].rank    = owner;
      iremote[Ncols_recv++].index = xadj_recv[i]+j;
    }
  }
  ISRestoreIndices(irows,&irows_indices);
  /* also gather the edge weights when the matrix carries them */
  if (a->useedgeweights) {PetscCalloc1(Ncols_recv,&values_recv);}
  nroots = Ncols_send;
  PetscSFCreate(comm,&sf);
  PetscSFSetGraph(sf,nroots,nleaves,NULL,PETSC_OWN_POINTER,iremote,PETSC_OWN_POINTER);
  PetscSFSetType(sf,PETSCSFBASIC);
  PetscSFSetFromOptions(sf);
  PetscSFBcastBegin(sf,MPIU_INT,adjncy,adjncy_recv);
  PetscSFBcastEnd(sf,MPIU_INT,adjncy,adjncy_recv);
  if (a->useedgeweights) {
    PetscSFBcastBegin(sf,MPIU_INT,a->values,values_recv);
    PetscSFBcastEnd(sf,MPIU_INT,a->values,values_recv);
  }
  PetscSFDestroy(&sf);
  MatRestoreRowIJ(adj,0,PETSC_FALSE,PETSC_FALSE,&nlrows_mat,&xadj,&adjncy,&done);
  ISGetLocalSize(icols,&icols_n);
  ISGetIndices(icols,&icols_indices);
  /* drop the received entries whose columns are not in icols */
  rnclos = 0;
  for (i=0; i<nlrows_is; i++) {
    for (j=ncols_recv_offsets[i]; j<ncols_recv_offsets[i+1]; j++) {
      PetscFindInt(adjncy_recv[j],icols_n,icols_indices,&loc);
      if (loc<0) {
        adjncy_recv[j] = -1;
        if (a->useedgeweights) values_recv[j] = -1;
        ncols_recv[i]--;
      } else {
        rnclos++;
      }
    }
  }
  ISRestoreIndices(icols,&icols_indices);
  PetscCalloc1(rnclos,&sadjncy);
  if (a->useedgeweights) {PetscCalloc1(rnclos,&svalues);}
  PetscCalloc1(nlrows_is+1,&sxadj);
  /* compact the surviving entries into the output CSR arrays */
  rnclos = 0;
  for (i=0; i<nlrows_is; i++) {
    for (j=ncols_recv_offsets[i]; j<ncols_recv_offsets[i+1]; j++) {
      if (adjncy_recv[j]<0) continue;
      sadjncy[rnclos] = adjncy_recv[j];
      if (a->useedgeweights) svalues[rnclos] = values_recv[j];
      rnclos++;
    }
  }
  for (i=0; i<nlrows_is; i++) {
    sxadj[i+1] = sxadj[i]+ncols_recv[i];
  }
  if (sadj_xadj)   {*sadj_xadj = sxadj;}     else {PetscFree(sxadj);}
  if (sadj_adjncy) {*sadj_adjncy = sadjncy;} else {PetscFree(sadjncy);}
  if (sadj_values) {
    if (a->useedgeweights) *sadj_values = svalues; else *sadj_values = NULL;
  } else {
    if (a->useedgeweights) {PetscFree(svalues);}
  }
  PetscFree4(ncols_send,xadj_recv,ncols_recv_offsets,ncols_recv);
  PetscFree(adjncy_recv);
  if (a->useedgeweights) {PetscFree(values_recv);}
  return(0);
}
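
/*
 * An illustrative sketch (not part of the original source): the two-phase PetscSF
 * pattern used above, reduced to its core. Each process exposes nroots values; each
 * of its nleaves leaves pulls the value at the (rank,index) pair iremote[k] on the
 * owning process, and PetscSFBcastBegin/End moves the root data to the leaves in
 * one communication round. All names below are local to this sketch;
 * PETSC_COPY_VALUES is used so the caller keeps ownership of iremote.
 */
static PetscErrorCode SketchSFBcast(MPI_Comm comm,PetscInt nroots,const PetscInt *rootdata,PetscInt nleaves,PetscSFNode *iremote,PetscInt *leafdata)
{
  PetscSF sf;

  PetscSFCreate(comm,&sf);
  /* NULL local indices means the leaves are numbered 0..nleaves-1 */
  PetscSFSetGraph(sf,nroots,nleaves,NULL,PETSC_COPY_VALUES,iremote,PETSC_COPY_VALUES);
  PetscSFSetFromOptions(sf);
  PetscSFBcastBegin(sf,MPIU_INT,rootdata,leafdata);
  PetscSFBcastEnd(sf,MPIU_INT,rootdata,leafdata);
  PetscSFDestroy(&sf);
  return(0);
}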

static PetscErrorCode MatCreateSubMatrices_MPIAdj_Private(Mat mat,PetscInt n,const IS irow[],const IS icol[],PetscBool subcomm,MatReuse scall,Mat *submat[])
{
  PetscInt           i,irow_n,icol_n,*sxadj,*sadjncy,*svalues;
  PetscInt          *indices,nindx,j,k,loc;
  PetscMPIInt        issame;
  const PetscInt    *irow_indices,*icol_indices;
  MPI_Comm           scomm_row,scomm_col,scomm_mat;
  PetscErrorCode     ierr;

  nindx = 0;
  /*
   * Estimate an upper bound on the workspace needed for the index array
   */
  for (i=0; i<n; i++) {
    ISGetLocalSize(irow[i],&irow_n);
    ISGetLocalSize(icol[i],&icol_n);
    nindx = nindx>(irow_n+icol_n)? nindx:(irow_n+icol_n);
  }
  PetscMalloc1(nindx,&indices);
  /* construct the submatrices */
  for (i=0; i<n; i++) {
    if (subcomm) {
      PetscObjectGetComm((PetscObject)irow[i],&scomm_row);
      PetscObjectGetComm((PetscObject)icol[i],&scomm_col);
      MPI_Comm_compare(scomm_row,scomm_col,&issame);
      if (issame != MPI_IDENT) SETERRQ(PETSC_COMM_SELF,PETSC_ERR_ARG_INCOMP,"row index set must have the same comm as the col index set\n");
      MPI_Comm_compare(scomm_row,PETSC_COMM_SELF,&issame);
      if (issame == MPI_IDENT) SETERRQ(PETSC_COMM_SELF,PETSC_ERR_ARG_INCOMP,"cannot use PETSC_COMM_SELF as comm when extracting a parallel submatrix\n");
    } else {
      scomm_row = PETSC_COMM_SELF;
    }
    /* get sub-matrix data */
    sxadj = NULL; sadjncy = NULL; svalues = NULL;
    MatCreateSubMatrix_MPIAdj_data(mat,irow[i],icol[i],&sxadj,&sadjncy,&svalues);
    ISGetLocalSize(irow[i],&irow_n);
    ISGetLocalSize(icol[i],&icol_n);
    ISGetIndices(irow[i],&irow_indices);
    PetscArraycpy(indices,irow_indices,irow_n);
    ISRestoreIndices(irow[i],&irow_indices);
    ISGetIndices(icol[i],&icol_indices);
    PetscArraycpy(indices+irow_n,icol_indices,icol_n);
    ISRestoreIndices(icol[i],&icol_indices);
    nindx = irow_n+icol_n;
    PetscSortRemoveDupsInt(&nindx,indices);
    /* renumber columns into the local numbering of the submatrix */
    for (j=0; j<irow_n; j++) {
      for (k=sxadj[j]; k<sxadj[j+1]; k++) {
        PetscFindInt(sadjncy[k],nindx,indices,&loc);
#if defined(PETSC_USE_DEBUG)
        if (loc<0) SETERRQ1(PETSC_COMM_SELF,PETSC_ERR_ARG_WRONG,"cannot find col %D",sadjncy[k]);
#endif
        sadjncy[k] = loc;
      }
    }
    if (scall==MAT_INITIAL_MATRIX) {
      MatCreateMPIAdj(scomm_row,irow_n,icol_n,sxadj,sadjncy,svalues,submat[i]);
    } else {
      Mat         sadj = *(submat[i]);
      Mat_MPIAdj *sa   = (Mat_MPIAdj*)((sadj)->data);
      PetscObjectGetComm((PetscObject)sadj,&scomm_mat);
      MPI_Comm_compare(scomm_row,scomm_mat,&issame);
      if (issame != MPI_IDENT) SETERRQ(PETSC_COMM_SELF,PETSC_ERR_ARG_INCOMP,"submatrix must have the same comm as the row index set\n");
      PetscArraycpy(sa->i,sxadj,irow_n+1);
      PetscArraycpy(sa->j,sadjncy,sxadj[irow_n]);
      if (svalues) {PetscArraycpy(sa->values,svalues,sxadj[irow_n]);}
      PetscFree(sxadj);
      PetscFree(sadjncy);
      if (svalues) {PetscFree(svalues);}
    }
  }
  PetscFree(indices);
  return(0);
}

static PetscErrorCode MatCreateSubMatricesMPI_MPIAdj(Mat mat,PetscInt n, const IS irow[],const IS icol[],MatReuse scall,Mat *submat[])
{
  PetscErrorCode     ierr;

  /* get sub-matrices across a sub communicator */
  MatCreateSubMatrices_MPIAdj_Private(mat,n,irow,icol,PETSC_TRUE,scall,submat);
  return(0);
}

static PetscErrorCode MatCreateSubMatrices_MPIAdj(Mat mat,PetscInt n,const IS irow[],const IS icol[],MatReuse scall,Mat *submat[])
{
  PetscErrorCode     ierr;

  /* get sub-matrices based on PETSC_COMM_SELF */
  MatCreateSubMatrices_MPIAdj_Private(mat,n,irow,icol,PETSC_FALSE,scall,submat);
  return(0);
}

static PetscErrorCode MatView_MPIAdj_ASCII(Mat A,PetscViewer viewer)
{
  Mat_MPIAdj        *a = (Mat_MPIAdj*)A->data;
  PetscErrorCode    ierr;
  PetscInt          i,j,m = A->rmap->n;
  const char        *name;
  PetscViewerFormat format;

  PetscObjectGetName((PetscObject)A,&name);
  PetscViewerGetFormat(viewer,&format);
  if (format == PETSC_VIEWER_ASCII_INFO) {
    return(0);
  } else if (format == PETSC_VIEWER_ASCII_MATLAB) SETERRQ(PetscObjectComm((PetscObject)A),PETSC_ERR_SUP,"MATLAB format not supported");
  else {
    PetscViewerASCIIUseTabs(viewer,PETSC_FALSE);
    PetscViewerASCIIPushSynchronized(viewer);
    for (i=0; i<m; i++) {
      PetscViewerASCIISynchronizedPrintf(viewer,"row %D:",i+A->rmap->rstart);
      for (j=a->i[i]; j<a->i[i+1]; j++) {
        if (a->values) {
          PetscViewerASCIISynchronizedPrintf(viewer," (%D, %D) ",a->j[j],a->values[j]);
        } else {
          PetscViewerASCIISynchronizedPrintf(viewer," %D ",a->j[j]);
        }
      }
      PetscViewerASCIISynchronizedPrintf(viewer,"\n");
    }
    PetscViewerASCIIUseTabs(viewer,PETSC_TRUE);
    PetscViewerFlush(viewer);
    PetscViewerASCIIPopSynchronized(viewer);
  }
  return(0);
}

static PetscErrorCode MatView_MPIAdj(Mat A,PetscViewer viewer)
{
  PetscBool      iascii;

  PetscObjectTypeCompare((PetscObject)viewer,PETSCVIEWERASCII,&iascii);
  if (iascii) {
    MatView_MPIAdj_ASCII(A,viewer);
  }
  return(0);
}

static PetscErrorCode MatDestroy_MPIAdj(Mat mat)
{
  Mat_MPIAdj     *a = (Mat_MPIAdj*)mat->data;

#if defined(PETSC_USE_LOG)
  PetscLogObjectState((PetscObject)mat,"Rows=%D, Cols=%D, NZ=%D",mat->rmap->n,mat->cmap->n,a->nz);
#endif
  PetscFree(a->diag);
  if (a->freeaij) {
    if (a->freeaijwithfree) {
      if (a->i) free(a->i);
      if (a->j) free(a->j);
    } else {
      PetscFree(a->i);
      PetscFree(a->j);
      PetscFree(a->values);
    }
  }
  PetscFree(a->rowvalues);
  PetscFree(mat->data);
  PetscObjectChangeTypeName((PetscObject)mat,0);
  PetscObjectComposeFunction((PetscObject)mat,"MatMPIAdjSetPreallocation_C",NULL);
  PetscObjectComposeFunction((PetscObject)mat,"MatMPIAdjCreateNonemptySubcommMat_C",NULL);
  return(0);
}

static PetscErrorCode MatSetOption_MPIAdj(Mat A,MatOption op,PetscBool flg)
{
  Mat_MPIAdj     *a = (Mat_MPIAdj*)A->data;

  switch (op) {
  case MAT_SYMMETRIC:
  case MAT_STRUCTURALLY_SYMMETRIC:
  case MAT_HERMITIAN:
    a->symmetric = flg;
    break;
  case MAT_SYMMETRY_ETERNAL:
    break;
  default:
    PetscInfo1(A,"Option %s ignored\n",MatOptions[op]);
    break;
  }
  return(0);
}

static PetscErrorCode MatGetRow_MPIAdj(Mat A,PetscInt row,PetscInt *nz,PetscInt **idx,PetscScalar **v)
{
  Mat_MPIAdj *a = (Mat_MPIAdj*)A->data;

  row -= A->rmap->rstart;
  if (row < 0 || row >= A->rmap->n) SETERRQ(PETSC_COMM_SELF,PETSC_ERR_ARG_OUTOFRANGE,"Row out of range");
  *nz = a->i[row+1] - a->i[row];
  if (v) {
    PetscInt j;
    if (a->rowvalues_alloc < *nz) {
      PetscFree(a->rowvalues);
      a->rowvalues_alloc = PetscMax(a->rowvalues_alloc*2, *nz);
      PetscMalloc1(a->rowvalues_alloc,&a->rowvalues);
    }
    for (j=0; j<*nz; j++) {
      a->rowvalues[j] = a->values ? a->values[a->i[row]+j] : 1.0;
    }
    *v = (*nz) ? a->rowvalues : NULL;
  }
  if (idx) *idx = (*nz) ? a->j + a->i[row] : NULL;
  return(0);
}

static PetscErrorCode MatRestoreRow_MPIAdj(Mat A,PetscInt row,PetscInt *nz,PetscInt **idx,PetscScalar **v)
{
  return(0);
}

static PetscErrorCode MatEqual_MPIAdj(Mat A,Mat B,PetscBool *flg)
{
  Mat_MPIAdj     *a = (Mat_MPIAdj*)A->data,*b = (Mat_MPIAdj*)B->data;
  PetscBool      flag;

  /* if the matrix dimensions or the numbers of nonzeros differ, the matrices are not equal */
  if ((A->rmap->n != B->rmap->n) || (a->nz != b->nz)) {
    flag = PETSC_FALSE;
  } else {
    /* otherwise compare the row offsets i[], then the column indices j[] */
    PetscArraycmp(a->i,b->i,A->rmap->n+1,&flag);
    if (flag) {PetscArraycmp(a->j,b->j,a->nz,&flag);}
  }

  MPIU_Allreduce(&flag,flg,1,MPIU_BOOL,MPI_LAND,PetscObjectComm((PetscObject)A));
  return(0);
}

static PetscErrorCode MatGetRowIJ_MPIAdj(Mat A,PetscInt oshift,PetscBool symmetric,PetscBool blockcompressed,PetscInt *m,const PetscInt *inia[],const PetscInt *inja[],PetscBool *done)
{
  PetscInt   i;
  Mat_MPIAdj *a   = (Mat_MPIAdj*)A->data;
  PetscInt   **ia = (PetscInt**)inia,**ja = (PetscInt**)inja;

  *m    = A->rmap->n;
  *ia   = a->i;
  *ja   = a->j;
  *done = PETSC_TRUE;
  if (oshift) {
    for (i=0; i<(*ia)[*m]; i++) {
      (*ja)[i]++;
    }
    for (i=0; i<=(*m); i++) (*ia)[i]++;
  }
  return(0);
}

static PetscErrorCode MatRestoreRowIJ_MPIAdj(Mat A,PetscInt oshift,PetscBool symmetric,PetscBool blockcompressed,PetscInt *m,const PetscInt *inia[],const PetscInt *inja[],PetscBool *done)
{
  PetscInt   i;
  Mat_MPIAdj *a   = (Mat_MPIAdj*)A->data;
  PetscInt   **ia = (PetscInt**)inia,**ja = (PetscInt**)inja;

  if (ia && a->i != *ia) SETERRQ(PetscObjectComm((PetscObject)A),PETSC_ERR_ARG_WRONGSTATE,"ia passed back is not one obtained with MatGetRowIJ()");
  if (ja && a->j != *ja) SETERRQ(PetscObjectComm((PetscObject)A),PETSC_ERR_ARG_WRONGSTATE,"ja passed back is not one obtained with MatGetRowIJ()");
  if (oshift) {
    if (!ia) SETERRQ(PetscObjectComm((PetscObject)A),PETSC_ERR_ARG_WRONGSTATE,"If oshift is set you must pass in the inia[] argument");
    if (!ja) SETERRQ(PetscObjectComm((PetscObject)A),PETSC_ERR_ARG_WRONGSTATE,"If oshift is set you must pass in the inja[] argument");
    for (i=0; i<=(*m); i++) (*ia)[i]--;
    for (i=0; i<(*ia)[*m]; i++) {
      (*ja)[i]--;
    }
  }
  return(0);
}

PetscErrorCode  MatConvertFrom_MPIAdj(Mat A,MatType type,MatReuse reuse,Mat *newmat)
{
  Mat               B;
  PetscErrorCode    ierr;
  PetscInt          i,m,N,nzeros = 0,*ia,*ja,len,rstart,cnt,j,*a;
  const PetscInt    *rj;
  const PetscScalar *ra;
  MPI_Comm          comm;

  MatGetSize(A,NULL,&N);
  MatGetLocalSize(A,&m,NULL);
  MatGetOwnershipRange(A,&rstart,NULL);

  /* count the number of nonzeros per row */
  for (i=0; i<m; i++) {
    MatGetRow(A,i+rstart,&len,&rj,NULL);
    for (j=0; j<len; j++) {
      if (rj[j] == i+rstart) {len--; break;}    /* don't count the diagonal */
    }
    nzeros += len;
    MatRestoreRow(A,i+rstart,&len,&rj,NULL);
  }

  /* malloc space for nonzeros */
  PetscMalloc1(nzeros+1,&a);
  PetscMalloc1(m+1,&ia);      /* one offset per local row, plus one */
  PetscMalloc1(nzeros+1,&ja);

  nzeros = 0;
  ia[0]  = 0;
  for (i=0; i<m; i++) {
    MatGetRow(A,i+rstart,&len,&rj,&ra);
    cnt  = 0;
    for (j=0; j<len; j++) {
      if (rj[j] != i+rstart) { /* if not diagonal */
        a[nzeros+cnt]    = (PetscInt) PetscAbsScalar(ra[j]);
        ja[nzeros+cnt++] = rj[j];
      }
    }
    MatRestoreRow(A,i+rstart,&len,&rj,&ra);
    nzeros += cnt;
    ia[i+1] = nzeros;
  }

  PetscObjectGetComm((PetscObject)A,&comm);
  MatCreate(comm,&B);
  MatSetSizes(B,m,PETSC_DETERMINE,PETSC_DETERMINE,N);
  MatSetType(B,type);
  MatMPIAdjSetPreallocation(B,ia,ja,a);

  if (reuse == MAT_INPLACE_MATRIX) {
    MatHeaderReplace(A,&B);
  } else {
    *newmat = B;
  }
  return(0);
}
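
/*
 * An illustrative sketch (not part of the original source) of how the conversion
 * above is reached from user code: MatConvert() with MATMPIADJ dispatches to
 * MatConvertFrom_MPIAdj(), which drops diagonal entries and keeps |value| as an
 * integer edge weight. The function name here is hypothetical.
 */
static PetscErrorCode SketchConvertToAdj(Mat A)
{
  Mat adj;

  MatConvert(A,MATMPIADJ,MAT_INITIAL_MATRIX,&adj);
  /* ... use adj with a partitioner or ordering routine ... */
  MatDestroy(&adj);
  return(0);
}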

/* -------------------------------------------------------------------*/
static struct _MatOps MatOps_Values = {0,
                                       MatGetRow_MPIAdj,
                                       MatRestoreRow_MPIAdj,
                                       0,
                                /* 4*/ 0,
                                       0,
                                       0,
                                       0,
                                       0,
                                       0,
                                /*10*/ 0,
                                       0,
                                       0,
                                       0,
                                       0,
                                /*15*/ 0,
                                       MatEqual_MPIAdj,
                                       0,
                                       0,
                                       0,
                                /*20*/ 0,
                                       0,
                                       MatSetOption_MPIAdj,
                                       0,
                                /*24*/ 0,
                                       0,
                                       0,
                                       0,
                                       0,
                                /*29*/ 0,
                                       0,
                                       0,
                                       0,
                                       0,
                                /*34*/ 0,
                                       0,
                                       0,
                                       0,
                                       0,
                                /*39*/ 0,
                                       MatCreateSubMatrices_MPIAdj,
                                       0,
                                       0,
                                       0,
                                /*44*/ 0,
                                       0,
                                       MatShift_Basic,
                                       0,
                                       0,
                                /*49*/ 0,
                                       MatGetRowIJ_MPIAdj,
                                       MatRestoreRowIJ_MPIAdj,
                                       0,
                                       0,
                                /*54*/ 0,
                                       0,
                                       0,
                                       0,
                                       0,
                                /*59*/ 0,
                                       MatDestroy_MPIAdj,
                                       MatView_MPIAdj,
                                       MatConvertFrom_MPIAdj,
                                       0,
                                /*64*/ 0,
                                       0,
                                       0,
                                       0,
                                       0,
                                /*69*/ 0,
                                       0,
                                       0,
                                       0,
                                       0,
                                /*74*/ 0,
                                       0,
                                       0,
                                       0,
                                       0,
                                /*79*/ 0,
                                       0,
                                       0,
                                       0,
                                       0,
                                /*84*/ 0,
                                       0,
                                       0,
                                       0,
                                       0,
                                /*89*/ 0,
                                       0,
                                       0,
                                       0,
                                       0,
                                /*94*/ 0,
                                       0,
                                       0,
                                       0,
                                       0,
                                /*99*/ 0,
                                       0,
                                       0,
                                       0,
                                       0,
                               /*104*/ 0,
                                       0,
                                       0,
                                       0,
                                       0,
                               /*109*/ 0,
                                       0,
                                       0,
                                       0,
                                       0,
                               /*114*/ 0,
                                       0,
                                       0,
                                       0,
                                       0,
                               /*119*/ 0,
                                       0,
                                       0,
                                       0,
                                       0,
                               /*124*/ 0,
                                       0,
                                       0,
                                       0,
                                       MatCreateSubMatricesMPI_MPIAdj,
                               /*129*/ 0,
                                       0,
                                       0,
                                       0,
                                       0,
                               /*134*/ 0,
                                       0,
                                       0,
                                       0,
                                       0,
                               /*139*/ 0,
                                       0,
                                       0
};

static PetscErrorCode  MatMPIAdjSetPreallocation_MPIAdj(Mat B,PetscInt *i,PetscInt *j,PetscInt *values)
{
  Mat_MPIAdj     *b = (Mat_MPIAdj*)B->data;
  PetscBool       useedgeweights;
#if defined(PETSC_USE_DEBUG)
  PetscInt ii;
#endif

  PetscLayoutSetUp(B->rmap);
  PetscLayoutSetUp(B->cmap);
  if (values) useedgeweights = PETSC_TRUE; else useedgeweights = PETSC_FALSE;
  /* make sure every process knows whether edge weights are in use */
  MPIU_Allreduce((int*)&useedgeweights,(int*)&b->useedgeweights,1,MPI_INT,MPI_MAX,PetscObjectComm((PetscObject)B));

#if defined(PETSC_USE_DEBUG)
  if (i[0] != 0) SETERRQ1(PETSC_COMM_SELF,PETSC_ERR_ARG_OUTOFRANGE,"First i[] index must be zero, instead it is %D\n",i[0]);
  for (ii=1; ii<=B->rmap->n; ii++) {
    if (i[ii] < 0 || i[ii] < i[ii-1]) SETERRQ4(PETSC_COMM_SELF,PETSC_ERR_ARG_OUTOFRANGE,"i[%D]=%D index is out of range: i[%D]=%D",ii,i[ii],ii-1,i[ii-1]);
  }
  for (ii=0; ii<i[B->rmap->n]; ii++) {
    if (j[ii] < 0 || j[ii] >= B->cmap->N) SETERRQ2(PETSC_COMM_SELF,PETSC_ERR_ARG_OUTOFRANGE,"Column index j[%D]=%D out of range\n",ii,j[ii]);
  }
#endif
  B->preallocated = PETSC_TRUE;

  b->j      = j;
  b->i      = i;
  b->values = values;

  b->nz        = i[B->rmap->n];
  b->diag      = 0;
  b->symmetric = PETSC_FALSE;
  b->freeaij   = PETSC_TRUE;

  MatAssemblyBegin(B,MAT_FINAL_ASSEMBLY);
  MatAssemblyEnd(B,MAT_FINAL_ASSEMBLY);
  return(0);
}

static PetscErrorCode MatMPIAdjCreateNonemptySubcommMat_MPIAdj(Mat A,Mat *B)
{
  Mat_MPIAdj     *a = (Mat_MPIAdj*)A->data;
  const PetscInt *ranges;
  MPI_Comm       acomm,bcomm;
  MPI_Group      agroup,bgroup;
  PetscMPIInt    i,rank,size,nranks,*ranks;

  *B    = NULL;
  PetscObjectGetComm((PetscObject)A,&acomm);
  MPI_Comm_size(acomm,&size);
  MPI_Comm_rank(acomm,&rank);
  MatGetOwnershipRanges(A,&ranges);
  for (i=0,nranks=0; i<size; i++) {
    if (ranges[i+1] - ranges[i] > 0) nranks++;
  }
  if (nranks == size) {         /* all ranks have a positive number of rows, so we do not need to create a subcomm */
    PetscObjectReference((PetscObject)A);
    *B   = A;
    return(0);
  }

  PetscMalloc1(nranks,&ranks);
  for (i=0,nranks=0; i<size; i++) {
    if (ranges[i+1] - ranges[i] > 0) ranks[nranks++] = i;
  }
  MPI_Comm_group(acomm,&agroup);
  MPI_Group_incl(agroup,nranks,ranks,&bgroup);
  PetscFree(ranks);
  MPI_Comm_create(acomm,bgroup,&bcomm);
  MPI_Group_free(&agroup);
  MPI_Group_free(&bgroup);
  if (bcomm != MPI_COMM_NULL) {
    PetscInt   m,N;
    Mat_MPIAdj *b;
    MatGetLocalSize(A,&m,NULL);
    MatGetSize(A,NULL,&N);
    MatCreateMPIAdj(bcomm,m,N,a->i,a->j,a->values,B);
    b          = (Mat_MPIAdj*)(*B)->data;
    /* the new matrix borrows A's arrays, so it must not free them */
    b->freeaij = PETSC_FALSE;
    MPI_Comm_free(&bcomm);
  }
  return(0);
}

PetscErrorCode  MatMPIAdjToSeq_MPIAdj(Mat A,Mat *B)
{
  PetscInt       M,N,*II,*J,NZ,nz,m,nzstart,i;
  PetscInt       *Values = NULL;
  Mat_MPIAdj     *adj = (Mat_MPIAdj*)A->data;
  PetscMPIInt    mnz,mm,*allnz,*allm,size,*dispnz,*dispm;

  MPI_Comm_size(PetscObjectComm((PetscObject)A),&size);
  MatGetSize(A,&M,&N);
  MatGetLocalSize(A,&m,NULL);
  nz   = adj->nz;
  if (adj->i[m] != nz) SETERRQ2(PETSC_COMM_SELF,PETSC_ERR_PLIB,"nz %D not correct i[m] %D",nz,adj->i[m]);
  MPI_Allreduce(&nz,&NZ,1,MPIU_INT,MPI_SUM,PetscObjectComm((PetscObject)A));

  PetscMPIIntCast(nz,&mnz);
  PetscMalloc2(size,&allnz,size,&dispnz);
  MPI_Allgather(&mnz,1,MPI_INT,allnz,1,MPI_INT,PetscObjectComm((PetscObject)A));
  dispnz[0] = 0; for (i=1; i<size; i++) dispnz[i] = dispnz[i-1]+allnz[i-1];
  if (adj->values) {
    PetscMalloc1(NZ,&Values);
    MPI_Allgatherv(adj->values,mnz,MPIU_INT,Values,allnz,dispnz,MPIU_INT,PetscObjectComm((PetscObject)A));
  }
  PetscMalloc1(NZ,&J);
  MPI_Allgatherv(adj->j,mnz,MPIU_INT,J,allnz,dispnz,MPIU_INT,PetscObjectComm((PetscObject)A));
  PetscFree2(allnz,dispnz);
  MPI_Scan(&nz,&nzstart,1,MPIU_INT,MPI_SUM,PetscObjectComm((PetscObject)A));
  nzstart -= nz;
  /* shift the i[] values so they will be correct after being received */
  for (i=0; i<m; i++) adj->i[i] += nzstart;
  PetscMalloc1(M+1,&II);
  PetscMPIIntCast(m,&mm);
  PetscMalloc2(size,&allm,size,&dispm);
  MPI_Allgather(&mm,1,MPI_INT,allm,1,MPI_INT,PetscObjectComm((PetscObject)A));
  dispm[0] = 0; for (i=1; i<size; i++) dispm[i] = dispm[i-1]+allm[i-1];
  MPI_Allgatherv(adj->i,mm,MPIU_INT,II,allm,dispm,MPIU_INT,PetscObjectComm((PetscObject)A));
  PetscFree2(allm,dispm);
  II[M] = NZ;
  /* shift the i[] values back */
  for (i=0; i<m; i++) adj->i[i] -= nzstart;
  MatCreateMPIAdj(PETSC_COMM_SELF,M,N,II,J,Values,B);
  return(0);
}

/*@
   MatMPIAdjCreateNonemptySubcommMat - create the same MPIAdj matrix on a subcommunicator containing only processes owning a positive number of rows

   Collective

   Input Arguments:
.  A - original MPIAdj matrix

   Output Arguments:
.  B - matrix on subcommunicator, NULL on ranks that owned zero rows of A

   Level: developer

   Note:
   This function is mostly useful for internal use by mesh partitioning packages that require that every process owns at least one row.

   The matrix B should be destroyed with MatDestroy(). The arrays are not copied, so B should be destroyed before A is destroyed.

.seealso: MatCreateMPIAdj()
@*/
PetscErrorCode MatMPIAdjCreateNonemptySubcommMat(Mat A,Mat *B)
{

  PetscUseMethod(A,"MatMPIAdjCreateNonemptySubcommMat_C",(Mat,Mat*),(A,B));
  return(0);
}
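
/*
 * An illustrative sketch (not part of the original source): typical use of
 * MatMPIAdjCreateNonemptySubcommMat(). B is NULL on ranks that own no rows of A,
 * and B borrows A's arrays rather than copying them, so B must be destroyed
 * before A. The function name is hypothetical.
 */
static PetscErrorCode SketchNonemptySubcomm(Mat A)
{
  Mat B;

  MatMPIAdjCreateNonemptySubcommMat(A,&B);
  if (B) {
    /* ... run a partitioner that requires every rank to own at least one row ... */
    MatDestroy(&B);   /* destroy B while A (and its arrays) still exist */
  }
  return(0);
}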

/*MC
   MATMPIADJ - MATMPIADJ = "mpiadj" - A matrix type to be used for distributed adjacency matrices,
   intended for use in constructing orderings and partitionings.

  Level: beginner

.seealso: MatCreateMPIAdj()
M*/

PETSC_EXTERN PetscErrorCode MatCreate_MPIAdj(Mat B)
{
  Mat_MPIAdj     *b;

  PetscNewLog(B,&b);
  B->data      = (void*)b;
  PetscMemcpy(B->ops,&MatOps_Values,sizeof(struct _MatOps));
  B->assembled = PETSC_FALSE;

  PetscObjectComposeFunction((PetscObject)B,"MatMPIAdjSetPreallocation_C",MatMPIAdjSetPreallocation_MPIAdj);
  PetscObjectComposeFunction((PetscObject)B,"MatMPIAdjCreateNonemptySubcommMat_C",MatMPIAdjCreateNonemptySubcommMat_MPIAdj);
  PetscObjectComposeFunction((PetscObject)B,"MatMPIAdjToSeq_C",MatMPIAdjToSeq_MPIAdj);
  PetscObjectChangeTypeName((PetscObject)B,MATMPIADJ);
  return(0);
}

/*@C
   MatMPIAdjToSeq - Converts a parallel MPIAdj matrix to a complete MPIAdj matrix on each process (needed by sequential preconditioners)

   Logically Collective

   Input Parameter:
.  A - the matrix

   Output Parameter:
.  B - the same matrix on all processes

   Level: intermediate

.seealso: MatCreate(), MatCreateMPIAdj(), MatSetValues()
@*/
PetscErrorCode  MatMPIAdjToSeq(Mat A,Mat *B)
{

  PetscUseMethod(A,"MatMPIAdjToSeq_C",(Mat,Mat*),(A,B));
  return(0);
}
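
/*
 * An illustrative sketch (not part of the original source): gathering a distributed
 * adjacency matrix onto every process, e.g. for a sequential ordering or
 * partitioning package. Each rank receives its own complete copy on
 * PETSC_COMM_SELF. The function name is hypothetical.
 */
static PetscErrorCode SketchAdjToSeq(Mat A)
{
  Mat Aseq;

  MatMPIAdjToSeq(A,&Aseq);
  /* ... Aseq now holds the entire graph on this process ... */
  MatDestroy(&Aseq);
  return(0);
}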

/*@C
   MatMPIAdjSetPreallocation - Sets the arrays used for storing the matrix entries

   Logically Collective

   Input Parameters:
+  B - the matrix
.  i - the indices into j for the start of each row
.  j - the column indices for each row (sorted for each row).
       The indices in i and j start with zero (NOT with one).
-  values - [optional] edge weights

   Level: intermediate

.seealso: MatCreate(), MatCreateMPIAdj(), MatSetValues()
@*/
PetscErrorCode  MatMPIAdjSetPreallocation(Mat B,PetscInt *i,PetscInt *j,PetscInt *values)
{

  PetscTryMethod(B,"MatMPIAdjSetPreallocation_C",(Mat,PetscInt*,PetscInt*,PetscInt*),(B,i,j,values));
  return(0);
}
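
/*
 * An illustrative sketch (not part of the original source): filling the i/j arrays
 * for a matrix with two local rows and three entries. It assumes B was created with
 * MatCreate()/MatSetSizes() (two local rows, at least five global columns) and
 * MatSetType(B,MATMPIADJ). The arrays must come from PetscMalloc1() because the
 * matrix takes ownership and frees them in MatDestroy(); do not free them yourself.
 */
static PetscErrorCode SketchAdjPreallocation(Mat B)
{
  PetscInt *i,*j;

  PetscMalloc1(3,&i);             /* nlocalrows+1 = 3 row offsets     */
  PetscMalloc1(3,&j);             /* 3 column indices in total        */
  i[0] = 0; i[1] = 2; i[2] = 3;   /* row 0 has 2 entries, row 1 has 1 */
  j[0] = 1; j[1] = 4;             /* row 0: columns 1 and 4 (sorted)  */
  j[2] = 0;                       /* row 1: column 0                  */
  MatMPIAdjSetPreallocation(B,i,j,NULL);  /* NULL: no edge weights */
  return(0);
}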

/*@C
   MatCreateMPIAdj - Creates a sparse matrix representing an adjacency list.
   The matrix does not have numerical values associated with it, but is
   intended for ordering (to reduce bandwidth etc) and partitioning.

   Collective

   Input Parameters:
+  comm - MPI communicator
.  m - number of local rows
.  N - number of global columns
.  i - the indices into j for the start of each row
.  j - the column indices for each row (sorted for each row).
       The indices in i and j start with zero (NOT with one).
-  values - [optional] edge weights

   Output Parameter:
.  A - the matrix

   Level: intermediate

   Notes:
   This matrix object does not support most matrix operations, including
   MatSetValues().
   You must NOT free the i, j, and values arrays yourself. PETSc will free them
   when the matrix is destroyed; you must allocate them with PetscMalloc(). If you
   call from Fortran you need not create the arrays with PetscMalloc().
   The arrays should not include the matrix diagonal entries.

   If you already have a matrix, you can create its adjacency matrix by a call
   to MatConvert(), specifying a type of MATMPIADJ.

   Possible values for MatSetOption() - MAT_STRUCTURALLY_SYMMETRIC

.seealso: MatCreate(), MatConvert(), MatGetOrdering()
@*/
PetscErrorCode  MatCreateMPIAdj(MPI_Comm comm,PetscInt m,PetscInt N,PetscInt *i,PetscInt *j,PetscInt *values,Mat *A)
{

  MatCreate(comm,A);
  MatSetSizes(*A,m,PETSC_DETERMINE,PETSC_DETERMINE,N);
  MatSetType(*A,MATMPIADJ);
  MatMPIAdjSetPreallocation(*A,i,j,values);
  return(0);
}
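
/*
 * An illustrative sketch (not part of the original source): building an MPIADJ
 * matrix for a global ring graph (vertex v is adjacent to v-1 and v+1 modulo N)
 * and handing it to a partitioner. Each rank owns m consecutive vertices, so
 * N = size*m; the sketch assumes N >= 3 so the two neighbors are distinct. The
 * arrays are PetscMalloc1'd and ownership passes to the matrix. All names are
 * local to this sketch.
 */
static PetscErrorCode SketchRingPartition(MPI_Comm comm,PetscInt m)
{
  Mat             adj;
  MatPartitioning part;
  IS              is;
  PetscInt        i,N,rstart,*ia,*ja;
  PetscMPIInt     size,rank;

  MPI_Comm_size(comm,&size);
  MPI_Comm_rank(comm,&rank);
  N      = ((PetscInt)size)*m;
  rstart = ((PetscInt)rank)*m;
  PetscMalloc1(m+1,&ia);
  PetscMalloc1(2*m,&ja);
  ia[0] = 0;
  for (i=0; i<m; i++) {
    PetscInt v = rstart+i,left = (v+N-1)%N,right = (v+1)%N;
    ja[ia[i]]   = PetscMin(left,right);  /* keep each row sorted */
    ja[ia[i]+1] = PetscMax(left,right);
    ia[i+1]     = ia[i]+2;
  }
  MatCreateMPIAdj(comm,m,N,ia,ja,NULL,&adj);
  MatPartitioningCreate(comm,&part);
  MatPartitioningSetAdjacency(part,adj);
  MatPartitioningSetFromOptions(part);
  MatPartitioningApply(part,&is);       /* is[i] = new owner of local vertex i */
  ISDestroy(&is);
  MatPartitioningDestroy(&part);
  MatDestroy(&adj);                     /* frees ia and ja as well */
  return(0);
}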