Actual source code: mis.c

petsc-3.5.4 2015-05-23
#include <petsc-private/matimpl.h>    /*I "petscmat.h" I*/
#include <../src/mat/impls/aij/seq/aij.h>
#include <../src/mat/impls/aij/mpi/mpiaij.h>
#include <petscsf.h>

#define MIS_NOT_DONE -2
#define MIS_DELETED  -1
#define MIS_REMOVED  -3
#define MIS_IS_SELECTED(s) (s!=MIS_DELETED && s!=MIS_NOT_DONE && s!=MIS_REMOVED)
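
/* A vertex state is either one of the negative sentinels above or, once the
   vertex is selected, its own non-negative global row index.  A sketch of the
   encoding used below (my0 is the first locally owned global index):

     lid_state[lid] = MIS_NOT_DONE;    undecided
     lid_state[lid] = MIS_DELETED;     claimed by a selected neighbor
     lid_state[lid] = MIS_REMOVED;     singleton, dropped from the graph
     lid_state[lid] = lid + my0;       selected; the state doubles as its gid

   so MIS_IS_SELECTED(s) simply checks that s is none of the three sentinels. */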

/* -------------------------------------------------------------------------- */
/*
   maxIndSetAgg - parallel maximal independent set (MIS) with data locality info. MatAIJ specific!

   Input Parameters:
.  perm - serial permutation of the local rows; vertices are visited in this order
.  Gmat - global matrix of the graph (matrix values are not used)
.  strict_aggs - whether to keep strict (non-overlapping) aggregates in 'a_locals_llist'
.  verbose - verbosity level (0 is silent)

   Output Parameter:
.  a_locals_llist - list of nodes rooted at each selected node; for !strict_aggs the lists also contain 'ghost' nodes, appended with natural local indices
*/
#undef __FUNCT__
#define __FUNCT__ "maxIndSetAgg"
PetscErrorCode maxIndSetAgg(IS perm,Mat Gmat,PetscBool strict_aggs,PetscInt verbose,PetscCoarsenData **a_locals_llist)
{
  PetscErrorCode   ierr;
  Mat_SeqAIJ       *matA,*matB=NULL;
  Mat_MPIAIJ       *mpimat=NULL;
  MPI_Comm         comm;
  PetscInt         num_fine_ghosts,kk,n,ix,j,*idx,*ii,iter,Iend,my0,nremoved,gid,lid,cpid,lidj,sgid,t1,t2,slid,nDone,nselected=0,state,statej;
  PetscInt         *cpcol_gid,*cpcol_state,*lid_cprowID,*lid_gid,*cpcol_sel_gid,*icpcol_gid,*lid_state,*lid_parent_gid=NULL;
  PetscBool        *lid_removed;
  PetscBool        isMPI,isAIJ,isOK;
  PetscMPIInt      mype,npe;
  const PetscInt   *perm_ix;
  const PetscInt   nloc = Gmat->rmap->n;
  PetscCoarsenData *agg_lists;
  PetscLayout      layout;
  PetscSF          sf;

  PetscFunctionBegin;
  ierr = PetscObjectGetComm((PetscObject)Gmat,&comm);CHKERRQ(ierr);
  ierr = MPI_Comm_rank(comm, &mype);CHKERRQ(ierr);
  ierr = MPI_Comm_size(comm, &npe);CHKERRQ(ierr);

  /* get submatrices */
  ierr = PetscObjectTypeCompare((PetscObject)Gmat,MATMPIAIJ,&isMPI);CHKERRQ(ierr);
  if (isMPI) {
    mpimat = (Mat_MPIAIJ*)Gmat->data;
    matA   = (Mat_SeqAIJ*)mpimat->A->data;
    matB   = (Mat_SeqAIJ*)mpimat->B->data;
    /* force compressed storage of B */
    ierr = MatCheckCompressedRow(mpimat->B,matB->nonzerorowcnt,&matB->compressedrow,matB->i,Gmat->rmap->n,-1.0);CHKERRQ(ierr);
  } else {
    ierr = PetscObjectTypeCompare((PetscObject)Gmat,MATSEQAIJ,&isAIJ);CHKERRQ(ierr);
    matA = (Mat_SeqAIJ*)Gmat->data;
  }
  ierr = MatGetOwnershipRange(Gmat,&my0,&Iend);CHKERRQ(ierr);
  ierr = PetscMalloc1(nloc,&lid_gid);CHKERRQ(ierr); /* explicit array needed */
  if (mpimat) {
    for (kk=0,gid=my0; kk<nloc; kk++,gid++) {
      lid_gid[kk] = gid;
    }
    ierr = VecGetLocalSize(mpimat->lvec, &num_fine_ghosts);CHKERRQ(ierr);
    ierr = PetscMalloc1(num_fine_ghosts,&cpcol_gid);CHKERRQ(ierr);
    ierr = PetscMalloc1(num_fine_ghosts,&cpcol_state);CHKERRQ(ierr);
    ierr = PetscSFCreate(PetscObjectComm((PetscObject)Gmat),&sf);CHKERRQ(ierr);
    ierr = MatGetLayouts(Gmat,&layout,NULL);CHKERRQ(ierr);
    ierr = PetscSFSetGraphLayout(sf,layout,num_fine_ghosts,NULL,PETSC_COPY_VALUES,mpimat->garray);CHKERRQ(ierr);
    ierr = PetscSFBcastBegin(sf,MPIU_INT,lid_gid,cpcol_gid);CHKERRQ(ierr);
    ierr = PetscSFBcastEnd(sf,MPIU_INT,lid_gid,cpcol_gid);CHKERRQ(ierr);
    for (kk=0;kk<num_fine_ghosts;kk++) {
      cpcol_state[kk]=MIS_NOT_DONE;
    }
  } else num_fine_ghosts = 0;
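
  /* The PetscSF built above connects each local ghost slot (leaf) to the row
     of the owning process (root); PetscSFBcastBegin/End then pulls root data
     out to the ghosts.  A sketch of the pattern, reused for every exchange in
     this routine:

       root data :  lid_gid[]       one entry per locally owned row
       leaf data :  cpcol_gid[]     one entry per ghost column of B
       after PetscSFBcastBegin/End(sf,MPIU_INT,lid_gid,cpcol_gid):
                    cpcol_gid[cpid] == global row index of ghost cpid

     lid_state -> cpcol_state and lid_parent_gid -> cpcol_sel_gid below use
     the same SF. */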

  ierr = PetscMalloc1(nloc, &lid_cprowID);CHKERRQ(ierr);
  ierr = PetscMalloc1(nloc, &lid_removed);CHKERRQ(ierr); /* explicit array needed */
  if (strict_aggs) {
    ierr = PetscMalloc1(nloc,&lid_parent_gid);CHKERRQ(ierr);
  }
  ierr = PetscMalloc1(nloc,&lid_state);CHKERRQ(ierr);

  /* the lists need slots for ghost nodes (with local indexing) when !strict_aggs */
  ierr = PetscCDCreate(strict_aggs ? nloc : num_fine_ghosts+nloc, &agg_lists);CHKERRQ(ierr);
  if (a_locals_llist) *a_locals_llist = agg_lists;

  /* need an inverse map - locals */
  for (kk=0; kk<nloc; kk++) {
    lid_cprowID[kk] = -1; lid_removed[kk] = PETSC_FALSE;
    if (strict_aggs) {
      lid_parent_gid[kk] = -1; /* no selected parent yet */
    }
    lid_state[kk] = MIS_NOT_DONE;
  }
  /* set index into compressed row 'lid_cprowID' */
  if (matB) {
    for (ix=0; ix<matB->compressedrow.nrows; ix++) {
      lid = matB->compressedrow.rindex[ix];
      lid_cprowID[lid] = ix;
    }
  }
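
  /* For example (illustrative values only): if only local rows 3 and 7 have
     off-processor entries, then compressedrow.rindex = {3,7} and

       lid_cprowID[3] = 0, lid_cprowID[7] = 1, all other entries -1,

     so lid_cprowID[lid] != -1 is the cheap "has ghost neighbors" test below. */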
  /* MIS */
  iter = nremoved = nDone = 0;
  ierr = ISGetIndices(perm, &perm_ix);CHKERRQ(ierr);
  while (PETSC_TRUE) { /* synchronous sweeps; an asynchronous version is not implemented */
    iter++;
    /* check all vertices */
    for (kk=0; kk<nloc; kk++) {
      lid   = perm_ix[kk];
      state = lid_state[lid];
      if (lid_removed[lid]) continue;
      if (state == MIS_NOT_DONE) {
        /* parallel test, delete if selected ghost */
        isOK = PETSC_TRUE;
        if ((ix=lid_cprowID[lid]) != -1) { /* if I have any ghost neighbors */
          ii  = matB->compressedrow.i; n = ii[ix+1] - ii[ix];
          idx = matB->j + ii[ix];
          for (j=0; j<n; j++) {
            cpid   = idx[j]; /* compressed row ID in B mat */
            gid    = cpcol_gid[cpid];
            statej = cpcol_state[cpid];
            if (statej == MIS_NOT_DONE && gid >= Iend) { /* proxy for (owner rank > mype); ownership ranges are contiguous */
              isOK = PETSC_FALSE; /* can not delete */
              break;
            }
          }
        } /* parallel test */
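        /* Worked tie-break example (illustrative ranges): with contiguous
           ownership, say rank 0 owns gids [0,100) and rank 1 owns [100,200),
           and gids 99 and 100 are neighbors across the cut.  Rank 0 sees the
           undecided ghost gid 100 >= its Iend (100) and defers; rank 1 sees
           ghost gid 99 < its Iend (200) and may decide vertex 100 now.  Rank
           0 retests after the ghost states arrive, so the two ranks never
           select adjacent vertices in the same sweep. */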
        if (isOK) { /* select or remove this vertex */
          nDone++;
          /* check for singleton */
          ii = matA->i; n = ii[lid+1] - ii[lid];
          if (n < 2) {
            /* if I have any ghost adjacency then this is not a singleton */
            ix = lid_cprowID[lid];
            if (ix==-1 || (matB->compressedrow.i[ix+1]-matB->compressedrow.i[ix])==0) {
              nremoved++;
              lid_removed[lid] = PETSC_TRUE;
              /* technically in the MIS, but singletons are removed rather than selected */
              continue; /* one local adjacency (self) and no ghosts - singleton */
            }
          }
          /* SELECTED state encoded with the vertex's own global index */
          lid_state[lid] = lid+my0;
          nselected++;
          if (strict_aggs) {
            ierr = PetscCDAppendID(agg_lists, lid, lid+my0);CHKERRQ(ierr);
          } else {
            ierr = PetscCDAppendID(agg_lists, lid, lid);CHKERRQ(ierr);
          }
          /* delete local adjacency */
          idx = matA->j + ii[lid];
          for (j=0; j<n; j++) {
            lidj   = idx[j];
            statej = lid_state[lidj];
            if (statej == MIS_NOT_DONE) {
              nDone++;
              if (strict_aggs) {
                ierr = PetscCDAppendID(agg_lists, lid, lidj+my0);CHKERRQ(ierr);
              } else {
                ierr = PetscCDAppendID(agg_lists, lid, lidj);CHKERRQ(ierr);
              }
              lid_state[lidj] = MIS_DELETED;  /* delete this */
            }
          }
          /* delete ghost adjacency of lid - deleted ghosts are handled later for strict_aggs */
          if (!strict_aggs) {
            if ((ix=lid_cprowID[lid]) != -1) { /* if I have any ghost neighbors */
              ii  = matB->compressedrow.i; n = ii[ix+1] - ii[ix];
              idx = matB->j + ii[ix];
              for (j=0; j<n; j++) {
                cpid   = idx[j]; /* compressed row ID in B mat */
                statej = cpcol_state[cpid];
                if (statej == MIS_NOT_DONE) {
                  ierr = PetscCDAppendID(agg_lists, lid, nloc+cpid);CHKERRQ(ierr);
                }
              }
            }
          }
        } /* selected */
      } /* not done vertex */
    } /* vertex loop */

    /* update ghost states and count todos */
    if (mpimat) {
      /* scatter states, check for done */
      ierr = PetscSFBcastBegin(sf,MPIU_INT,lid_state,cpcol_state);CHKERRQ(ierr);
      ierr = PetscSFBcastEnd(sf,MPIU_INT,lid_state,cpcol_state);CHKERRQ(ierr);
      ii   = matB->compressedrow.i;
      for (ix=0; ix<matB->compressedrow.nrows; ix++) {
        lid   = matB->compressedrow.rindex[ix]; /* local boundary node */
        state = lid_state[lid];
        if (state == MIS_NOT_DONE) {
          /* look at ghosts */
          n   = ii[ix+1] - ii[ix];
          idx = matB->j + ii[ix];
          for (j=0; j<n; j++) {
            cpid   = idx[j]; /* compressed row ID in B mat */
            statej = cpcol_state[cpid];
            if (MIS_IS_SELECTED(statej)) { /* a ghost neighbor was selected, so lid is now deleted */
              nDone++;
              lid_state[lid] = MIS_DELETED; /* delete this */
              if (!strict_aggs) {
                lidj = nloc + cpid;
                ierr = PetscCDAppendID(agg_lists, lidj, lid);CHKERRQ(ierr);
              } else {
                sgid = cpcol_gid[cpid];
                lid_parent_gid[lid] = sgid; /* keep track of the selected (off-process) vertex that I belong to */
              }
              break;
            }
          }
        }
      }
      /* all done? */
      t1   = nloc - nDone;
      ierr = MPI_Allreduce(&t1, &t2, 1, MPIU_INT, MPI_SUM, comm);CHKERRQ(ierr); /* synchronous version */
      if (t2 == 0) break;
    } else break; /* all done */
  } /* outer parallel MIS loop */
  ierr = ISRestoreIndices(perm,&perm_ix);CHKERRQ(ierr);

  if (verbose) {
    if (verbose == 1) {
      ierr = PetscPrintf(comm,"\t[%d]%s removed %D of %D vertices.  %D selected.\n",mype,__FUNCT__,nremoved,nloc,nselected);CHKERRQ(ierr);
    } else {
      ierr = MPI_Allreduce(&nremoved, &n, 1, MPIU_INT, MPI_SUM, comm);CHKERRQ(ierr);
      ierr = MatGetSize(Gmat, &kk, &j);CHKERRQ(ierr);
      ierr = MPI_Allreduce(&nselected, &j, 1, MPIU_INT, MPI_SUM, comm);CHKERRQ(ierr); /* j is reused for the global selected count */
      ierr = PetscPrintf(comm,"\t[%d]%s removed %D of %D vertices. (%D local)  %D selected.\n",mype,__FUNCT__,n,kk,nremoved,j);CHKERRQ(ierr);
    }
  }

  /* tell adjacent processes who my lid_parent_gid vertices belong to - fill in the selected ghost lists of agg_lists */
  if (strict_aggs && matB) {
    /* need to copy this into a free buffer -- should do this globally */
    ierr = PetscMalloc1(num_fine_ghosts, &cpcol_sel_gid);CHKERRQ(ierr);
    ierr = PetscMalloc1(num_fine_ghosts, &icpcol_gid);CHKERRQ(ierr);
    for (cpid=0; cpid<num_fine_ghosts; cpid++) icpcol_gid[cpid] = cpcol_gid[cpid];

    /* get the selected parent gid of each deleted ghost */
    ierr = PetscSFBcastBegin(sf,MPIU_INT,lid_parent_gid,cpcol_sel_gid);CHKERRQ(ierr);
    ierr = PetscSFBcastEnd(sf,MPIU_INT,lid_parent_gid,cpcol_sel_gid);CHKERRQ(ierr);
    for (cpid=0; cpid<num_fine_ghosts; cpid++) {
      sgid = cpcol_sel_gid[cpid];
      gid  = icpcol_gid[cpid];
      if (sgid >= my0 && sgid < Iend) { /* I own the selected parent, so the deleted ghost joins its aggregate */
        slid = sgid - my0;
        ierr = PetscCDAppendID(agg_lists, slid, gid);CHKERRQ(ierr);
      }
    }
    ierr = PetscFree(icpcol_gid);CHKERRQ(ierr);
    ierr = PetscFree(cpcol_sel_gid);CHKERRQ(ierr);
  }
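
  /* Worked example of the exchange above (illustrative gids): suppose rank 1
     owns vertex g, deleted in favor of rank 0's selected vertex s, so rank 1
     recorded lid_parent_gid[local(g)] == gid(s).  After the broadcast, rank 0
     (which ghosts g) sees cpcol_sel_gid[cpid(g)] == gid(s); gid(s) lies in
     rank 0's ownership range, so rank 0 appends gid(g) to the aggregate
     rooted at s, yielding strict aggregates with off-process members. */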
  if (mpimat) {
    ierr = PetscSFDestroy(&sf);CHKERRQ(ierr);
    ierr = PetscFree(cpcol_gid);CHKERRQ(ierr);
    ierr = PetscFree(cpcol_state);CHKERRQ(ierr);
  }
  ierr = PetscFree(lid_cprowID);CHKERRQ(ierr);
  ierr = PetscFree(lid_gid);CHKERRQ(ierr);
  ierr = PetscFree(lid_removed);CHKERRQ(ierr);
  if (strict_aggs) {
    ierr = PetscFree(lid_parent_gid);CHKERRQ(ierr);
  }
  ierr = PetscFree(lid_state);CHKERRQ(ierr);
  PetscFunctionReturn(0);
}
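
/* A minimal traversal sketch (not part of the original file) showing how the
   aggregate lists built above can be walked.  It assumes the PetscCD
   iteration helpers PetscCDGetHeadPos(), PetscCDGetNextPos() and
   PetscLLNGetID() declared alongside PetscCoarsenData in
   <petsc-private/matimpl.h>; verify the names against your PETSc version.
   Guarded out so it does not change what is compiled. */
#if defined(MIS_USAGE_SKETCH)
static PetscErrorCode printAggregates(PetscCoarsenData *agg_lists,PetscInt nloc,PetscInt my0)
{
  PetscErrorCode ierr;
  PetscInt       lid;

  PetscFunctionBegin;
  for (lid=0; lid<nloc; lid++) {
    PetscCDPos pos;
    ierr = PetscCDGetHeadPos(agg_lists,lid,&pos);CHKERRQ(ierr);
    if (!pos) continue; /* no aggregate rooted at this vertex */
    ierr = PetscPrintf(PETSC_COMM_SELF,"aggregate rooted at gid %D:",lid+my0);CHKERRQ(ierr);
    while (pos) {
      PetscInt id;
      ierr = PetscLLNGetID(pos,&id);CHKERRQ(ierr); /* the ID stored by PetscCDAppendID() */
      ierr = PetscCDGetNextPos(agg_lists,lid,&pos);CHKERRQ(ierr);
      ierr = PetscPrintf(PETSC_COMM_SELF," %D",id);CHKERRQ(ierr);
    }
    ierr = PetscPrintf(PETSC_COMM_SELF,"\n");CHKERRQ(ierr);
  }
  PetscFunctionReturn(0);
}
#endif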

typedef struct {
  int dummy;
} MatCoarsen_MIS;

/*
   MatCoarsenApply_MIS - MIS coarsening, simple greedy.
*/
#undef __FUNCT__
#define __FUNCT__ "MatCoarsenApply_MIS"
static PetscErrorCode MatCoarsenApply_MIS(MatCoarsen coarse)
{
  PetscErrorCode ierr;
  Mat            mat = coarse->graph;

  PetscFunctionBegin;
  if (!coarse->perm) {
    IS       perm;
    PetscInt n,m;
    MPI_Comm comm;
    ierr = PetscObjectGetComm((PetscObject)mat,&comm);CHKERRQ(ierr);
    ierr = MatGetLocalSize(mat, &m, &n);CHKERRQ(ierr);
    ierr = ISCreateStride(comm, m, 0, 1, &perm);CHKERRQ(ierr); /* identity ordering by default */
    ierr = maxIndSetAgg(perm, mat, coarse->strict_aggs, coarse->verbose, &coarse->agg_lists);CHKERRQ(ierr);
    ierr = ISDestroy(&perm);CHKERRQ(ierr);
  } else {
    ierr = maxIndSetAgg(coarse->perm, mat, coarse->strict_aggs, coarse->verbose, &coarse->agg_lists);CHKERRQ(ierr);
  }
  PetscFunctionReturn(0);
}

#undef __FUNCT__
#define __FUNCT__ "MatCoarsenView_MIS"
PetscErrorCode MatCoarsenView_MIS(MatCoarsen coarse,PetscViewer viewer)
{
  PetscErrorCode ierr;
  PetscMPIInt    rank;
  PetscBool      iascii;

  PetscFunctionBegin;
  ierr = MPI_Comm_rank(PetscObjectComm((PetscObject)coarse),&rank);CHKERRQ(ierr);
  ierr = PetscObjectTypeCompare((PetscObject)viewer,PETSCVIEWERASCII,&iascii);CHKERRQ(ierr);
  if (iascii) {
    ierr = PetscViewerASCIISynchronizedPrintf(viewer,"  [%d] MIS aggregator\n",rank);CHKERRQ(ierr);
    ierr = PetscViewerFlush(viewer);CHKERRQ(ierr);
    ierr = PetscViewerASCIISynchronizedAllow(viewer,PETSC_FALSE);CHKERRQ(ierr);
  }
  PetscFunctionReturn(0);
}

#undef __FUNCT__
#define __FUNCT__ "MatCoarsenDestroy_MIS"
PetscErrorCode MatCoarsenDestroy_MIS(MatCoarsen coarse)
{
  MatCoarsen_MIS *MIS = (MatCoarsen_MIS*)coarse->subctx;
  PetscErrorCode ierr;

  PetscFunctionBegin;
  ierr = PetscFree(MIS);CHKERRQ(ierr);
  PetscFunctionReturn(0);
}

/*MC
   MATCOARSENMIS - A coarsener that builds aggregates with a parallel maximal independent set (MIS) algorithm.

   Collective on MPI_Comm

   Input Parameter:
.  coarse - the coarsen context

   Options Database Keys:
+  -mat_coarsen_MIS_xxx -

   Level: beginner

.keywords: Coarsen, create, context

.seealso: MatCoarsenSetType(), MatCoarsenType

M*/
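
/* A hedged usage sketch: roughly how a caller (PCGAMG does essentially this)
   drives the MIS coarsener through the public MatCoarsen interface.  Gmat is
   an assumed, already-assembled MATAIJ adjacency matrix on communicator comm:

     MatCoarsen       crs;
     PetscCoarsenData *agg_lists;
     ierr = MatCoarsenCreate(comm,&crs);CHKERRQ(ierr);
     ierr = MatCoarsenSetType(crs,MATCOARSENMIS);CHKERRQ(ierr);
     ierr = MatCoarsenSetAdjacency(crs,Gmat);CHKERRQ(ierr);
     ierr = MatCoarsenSetStrictAggs(crs,PETSC_TRUE);CHKERRQ(ierr);
     ierr = MatCoarsenApply(crs);CHKERRQ(ierr);        (dispatches to MatCoarsenApply_MIS() above)
     ierr = MatCoarsenGetData(crs,&agg_lists);CHKERRQ(ierr);
     ierr = MatCoarsenDestroy(&crs);CHKERRQ(ierr);
*/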

#undef __FUNCT__
#define __FUNCT__ "MatCoarsenCreate_MIS"
PETSC_EXTERN PetscErrorCode MatCoarsenCreate_MIS(MatCoarsen coarse)
{
  PetscErrorCode ierr;
  MatCoarsen_MIS *MIS;

  PetscFunctionBegin;
  ierr = PetscNewLog(coarse,&MIS);CHKERRQ(ierr);
  coarse->subctx = (void*)MIS;

  coarse->ops->apply   = MatCoarsenApply_MIS;
  coarse->ops->view    = MatCoarsenView_MIS;
  coarse->ops->destroy = MatCoarsenDestroy_MIS;
  /* coarse->ops->setfromoptions = MatCoarsenSetFromOptions_MIS; */
  PetscFunctionReturn(0);
}