Actual source code: mpiptap.c

petsc-master 2020-08-06

  2: /*
  3:   Defines projective product routines where A is an MPIAIJ matrix
  4:           C = P^T * A * P
  5: */
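/*
  A minimal usage sketch (illustrative only, not code from this file) of how a user-level
  call reaches these routines; A and P are assumed to be assembled MATMPIAIJ matrices with
  compatible layouts, and the fill estimate 2.0 is just an example value:

      Mat C;
      MatPtAP(A,P,MAT_INITIAL_MATRIX,2.0,&C);      symbolic + numeric product C = P^T*A*P
      ...                                          update values of A, same nonzero pattern
      MatPtAP(A,P,MAT_REUSE_MATRIX,2.0,&C);        reuse the symbolic data attached to C
      MatDestroy(&C);
*/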

  7:  #include <../src/mat/impls/aij/seq/aij.h>
  8:  #include <../src/mat/utils/freespace.h>
  9:  #include <../src/mat/impls/aij/mpi/mpiaij.h>
 10:  #include <petscbt.h>
 11:  #include <petsctime.h>
 12:  #include <petsc/private/hashmapiv.h>
 13:  #include <petsc/private/hashseti.h>
 14:  #include <petscsf.h>

 16: PetscErrorCode MatView_MPIAIJ_PtAP(Mat A,PetscViewer viewer)
 17: {
 18:   PetscErrorCode    ierr;
 19:   PetscBool         iascii;
 20:   PetscViewerFormat format;
 21:   Mat_APMPI         *ptap;

 24:   MatCheckProduct(A,1);
 25:   ptap = (Mat_APMPI*)A->product->data;
 26:   PetscObjectTypeCompare((PetscObject)viewer,PETSCVIEWERASCII,&iascii);
 27:   if (iascii) {
 28:     PetscViewerGetFormat(viewer,&format);
 29:     if (format == PETSC_VIEWER_ASCII_INFO || format == PETSC_VIEWER_ASCII_INFO_DETAIL) {
 30:       if (ptap->algType == 0) {
 31:         PetscViewerASCIIPrintf(viewer,"using scalable MatPtAP() implementation\n");
 32:       } else if (ptap->algType == 1) {
 33:         PetscViewerASCIIPrintf(viewer,"using nonscalable MatPtAP() implementation\n");
 34:       } else if (ptap->algType == 2) {
 35:         PetscViewerASCIIPrintf(viewer,"using allatonce MatPtAP() implementation\n");
 36:       } else if (ptap->algType == 3) {
 37:         PetscViewerASCIIPrintf(viewer,"using merged allatonce MatPtAP() implementation\n");
 38:       }
 39:     }
 40:   }
 41:   return(0);
 42: }
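/*
  MatView_MPIAIJ_PtAP() is attached as Cmpi->product->view by the symbolic routines below,
  so viewing the product matrix with an info format (for example -mat_view ::ascii_info;
  the exact viewer options are an assumption about typical usage) reports which PtAP
  algorithm was selected.
*/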

 44: PetscErrorCode MatDestroy_MPIAIJ_PtAP(void *data)
 45: {
 46:   PetscErrorCode      ierr;
 47:   Mat_APMPI           *ptap = (Mat_APMPI*)data;
 48:   Mat_Merge_SeqsToMPI *merge;

 51:   PetscFree2(ptap->startsj_s,ptap->startsj_r);
 52:   PetscFree(ptap->bufa);
 53:   MatDestroy(&ptap->P_loc);
 54:   MatDestroy(&ptap->P_oth);
 55:   MatDestroy(&ptap->A_loc); /* used by MatTransposeMatMult() */
 56:   MatDestroy(&ptap->Rd);
 57:   MatDestroy(&ptap->Ro);
 58:   if (ptap->AP_loc) { /* used by alg_rap */
 59:     Mat_SeqAIJ *ap = (Mat_SeqAIJ*)(ptap->AP_loc)->data;
 60:     PetscFree(ap->i);
 61:     PetscFree2(ap->j,ap->a);
 62:     MatDestroy(&ptap->AP_loc);
 63:   } else { /* used by alg_ptap */
 64:     PetscFree(ptap->api);
 65:     PetscFree(ptap->apj);
 66:   }
 67:   MatDestroy(&ptap->C_loc);
 68:   MatDestroy(&ptap->C_oth);
 69:   if (ptap->apa) {PetscFree(ptap->apa);}

 71:   MatDestroy(&ptap->Pt);

 73:   merge = ptap->merge;
 74:   if (merge) { /* used by alg_ptap */
 75:     PetscFree(merge->id_r);
 76:     PetscFree(merge->len_s);
 77:     PetscFree(merge->len_r);
 78:     PetscFree(merge->bi);
 79:     PetscFree(merge->bj);
 80:     PetscFree(merge->buf_ri[0]);
 81:     PetscFree(merge->buf_ri);
 82:     PetscFree(merge->buf_rj[0]);
 83:     PetscFree(merge->buf_rj);
 84:     PetscFree(merge->coi);
 85:     PetscFree(merge->coj);
 86:     PetscFree(merge->owners_co);
 87:     PetscLayoutDestroy(&merge->rowmap);
 88:     PetscFree(ptap->merge);
 89:   }
 90:   ISLocalToGlobalMappingDestroy(&ptap->ltog);

 92:   PetscSFDestroy(&ptap->sf);
 93:   PetscFree(ptap->c_othi);
 94:   PetscFree(ptap->c_rmti);
 95:   PetscFree(ptap);
 96:   return(0);
 97: }
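/*
  Summary of the data decomposition used by the scalable algorithm below (restating what the
  code does, not additional API): with the usual MPIAIJ splitting P = [Pd Po] into the
  diagonal and off-diagonal blocks on this rank,

      Rd     = Pd^T,  Ro = Po^T        formed in the symbolic phase
      AP_loc = A_loc * P               locally owned rows of A times the needed rows of P
      C_loc  = Rd * AP_loc             contributions to locally owned rows of C
      C_oth  = Ro * AP_loc             contributions to rows of C owned by other ranks

  C_loc is inserted directly, while row i of C_oth is added to global row p->garray[i] of C
  with ADD_VALUES, so MatAssemblyBegin/End() carries the off-process contributions.
*/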

 99: PetscErrorCode MatPtAPNumeric_MPIAIJ_MPIAIJ_scalable(Mat A,Mat P,Mat C)
100: {
101:   PetscErrorCode    ierr;
102:   Mat_MPIAIJ        *a=(Mat_MPIAIJ*)A->data,*p=(Mat_MPIAIJ*)P->data;
103:   Mat_SeqAIJ        *ad=(Mat_SeqAIJ*)(a->A)->data,*ao=(Mat_SeqAIJ*)(a->B)->data;
104:   Mat_SeqAIJ        *ap,*p_loc,*p_oth=NULL,*c_seq;
105:   Mat_APMPI         *ptap;
106:   Mat               AP_loc,C_loc,C_oth;
107:   PetscInt          i,rstart,rend,cm,ncols,row,*api,*apj,am = A->rmap->n,apnz,nout;
108:   PetscScalar       *apa;
109:   const PetscInt    *cols;
110:   const PetscScalar *vals;

113:   MatCheckProduct(C,3);
114:   ptap = (Mat_APMPI*)C->product->data;
115:   if (!ptap) SETERRQ(PetscObjectComm((PetscObject)C),PETSC_ERR_ARG_WRONGSTATE,"PtAP cannot be computed. Missing data");
116:   if (!ptap->AP_loc) SETERRQ(PetscObjectComm((PetscObject)C),PETSC_ERR_ARG_WRONGSTATE,"PtAP cannot be reused. Do not call MatProductClear()");

118:   MatZeroEntries(C);

120:   /* 1) get R = Pd^T,Ro = Po^T */
121:   if (ptap->reuse == MAT_REUSE_MATRIX) {
122:     MatTranspose(p->A,MAT_REUSE_MATRIX,&ptap->Rd);
123:     MatTranspose(p->B,MAT_REUSE_MATRIX,&ptap->Ro);
124:   }

126:   /* 2) get AP_loc */
127:   AP_loc = ptap->AP_loc;
128:   ap = (Mat_SeqAIJ*)AP_loc->data;

130:   /* 2-1) get P_oth = ptap->P_oth  and P_loc = ptap->P_loc */
131:   /*-----------------------------------------------------*/
132:   if (ptap->reuse == MAT_REUSE_MATRIX) {
133:     /* P_oth and P_loc are obtained in MatPtAPSymbolic() when reuse == MAT_INITIAL_MATRIX */
134:     MatGetBrowsOfAoCols_MPIAIJ(A,P,MAT_REUSE_MATRIX,&ptap->startsj_s,&ptap->startsj_r,&ptap->bufa,&ptap->P_oth);
135:     MatMPIAIJGetLocalMat(P,MAT_REUSE_MATRIX,&ptap->P_loc);
136:   }

138:   /* 2-2) compute numeric A_loc*P - dominating part */
139:   /* ---------------------------------------------- */
140:   /* get data from symbolic products */
141:   p_loc = (Mat_SeqAIJ*)(ptap->P_loc)->data;
142:   if (ptap->P_oth) p_oth = (Mat_SeqAIJ*)(ptap->P_oth)->data;

144:   api   = ap->i;
145:   apj   = ap->j;
146:   ISLocalToGlobalMappingApply(ptap->ltog,api[AP_loc->rmap->n],apj,apj);
147:   for (i=0; i<am; i++) {
148:     /* AP[i,:] = A[i,:]*P = Ad*P_loc + Ao*P_oth */
149:     apnz = api[i+1] - api[i];
150:     apa = ap->a + api[i];
151:     PetscArrayzero(apa,apnz);
152:     AProw_scalable(i,ad,ao,p_loc,p_oth,api,apj,apa);
153:   }
154:   ISGlobalToLocalMappingApply(ptap->ltog,IS_GTOLM_DROP,api[AP_loc->rmap->n],apj,&nout,apj);
155:   if (api[AP_loc->rmap->n] != nout) SETERRQ2(PETSC_COMM_SELF,PETSC_ERR_ARG_INCOMP,"Incorrect mapping %D != %D\n",api[AP_loc->rmap->n],nout);

157:   /* 3) C_loc = Rd*AP_loc, C_oth = Ro*AP_loc */
158:   /* Always use the scalable version since this is the MPI-scalable algorithm */
159:   MatMatMultNumeric_SeqAIJ_SeqAIJ_Scalable(ptap->Rd,AP_loc,ptap->C_loc);
160:   MatMatMultNumeric_SeqAIJ_SeqAIJ_Scalable(ptap->Ro,AP_loc,ptap->C_oth);

162:   C_loc = ptap->C_loc;
163:   C_oth = ptap->C_oth;

165:   /* add C_loc and C_oth to C */
166:   MatGetOwnershipRange(C,&rstart,&rend);

168:   /* C_loc -> C */
169:   cm    = C_loc->rmap->N;
170:   c_seq = (Mat_SeqAIJ*)C_loc->data;
171:   cols = c_seq->j;
172:   vals = c_seq->a;
173:   ISLocalToGlobalMappingApply(ptap->ltog,c_seq->i[C_loc->rmap->n],c_seq->j,c_seq->j);

175:   /* The (fast) MatSetValues_MPIAIJ_CopyFromCSRFormat function can only be used when C->was_assembled is PETSC_FALSE and */
176:   /* when there are no off-processor parts. */
177:   /* If was_assembled is true, then the statement aj[rowstart_diag+dnz_row] = mat_j[col] - cstart; in MatSetValues_MPIAIJ_CopyFromCSRFormat */
178:   /* no longer holds. Then the more general MatSetValues_MPIAIJ() has to be used, where the column index is looked up from */
179:   /* a table and additional bookkeeping is required. */
180:   if (C->assembled) {
181:     C->was_assembled = PETSC_TRUE;
182:     C->assembled     = PETSC_FALSE;
183:   }
184:   if (C->was_assembled) {
185:     for (i=0; i<cm; i++) {
186:       ncols = c_seq->i[i+1] - c_seq->i[i];
187:       row = rstart + i;
188:       MatSetValues_MPIAIJ(C,1,&row,ncols,cols,vals,ADD_VALUES);
189:       cols += ncols; vals += ncols;
190:     }
191:   } else {
192:     MatSetValues_MPIAIJ_CopyFromCSRFormat(C,c_seq->j,c_seq->i,c_seq->a);
193:   }
194:   ISGlobalToLocalMappingApply(ptap->ltog,IS_GTOLM_DROP,c_seq->i[C_loc->rmap->n],c_seq->j,&nout,c_seq->j);
195:   if (c_seq->i[C_loc->rmap->n] != nout) SETERRQ2(PETSC_COMM_SELF,PETSC_ERR_ARG_INCOMP,"Incorrect mapping %D != %D\n",c_seq->i[C_loc->rmap->n],nout);

197:   /* Co -> C, off-processor part */
198:   cm = C_oth->rmap->N;
199:   c_seq = (Mat_SeqAIJ*)C_oth->data;
200:   cols = c_seq->j;
201:   vals = c_seq->a;
202:   ISLocalToGlobalMappingApply(ptap->ltog,c_seq->i[C_oth->rmap->n],c_seq->j,c_seq->j);
203:   for (i=0; i<cm; i++) {
204:     ncols = c_seq->i[i+1] - c_seq->i[i];
205:     row = p->garray[i];
206:     MatSetValues(C,1,&row,ncols,cols,vals,ADD_VALUES);
207:     cols += ncols; vals += ncols;
208:   }
209:   MatAssemblyBegin(C,MAT_FINAL_ASSEMBLY);
210:   MatAssemblyEnd(C,MAT_FINAL_ASSEMBLY);

212:   ptap->reuse = MAT_REUSE_MATRIX;

214:   ISGlobalToLocalMappingApply(ptap->ltog,IS_GTOLM_DROP,c_seq->i[C_oth->rmap->n],c_seq->j,&nout,c_seq->j);
215:   if (c_seq->i[C_oth->rmap->n] != nout) SETERRQ2(PETSC_COMM_SELF,PETSC_ERR_ARG_INCOMP,"Incorrect mapping %D != %D\n",c_seq->i[C_oth->rmap->n],nout);
216:   return(0);
217: }

219: PetscErrorCode MatPtAPSymbolic_MPIAIJ_MPIAIJ_scalable(Mat A,Mat P,PetscReal fill,Mat Cmpi)
220: {
221:   PetscErrorCode      ierr;
222:   Mat_APMPI           *ptap;
223:   Mat_MPIAIJ          *a=(Mat_MPIAIJ*)A->data,*p=(Mat_MPIAIJ*)P->data;
224:   MPI_Comm            comm;
225:   PetscMPIInt         size,rank;
226:   Mat                 P_loc,P_oth;
227:   PetscFreeSpaceList  free_space=NULL,current_space=NULL;
228:   PetscInt            am=A->rmap->n,pm=P->rmap->n,pN=P->cmap->N,pn=P->cmap->n;
229:   PetscInt            *lnk,i,k,pnz,row,nsend;
230:   PetscMPIInt         tagi,tagj,*len_si,*len_s,*len_ri,icompleted=0,nrecv;
231:   PetscInt            **buf_rj,**buf_ri,**buf_ri_k;
232:   PetscInt            len,proc,*dnz,*onz,*owners,nzi,nspacedouble;
233:   PetscInt            nrows,*buf_s,*buf_si,*buf_si_i,**nextrow,**nextci;
234:   MPI_Request         *swaits,*rwaits;
235:   MPI_Status          *sstatus,rstatus;
236:   PetscLayout         rowmap;
237:   PetscInt            *owners_co,*coi,*coj;    /* i and j array of (p->B)^T*A*P - used in the communication */
238:   PetscMPIInt         *len_r,*id_r;    /* array of length of comm->size, store send/recv matrix values */
239:   PetscInt            *api,*apj,*Jptr,apnz,*prmap=p->garray,con,j,Crmax,*aj,*ai,*pi,nout;
240:   Mat_SeqAIJ          *p_loc,*p_oth=NULL,*ad=(Mat_SeqAIJ*)(a->A)->data,*ao=NULL,*c_loc,*c_oth;
241:   PetscScalar         *apv;
242:   PetscTable          ta;
243:   MatType             mtype;
244:   const char          *prefix;
245: #if defined(PETSC_USE_INFO)
246:   PetscReal           apfill;
247: #endif

250:   MatCheckProduct(Cmpi,4);
251:   if (Cmpi->product->data) SETERRQ(PetscObjectComm((PetscObject)Cmpi),PETSC_ERR_PLIB,"Product data not empty");
252:   PetscObjectGetComm((PetscObject)A,&comm);
253:   MPI_Comm_size(comm,&size);
254:   MPI_Comm_rank(comm,&rank);

256:   if (size > 1) ao = (Mat_SeqAIJ*)(a->B)->data;

258:   /* create symbolic parallel matrix Cmpi */
259:   MatGetType(A,&mtype);
260:   MatSetType(Cmpi,mtype);

262:   /* create struct Mat_APMPI and attach it to C later */
263:   PetscNew(&ptap);
264:   ptap->reuse = MAT_INITIAL_MATRIX;
265:   ptap->algType = 0;

267:   /* get P_oth by taking rows of P (= non-zero cols of local A) from other processors */
268:   MatGetBrowsOfAoCols_MPIAIJ(A,P,MAT_INITIAL_MATRIX,&ptap->startsj_s,&ptap->startsj_r,&ptap->bufa,&P_oth);
269:   /* get P_loc by taking all local rows of P */
270:   MatMPIAIJGetLocalMat(P,MAT_INITIAL_MATRIX,&P_loc);

272:   ptap->P_loc = P_loc;
273:   ptap->P_oth = P_oth;

275:   /* (0) compute Rd = Pd^T, Ro = Po^T  */
276:   /* --------------------------------- */
277:   MatTranspose(p->A,MAT_INITIAL_MATRIX,&ptap->Rd);
278:   MatTranspose(p->B,MAT_INITIAL_MATRIX,&ptap->Ro);

280:   /* (1) compute symbolic AP = A_loc*P = Ad*P_loc + Ao*P_oth (api,apj) */
281:   /* ----------------------------------------------------------------- */
282:   p_loc  = (Mat_SeqAIJ*)P_loc->data;
283:   if (P_oth) p_oth = (Mat_SeqAIJ*)P_oth->data;

285:   /* create and initialize a linked list */
286:   PetscTableCreate(pn,pN,&ta); /* for computing AP_loc and Cmpi */
287:   MatRowMergeMax_SeqAIJ(p_loc,P_loc->rmap->N,ta);
288:   MatRowMergeMax_SeqAIJ(p_oth,P_oth->rmap->N,ta);
289:   PetscTableGetCount(ta,&Crmax); /* Crmax = nnz(sum of Prows) */

291:   PetscLLCondensedCreate_Scalable(Crmax,&lnk);

293:   /* Initial FreeSpace size is fill*(nnz(A) + nnz(P)) */
294:   if (ao) {
295:     PetscFreeSpaceGet(PetscRealIntMultTruncate(fill,PetscIntSumTruncate(ad->i[am],PetscIntSumTruncate(ao->i[am],p_loc->i[pm]))),&free_space);
296:   } else {
297:     PetscFreeSpaceGet(PetscRealIntMultTruncate(fill,PetscIntSumTruncate(ad->i[am],p_loc->i[pm])),&free_space);
298:   }
299:   current_space = free_space;
300:   nspacedouble  = 0;

302:   PetscMalloc1(am+1,&api);
303:   api[0] = 0;
304:   for (i=0; i<am; i++) {
305:     /* diagonal portion: Ad[i,:]*P */
306:     ai = ad->i; pi = p_loc->i;
307:     nzi = ai[i+1] - ai[i];
308:     aj  = ad->j + ai[i];
309:     for (j=0; j<nzi; j++) {
310:       row  = aj[j];
311:       pnz  = pi[row+1] - pi[row];
312:       Jptr = p_loc->j + pi[row];
313:       /* add non-zero cols of P into the sorted linked list lnk */
314:       PetscLLCondensedAddSorted_Scalable(pnz,Jptr,lnk);
315:     }
316:     /* off-diagonal portion: Ao[i,:]*P */
317:     if (ao) {
318:       ai = ao->i; pi = p_oth->i;
319:       nzi = ai[i+1] - ai[i];
320:       aj  = ao->j + ai[i];
321:       for (j=0; j<nzi; j++) {
322:         row  = aj[j];
323:         pnz  = pi[row+1] - pi[row];
324:         Jptr = p_oth->j + pi[row];
325:         PetscLLCondensedAddSorted_Scalable(pnz,Jptr,lnk);
326:       }
327:     }
328:     apnz     = lnk[0];
329:     api[i+1] = api[i] + apnz;

331:     /* if free space is not available, double the total space in the list */
332:     if (current_space->local_remaining<apnz) {
333:       PetscFreeSpaceGet(PetscIntSumTruncate(apnz,current_space->total_array_size),&current_space);
334:       nspacedouble++;
335:     }

337:     /* Copy data into free space, then initialize lnk */
338:     PetscLLCondensedClean_Scalable(apnz,current_space->array,lnk);

340:     current_space->array           += apnz;
341:     current_space->local_used      += apnz;
342:     current_space->local_remaining -= apnz;
343:   }
344:   /* Allocate space for apj and apv, initialize apj, and */
345:   /* destroy list of free space and other temporary array(s) */
346:   PetscCalloc2(api[am],&apj,api[am],&apv);
347:   PetscFreeSpaceContiguous(&free_space,apj);
348:   PetscLLCondensedDestroy_Scalable(lnk);

350:   /* Create AP_loc for reuse */
351:   MatCreateSeqAIJWithArrays(PETSC_COMM_SELF,am,pN,api,apj,apv,&ptap->AP_loc);
352:   MatSeqAIJCompactOutExtraColumns_SeqAIJ(ptap->AP_loc, &ptap->ltog);

354: #if defined(PETSC_USE_INFO)
355:   if (ao) {
356:     apfill = (PetscReal)api[am]/(ad->i[am]+ao->i[am]+p_loc->i[pm]+1);
357:   } else {
358:     apfill = (PetscReal)api[am]/(ad->i[am]+p_loc->i[pm]+1);
359:   }
360:   ptap->AP_loc->info.mallocs           = nspacedouble;
361:   ptap->AP_loc->info.fill_ratio_given  = fill;
362:   ptap->AP_loc->info.fill_ratio_needed = apfill;

364:   if (api[am]) {
365:     PetscInfo3(ptap->AP_loc,"Scalable algorithm, AP_loc reallocs %D; Fill ratio: given %g needed %g.\n",nspacedouble,(double)fill,(double)apfill);
366:     PetscInfo1(ptap->AP_loc,"Use MatPtAP(A,P,MatReuse,%g,&C) for best AP_loc performance.\n",(double)apfill);
367:   } else {
368:     PetscInfo(ptap->AP_loc,"Scalable algorithm, AP_loc is empty\n");
369:   }
370: #endif

372:   /* (2-1) compute symbolic Co = Ro*AP_loc  */
373:   /* -------------------------------------- */
374:   MatProductCreate(ptap->Ro,ptap->AP_loc,NULL,&ptap->C_oth);
375:   MatGetOptionsPrefix(A,&prefix);
376:   MatSetOptionsPrefix(ptap->C_oth,prefix);
377:   MatAppendOptionsPrefix(ptap->C_oth,"inner_offdiag_");

379:   MatProductSetType(ptap->C_oth,MATPRODUCT_AB);
380:   MatProductSetAlgorithm(ptap->C_oth,"sorted");
381:   MatProductSetFill(ptap->C_oth,fill);
382:   MatProductSetFromOptions(ptap->C_oth);
383:   MatProductSymbolic(ptap->C_oth);

385:   /* (3) send coj of C_oth to other processors  */
386:   /* ------------------------------------------ */
387:   /* determine row ownership */
388:   PetscLayoutCreate(comm,&rowmap);
389:   rowmap->n  = pn;
390:   rowmap->bs = 1;
391:   PetscLayoutSetUp(rowmap);
392:   owners = rowmap->range;

394:   /* determine the number of messages to send, their lengths */
395:   PetscMalloc4(size,&len_s,size,&len_si,size,&sstatus,size+2,&owners_co);
396:   PetscArrayzero(len_s,size);
397:   PetscArrayzero(len_si,size);

399:   c_oth = (Mat_SeqAIJ*)ptap->C_oth->data;
400:   coi   = c_oth->i; coj = c_oth->j;
401:   con   = ptap->C_oth->rmap->n;
402:   proc  = 0;
403:   ISLocalToGlobalMappingApply(ptap->ltog,coi[con],coj,coj);
404:   for (i=0; i<con; i++) {
405:     while (prmap[i] >= owners[proc+1]) proc++;
406:     len_si[proc]++;               /* num of rows in Co(=Pt*AP) to be sent to [proc] */
407:     len_s[proc] += coi[i+1] - coi[i]; /* num of nonzeros in Co to be sent to [proc] */
408:   }

410:   len          = 0; /* max length of buf_si[], see (4) */
411:   owners_co[0] = 0;
412:   nsend        = 0;
413:   for (proc=0; proc<size; proc++) {
414:     owners_co[proc+1] = owners_co[proc] + len_si[proc];
415:     if (len_s[proc]) {
416:       nsend++;
417:       len_si[proc] = 2*(len_si[proc] + 1); /* length of buf_si to be sent to [proc] */
418:       len         += len_si[proc];
419:     }
420:   }

422:   /* determine the number and length of messages to receive for coi and coj  */
423:   PetscGatherNumberOfMessages(comm,NULL,len_s,&nrecv);
424:   PetscGatherMessageLengths2(comm,nsend,nrecv,len_s,len_si,&id_r,&len_r,&len_ri);

426:   /* post the Irecv and Isend of coj */
427:   PetscCommGetNewTag(comm,&tagj);
428:   PetscPostIrecvInt(comm,tagj,nrecv,id_r,len_r,&buf_rj,&rwaits);
429:   PetscMalloc1(nsend+1,&swaits);
430:   for (proc=0, k=0; proc<size; proc++) {
431:     if (!len_s[proc]) continue;
432:     i    = owners_co[proc];
433:     MPI_Isend(coj+coi[i],len_s[proc],MPIU_INT,proc,tagj,comm,swaits+k);
434:     k++;
435:   }

437:   /* (2-2) compute symbolic C_loc = Rd*AP_loc */
438:   /* ---------------------------------------- */
439:   MatProductCreate(ptap->Rd,ptap->AP_loc,NULL,&ptap->C_loc);
440:   MatProductSetType(ptap->C_loc,MATPRODUCT_AB);
441:   MatProductSetAlgorithm(ptap->C_loc,"default");
442:   MatProductSetFill(ptap->C_loc,fill);

444:   MatSetOptionsPrefix(ptap->C_loc,prefix);
445:   MatAppendOptionsPrefix(ptap->C_loc,"inner_diag_");

447:   MatProductSetFromOptions(ptap->C_loc);
448:   MatProductSymbolic(ptap->C_loc);

450:   c_loc = (Mat_SeqAIJ*)ptap->C_loc->data;
451:   ISLocalToGlobalMappingApply(ptap->ltog,c_loc->i[ptap->C_loc->rmap->n],c_loc->j,c_loc->j);

453:   /* receives of coj are complete */
454:   for (i=0; i<nrecv; i++) {
455:     MPI_Waitany(nrecv,rwaits,&icompleted,&rstatus);
456:   }
457:   PetscFree(rwaits);
458:   if (nsend) {MPI_Waitall(nsend,swaits,sstatus);}

460:   /* add received column indices into ta to update Crmax */
461:   for (k=0; k<nrecv; k++) {/* k-th received message */
462:     Jptr = buf_rj[k];
463:     for (j=0; j<len_r[k]; j++) {
464:       PetscTableAdd(ta,*(Jptr+j)+1,1,INSERT_VALUES);
465:     }
466:   }
467:   PetscTableGetCount(ta,&Crmax);
468:   PetscTableDestroy(&ta);

470:   /* (4) send and recv coi */
471:   /*-----------------------*/
472:   PetscCommGetNewTag(comm,&tagi);
473:   PetscPostIrecvInt(comm,tagi,nrecv,id_r,len_ri,&buf_ri,&rwaits);
474:   PetscMalloc1(len+1,&buf_s);
475:   buf_si = buf_s;  /* points to the beginning of k-th msg to be sent */
476:   for (proc=0,k=0; proc<size; proc++) {
477:     if (!len_s[proc]) continue;
478:     /* form outgoing message for i-structure:
479:          buf_si[0]:                 nrows to be sent
480:                [1:nrows]:           row index (global)
481:                [nrows+1:2*nrows+1]: i-structure index
482:     */
483:     /*-------------------------------------------*/
484:     nrows       = len_si[proc]/2 - 1; /* num of rows in Co to be sent to [proc] */
485:     buf_si_i    = buf_si + nrows+1;
486:     buf_si[0]   = nrows;
487:     buf_si_i[0] = 0;
488:     nrows       = 0;
489:     for (i=owners_co[proc]; i<owners_co[proc+1]; i++) {
490:       nzi = coi[i+1] - coi[i];
491:       buf_si_i[nrows+1] = buf_si_i[nrows] + nzi;  /* i-structure */
492:       buf_si[nrows+1]   = prmap[i] -owners[proc]; /* local row index */
493:       nrows++;
494:     }
495:     MPI_Isend(buf_si,len_si[proc],MPIU_INT,proc,tagi,comm,swaits+k);
496:     k++;
497:     buf_si += len_si[proc];
498:   }
499:   for (i=0; i<nrecv; i++) {
500:     MPI_Waitany(nrecv,rwaits,&icompleted,&rstatus);
501:   }
502:   PetscFree(rwaits);
503:   if (nsend) {MPI_Waitall(nsend,swaits,sstatus);}

505:   PetscFree4(len_s,len_si,sstatus,owners_co);
506:   PetscFree(len_ri);
507:   PetscFree(swaits);
508:   PetscFree(buf_s);

510:   /* (5) compute the local portion of Cmpi      */
511:   /* ------------------------------------------ */
512:   /* set initial free space to be Crmax, sufficient for holding nonzeros in each row of Cmpi */
513:   PetscFreeSpaceGet(Crmax,&free_space);
514:   current_space = free_space;

516:   PetscMalloc3(nrecv,&buf_ri_k,nrecv,&nextrow,nrecv,&nextci);
517:   for (k=0; k<nrecv; k++) {
518:     buf_ri_k[k] = buf_ri[k]; /* beginning of k-th received i-structure */
519:     nrows       = *buf_ri_k[k];
520:     nextrow[k]  = buf_ri_k[k] + 1;  /* next row number of k-th received i-structure */
521:     nextci[k]   = buf_ri_k[k] + (nrows + 1); /* points to the next i-structure of k-th received i-structure */
522:   }

524:   MatPreallocateInitialize(comm,pn,pn,dnz,onz);
525:   PetscLLCondensedCreate_Scalable(Crmax,&lnk);
526:   for (i=0; i<pn; i++) {
527:     /* add C_loc into Cmpi */
528:     nzi  = c_loc->i[i+1] - c_loc->i[i];
529:     Jptr = c_loc->j + c_loc->i[i];
530:     PetscLLCondensedAddSorted_Scalable(nzi,Jptr,lnk);

532:     /* add received col data into lnk */
533:     for (k=0; k<nrecv; k++) { /* k-th received message */
534:       if (i == *nextrow[k]) { /* i-th row */
535:         nzi  = *(nextci[k]+1) - *nextci[k];
536:         Jptr = buf_rj[k] + *nextci[k];
537:         PetscLLCondensedAddSorted_Scalable(nzi,Jptr,lnk);
538:         nextrow[k]++; nextci[k]++;
539:       }
540:     }
541:     nzi = lnk[0];

543:     /* copy data into free space, then initialize lnk */
544:     PetscLLCondensedClean_Scalable(nzi,current_space->array,lnk);
545:     MatPreallocateSet(i+owners[rank],nzi,current_space->array,dnz,onz);
546:   }
547:   PetscFree3(buf_ri_k,nextrow,nextci);
548:   PetscLLCondensedDestroy_Scalable(lnk);
549:   PetscFreeSpaceDestroy(free_space);

551:   /* local sizes and preallocation */
552:   MatSetSizes(Cmpi,pn,pn,PETSC_DETERMINE,PETSC_DETERMINE);
553:   if (P->cmap->bs > 0) {
554:     PetscLayoutSetBlockSize(Cmpi->rmap,P->cmap->bs);
555:     PetscLayoutSetBlockSize(Cmpi->cmap,P->cmap->bs);
556:   }
557:   MatMPIAIJSetPreallocation(Cmpi,0,dnz,0,onz);
558:   MatPreallocateFinalize(dnz,onz);

560:   /* members in merge */
561:   PetscFree(id_r);
562:   PetscFree(len_r);
563:   PetscFree(buf_ri[0]);
564:   PetscFree(buf_ri);
565:   PetscFree(buf_rj[0]);
566:   PetscFree(buf_rj);
567:   PetscLayoutDestroy(&rowmap);

569:   nout = 0;
570:   ISGlobalToLocalMappingApply(ptap->ltog,IS_GTOLM_DROP,c_oth->i[ptap->C_oth->rmap->n],c_oth->j,&nout,c_oth->j);
571:   if (c_oth->i[ptap->C_oth->rmap->n] != nout) SETERRQ2(PETSC_COMM_SELF,PETSC_ERR_ARG_INCOMP,"Incorrect mapping %D != %D\n",c_oth->i[ptap->C_oth->rmap->n],nout);
572:   ISGlobalToLocalMappingApply(ptap->ltog,IS_GTOLM_DROP,c_loc->i[ptap->C_loc->rmap->n],c_loc->j,&nout,c_loc->j);
573:   if (c_loc->i[ptap->C_loc->rmap->n] != nout) SETERRQ2(PETSC_COMM_SELF,PETSC_ERR_ARG_INCOMP,"Incorrect mapping %D != %D\n",c_loc->i[ptap->C_loc->rmap->n],nout);

575:   /* attach the supporting struct to Cmpi for reuse */
576:   Cmpi->product->data    = ptap;
577:   Cmpi->product->view    = MatView_MPIAIJ_PtAP;
578:   Cmpi->product->destroy = MatDestroy_MPIAIJ_PtAP;

580:   /* Cmpi is not ready for use - assembly will be done by MatPtAPNumeric() */
581:   Cmpi->assembled        = PETSC_FALSE;
582:   Cmpi->ops->ptapnumeric = MatPtAPNumeric_MPIAIJ_MPIAIJ_scalable;
583:   return(0);
584: }
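/*
  Indexing convention shared by the *_allatonce helpers below: a row index i of A is split as
  ii = i/dof and offset = i%dof, so dof interlaced fields can reuse a single scalar P.  A
  local column pj[k] of Pd then corresponds to the global column pj[k]*dof + offset + pcstart
  of C, and an off-diagonal column pj[k] of Po to p->garray[pj[k]]*dof + offset.
*/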

586: PETSC_STATIC_INLINE PetscErrorCode MatPtAPSymbolicComputeOneRowOfAP_private(Mat A,Mat P,Mat P_oth,const PetscInt *map,PetscInt dof,PetscInt i,PetscHSetI dht,PetscHSetI oht)
587: {
588:   Mat_MPIAIJ           *a=(Mat_MPIAIJ*)A->data,*p=(Mat_MPIAIJ*)P->data;
589:   Mat_SeqAIJ           *ad=(Mat_SeqAIJ*)(a->A)->data,*ao=(Mat_SeqAIJ*)(a->B)->data,*p_oth=(Mat_SeqAIJ*)P_oth->data,*pd=(Mat_SeqAIJ*)p->A->data,*po=(Mat_SeqAIJ*)p->B->data;
590:   PetscInt             *ai,nzi,j,*aj,row,col,*pi,*pj,pnz,nzpi,*p_othcols,k;
591:   PetscInt             pcstart,pcend,column,offset;
592:   PetscErrorCode       ierr;

595:   pcstart = P->cmap->rstart;
596:   pcstart *= dof;
597:   pcend   = P->cmap->rend;
598:   pcend   *= dof;
599:   /* diagonal portion: Ad[i,:]*P */
600:   ai = ad->i;
601:   nzi = ai[i+1] - ai[i];
602:   aj  = ad->j + ai[i];
603:   for (j=0; j<nzi; j++) {
604:     row  = aj[j];
605:     offset = row%dof;
606:     row   /= dof;
607:     nzpi = pd->i[row+1] - pd->i[row];
608:     pj  = pd->j + pd->i[row];
609:     for (k=0; k<nzpi; k++) {
610:       PetscHSetIAdd(dht,pj[k]*dof+offset+pcstart);
611:     }
612:   }
613:   /* off diag P */
614:   for (j=0; j<nzi; j++) {
615:     row  = aj[j];
616:     offset = row%dof;
617:     row   /= dof;
618:     nzpi = po->i[row+1] - po->i[row];
619:     pj  = po->j + po->i[row];
620:     for (k=0; k<nzpi; k++) {
621:       PetscHSetIAdd(oht,p->garray[pj[k]]*dof+offset);
622:     }
623:   }

625:   /* off diagonal part: Ao[i, :]*P_oth */
626:   if (ao) {
627:     ai = ao->i;
628:     pi = p_oth->i;
629:     nzi = ai[i+1] - ai[i];
630:     aj  = ao->j + ai[i];
631:     for (j=0; j<nzi; j++) {
632:       row  = aj[j];
633:       offset = a->garray[row]%dof;
634:       row  = map[row];
635:       pnz  = pi[row+1] - pi[row];
636:       p_othcols = p_oth->j + pi[row];
637:       for (col=0; col<pnz; col++) {
638:         column = p_othcols[col] * dof + offset;
639:         if (column>=pcstart && column<pcend) {
640:           PetscHSetIAdd(dht,column);
641:         } else {
642:           PetscHSetIAdd(oht,column);
643:         }
644:       }
645:     }
646:   } /* end if (ao) */
647:   return(0);
648: }
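/*
  The symbolic helper above only records the sparsity pattern of row i of A*P: columns that
  fall in the local diagonal block [pcstart,pcend) go into dht, all others into oht.  The
  numeric helper below walks the same entries but accumulates the products a[i,j]*p[j,k]
  into a PetscHMapIV keyed by the global column index.
*/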

650: PETSC_STATIC_INLINE PetscErrorCode MatPtAPNumericComputeOneRowOfAP_private(Mat A,Mat P,Mat P_oth,const PetscInt *map,PetscInt dof,PetscInt i,PetscHMapIV hmap)
651: {
652:   Mat_MPIAIJ           *a=(Mat_MPIAIJ*)A->data,*p=(Mat_MPIAIJ*)P->data;
653:   Mat_SeqAIJ           *ad=(Mat_SeqAIJ*)(a->A)->data,*ao=(Mat_SeqAIJ*)(a->B)->data,*p_oth=(Mat_SeqAIJ*)P_oth->data,*pd=(Mat_SeqAIJ*)p->A->data,*po=(Mat_SeqAIJ*)p->B->data;
654:   PetscInt             *ai,nzi,j,*aj,row,col,*pi,pnz,*p_othcols,pcstart,*pj,k,nzpi,offset;
655:   PetscScalar          ra,*aa,*pa;
656:   PetscErrorCode       ierr;

659:   pcstart = P->cmap->rstart;
660:   pcstart *= dof;

662:   /* diagonal portion: Ad[i,:]*P */
663:   ai  = ad->i;
664:   nzi = ai[i+1] - ai[i];
665:   aj  = ad->j + ai[i];
666:   aa  = ad->a + ai[i];
667:   for (j=0; j<nzi; j++) {
668:     ra   = aa[j];
669:     row  = aj[j];
670:     offset = row%dof;
671:     row    /= dof;
672:     nzpi = pd->i[row+1] - pd->i[row];
673:     pj = pd->j + pd->i[row];
674:     pa = pd->a + pd->i[row];
675:     for (k=0; k<nzpi; k++) {
676:       PetscHMapIVAddValue(hmap,pj[k]*dof+offset+pcstart,ra*pa[k]);
677:     }
678:     PetscLogFlops(2.0*nzpi);
679:   }
680:   for (j=0; j<nzi; j++) {
681:     ra   = aa[j];
682:     row  = aj[j];
683:     offset = row%dof;
684:     row   /= dof;
685:     nzpi = po->i[row+1] - po->i[row];
686:     pj = po->j + po->i[row];
687:     pa = po->a + po->i[row];
688:     for (k=0; k<nzpi; k++) {
689:       PetscHMapIVAddValue(hmap,p->garray[pj[k]]*dof+offset,ra*pa[k]);
690:     }
691:     PetscLogFlops(2.0*nzpi);
692:   }

694:   /* off diagonal part: Ao[i, :]*P_oth */
695:   if (ao) {
696:     ai = ao->i;
697:     pi = p_oth->i;
698:     nzi = ai[i+1] - ai[i];
699:     aj  = ao->j + ai[i];
700:     aa  = ao->a + ai[i];
701:     for (j=0; j<nzi; j++) {
702:       row  = aj[j];
703:       offset = a->garray[row]%dof;
704:       row    = map[row];
705:       ra   = aa[j];
706:       pnz  = pi[row+1] - pi[row];
707:       p_othcols = p_oth->j + pi[row];
708:       pa   = p_oth->a + pi[row];
709:       for (col=0; col<pnz; col++) {
710:         PetscHMapIVAddValue(hmap,p_othcols[col]*dof+offset,ra*pa[col]);
711:       }
712:       PetscLogFlops(2.0*pnz);
713:     }
714:   } /* end if (ao) */

716:   return(0);
717: }

719: PetscErrorCode MatGetBrowsOfAcols_MPIXAIJ(Mat,Mat,PetscInt dof,MatReuse,Mat*);
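/*
  The "overlapping" all-at-once numeric phase below first forms the contributions destined
  for other ranks (c_rmtj/c_rmta, one compressed row per off-diagonal column of P), starts
  the PetscSF reduction that ships them to their owners, assembles the locally owned part of
  C while that communication is in flight, and only then completes the reduction and adds
  the received rows with MatSetValues(...,ADD_VALUES).
*/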

721: PetscErrorCode MatPtAPNumeric_MPIAIJ_MPIXAIJ_allatonce(Mat A,Mat P,PetscInt dof,Mat C)
722: {
723:   PetscErrorCode    ierr;
724:   Mat_MPIAIJ        *p=(Mat_MPIAIJ*)P->data,*c=(Mat_MPIAIJ*)C->data;
725:   Mat_SeqAIJ        *cd,*co,*po=(Mat_SeqAIJ*)p->B->data,*pd=(Mat_SeqAIJ*)p->A->data;
726:   Mat_APMPI         *ptap;
727:   PetscHMapIV       hmap;
728:   PetscInt          i,j,jj,kk,nzi,*c_rmtj,voff,*c_othj,pn,pon,pcstart,pcend,ccstart,ccend,row,am,*poj,*pdj,*apindices,cmaxr,*c_rmtc,*c_rmtjj,*dcc,*occ,loc;
729:   PetscScalar       *c_rmta,*c_otha,*poa,*pda,*apvalues,*apvaluestmp,*c_rmtaa;
730:   PetscInt          offset,ii,pocol;
731:   const PetscInt    *mappingindices;
732:   IS                map;

735:   MatCheckProduct(C,4);
736:   ptap = (Mat_APMPI*)C->product->data;
737:   if (!ptap) SETERRQ(PetscObjectComm((PetscObject)C),PETSC_ERR_ARG_WRONGSTATE,"PtAP cannot be computed. Missing data");
738:   if (!ptap->P_oth) SETERRQ(PetscObjectComm((PetscObject)C),PETSC_ERR_ARG_WRONGSTATE,"PtAP cannot be reused. Do not call MatProductClear()");

740:   MatZeroEntries(C);

742:   /* Get P_oth = ptap->P_oth  and P_loc = ptap->P_loc */
743:   /*-----------------------------------------------------*/
744:   if (ptap->reuse == MAT_REUSE_MATRIX) {
745:     /* P_oth and P_loc are obtained in MatPtAPSymbolic() when reuse == MAT_INITIAL_MATRIX */
746:     MatGetBrowsOfAcols_MPIXAIJ(A,P,dof,MAT_REUSE_MATRIX,&ptap->P_oth);
747:   }
748:   PetscObjectQuery((PetscObject)ptap->P_oth,"aoffdiagtopothmapping",(PetscObject*)&map);

750:   MatGetLocalSize(p->B,NULL,&pon);
751:   pon *= dof;
752:   PetscCalloc2(ptap->c_rmti[pon],&c_rmtj,ptap->c_rmti[pon],&c_rmta);
753:   MatGetLocalSize(A,&am,NULL);
754:   cmaxr = 0;
755:   for (i=0; i<pon; i++) {
756:     cmaxr = PetscMax(cmaxr,ptap->c_rmti[i+1]-ptap->c_rmti[i]);
757:   }
758:   PetscCalloc4(cmaxr,&apindices,cmaxr,&apvalues,cmaxr,&apvaluestmp,pon,&c_rmtc);
759:   PetscHMapIVCreate(&hmap);
760:   PetscHMapIVResize(hmap,cmaxr);
761:   ISGetIndices(map,&mappingindices);
762:   for (i=0; i<am && pon; i++) {
763:     PetscHMapIVClear(hmap);
764:     offset = i%dof;
765:     ii     = i/dof;
766:     nzi = po->i[ii+1] - po->i[ii];
767:     if (!nzi) continue;
768:     MatPtAPNumericComputeOneRowOfAP_private(A,P,ptap->P_oth,mappingindices,dof,i,hmap);
769:     voff = 0;
770:     PetscHMapIVGetPairs(hmap,&voff,apindices,apvalues);
771:     if (!voff) continue;

773:     /* Form C(ii, :) */
774:     poj = po->j + po->i[ii];
775:     poa = po->a + po->i[ii];
776:     for (j=0; j<nzi; j++) {
777:       pocol = poj[j]*dof+offset;
778:       c_rmtjj = c_rmtj + ptap->c_rmti[pocol];
779:       c_rmtaa = c_rmta + ptap->c_rmti[pocol];
780:       for (jj=0; jj<voff; jj++) {
781:         apvaluestmp[jj] = apvalues[jj]*poa[j];
782:         /* If the row is empty */
783:         if (!c_rmtc[pocol]) {
784:           c_rmtjj[jj] = apindices[jj];
785:           c_rmtaa[jj] = apvaluestmp[jj];
786:           c_rmtc[pocol]++;
787:         } else {
788:           PetscFindInt(apindices[jj],c_rmtc[pocol],c_rmtjj,&loc);
789:           if (loc>=0){ /* hit */
790:             c_rmtaa[loc] += apvaluestmp[jj];
791:             PetscLogFlops(1.0);
792:           } else { /* new element */
793:             loc = -(loc+1);
794:             /* Move data backward */
795:             for (kk=c_rmtc[pocol]; kk>loc; kk--) {
796:               c_rmtjj[kk] = c_rmtjj[kk-1];
797:               c_rmtaa[kk] = c_rmtaa[kk-1];
798:             }/* End kk */
799:             c_rmtjj[loc] = apindices[jj];
800:             c_rmtaa[loc] = apvaluestmp[jj];
801:             c_rmtc[pocol]++;
802:           }
803:         }
804:         PetscLogFlops(voff);
805:       } /* End jj */
806:     } /* End j */
807:   } /* End i */

809:   PetscFree4(apindices,apvalues,apvaluestmp,c_rmtc);
810:   PetscHMapIVDestroy(&hmap);

812:   MatGetLocalSize(P,NULL,&pn);
813:   pn *= dof;
814:   PetscCalloc2(ptap->c_othi[pn],&c_othj,ptap->c_othi[pn],&c_otha);

816:   PetscSFReduceBegin(ptap->sf,MPIU_INT,c_rmtj,c_othj,MPIU_REPLACE);
817:   PetscSFReduceBegin(ptap->sf,MPIU_SCALAR,c_rmta,c_otha,MPIU_REPLACE);
818:   MatGetOwnershipRangeColumn(P,&pcstart,&pcend);
819:   pcstart = pcstart*dof;
820:   pcend   = pcend*dof;
821:   cd = (Mat_SeqAIJ*)(c->A)->data;
822:   co = (Mat_SeqAIJ*)(c->B)->data;

824:   cmaxr = 0;
825:   for (i=0; i<pn; i++) {
826:     cmaxr = PetscMax(cmaxr,(cd->i[i+1]-cd->i[i])+(co->i[i+1]-co->i[i]));
827:   }
828:   PetscCalloc5(cmaxr,&apindices,cmaxr,&apvalues,cmaxr,&apvaluestmp,pn,&dcc,pn,&occ);
829:   PetscHMapIVCreate(&hmap);
830:   PetscHMapIVResize(hmap,cmaxr);
831:   for (i=0; i<am && pn; i++) {
832:     PetscHMapIVClear(hmap);
833:     offset = i%dof;
834:     ii     = i/dof;
835:     nzi = pd->i[ii+1] - pd->i[ii];
836:     if (!nzi) continue;
837:     MatPtAPNumericComputeOneRowOfAP_private(A,P,ptap->P_oth,mappingindices,dof,i,hmap);
838:     voff = 0;
839:     PetscHMapIVGetPairs(hmap,&voff,apindices,apvalues);
840:     if (!voff) continue;
841:     /* Form C(ii, :) */
842:     pdj = pd->j + pd->i[ii];
843:     pda = pd->a + pd->i[ii];
844:     for (j=0; j<nzi; j++) {
845:       row = pcstart + pdj[j] * dof + offset;
846:       for (jj=0; jj<voff; jj++) {
847:         apvaluestmp[jj] = apvalues[jj]*pda[j];
848:       }
849:       PetscLogFlops(voff);
850:       MatSetValues(C,1,&row,voff,apindices,apvaluestmp,ADD_VALUES);
851:     }
852:   }
853:   ISRestoreIndices(map,&mappingindices);
854:   MatGetOwnershipRangeColumn(C,&ccstart,&ccend);
855:   PetscFree5(apindices,apvalues,apvaluestmp,dcc,occ);
856:   PetscHMapIVDestroy(&hmap);
857:   PetscSFReduceEnd(ptap->sf,MPIU_INT,c_rmtj,c_othj,MPIU_REPLACE);
858:   PetscSFReduceEnd(ptap->sf,MPIU_SCALAR,c_rmta,c_otha,MPIU_REPLACE);
859:   PetscFree2(c_rmtj,c_rmta);

861:   /* Add contributions from remote */
862:   for (i = 0; i < pn; i++) {
863:     row = i + pcstart;
864:     MatSetValues(C,1,&row,ptap->c_othi[i+1]-ptap->c_othi[i],c_othj+ptap->c_othi[i],c_otha+ptap->c_othi[i],ADD_VALUES);
865:   }
866:   PetscFree2(c_othj,c_otha);

868:   MatAssemblyBegin(C,MAT_FINAL_ASSEMBLY);
869:   MatAssemblyEnd(C,MAT_FINAL_ASSEMBLY);

871:   ptap->reuse = MAT_REUSE_MATRIX;
872:   return(0);
873: }

875: PetscErrorCode MatPtAPNumeric_MPIAIJ_MPIAIJ_allatonce(Mat A,Mat P,Mat C)
876: {
877:   PetscErrorCode      ierr;


881:   MatPtAPNumeric_MPIAIJ_MPIXAIJ_allatonce(A,P,1,C);
882:   return(0);
883: }
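/*
  The "merged" variant below forms each row of A*P once and uses it for both the remote (Po)
  and local (Pd) contributions in a single pass over the rows of A; the PetscSF reduction is
  then done after the loop, so communication is not overlapped with computation as it is in
  the overlapping variant above.
*/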

885: PetscErrorCode MatPtAPNumeric_MPIAIJ_MPIXAIJ_allatonce_merged(Mat A,Mat P,PetscInt dof,Mat C)
886: {
887:   PetscErrorCode    ierr;
888:   Mat_MPIAIJ        *p=(Mat_MPIAIJ*)P->data,*c=(Mat_MPIAIJ*)C->data;
889:   Mat_SeqAIJ        *cd,*co,*po=(Mat_SeqAIJ*)p->B->data,*pd=(Mat_SeqAIJ*)p->A->data;
890:   Mat_APMPI         *ptap;
891:   PetscHMapIV       hmap;
892:   PetscInt          i,j,jj,kk,nzi,dnzi,*c_rmtj,voff,*c_othj,pn,pon,pcstart,pcend,row,am,*poj,*pdj,*apindices,cmaxr,*c_rmtc,*c_rmtjj,loc;
893:   PetscScalar       *c_rmta,*c_otha,*poa,*pda,*apvalues,*apvaluestmp,*c_rmtaa;
894:   PetscInt          offset,ii,pocol;
895:   const PetscInt    *mappingindices;
896:   IS                map;

899:   MatCheckProduct(C,4);
900:   ptap = (Mat_APMPI*)C->product->data;
901:   if (!ptap) SETERRQ(PetscObjectComm((PetscObject)C),PETSC_ERR_ARG_WRONGSTATE,"PtAP cannot be computed. Missing data");
902:   if (!ptap->P_oth) SETERRQ(PetscObjectComm((PetscObject)C),PETSC_ERR_ARG_WRONGSTATE,"PtAP cannot be reused. Do not call MatProductClear()");

904:   MatZeroEntries(C);

906:   /* Get P_oth = ptap->P_oth  and P_loc = ptap->P_loc */
907:   /*-----------------------------------------------------*/
908:   if (ptap->reuse == MAT_REUSE_MATRIX) {
909:     /* P_oth and P_loc are obtained in MatPtAPSymbolic() when reuse == MAT_INITIAL_MATRIX */
910:     MatGetBrowsOfAcols_MPIXAIJ(A,P,dof,MAT_REUSE_MATRIX,&ptap->P_oth);
911:   }
912:   PetscObjectQuery((PetscObject)ptap->P_oth,"aoffdiagtopothmapping",(PetscObject*)&map);
913:   MatGetLocalSize(p->B,NULL,&pon);
914:   pon *= dof;
915:   MatGetLocalSize(P,NULL,&pn);
916:   pn  *= dof;

918:   PetscCalloc2(ptap->c_rmti[pon],&c_rmtj,ptap->c_rmti[pon],&c_rmta);
919:   MatGetLocalSize(A,&am,NULL);
920:   MatGetOwnershipRangeColumn(P,&pcstart,&pcend);
921:   pcstart *= dof;
922:   pcend   *= dof;
923:   cmaxr = 0;
924:   for (i=0; i<pon; i++) {
925:     cmaxr = PetscMax(cmaxr,ptap->c_rmti[i+1]-ptap->c_rmti[i]);
926:   }
927:   cd = (Mat_SeqAIJ*)(c->A)->data;
928:   co = (Mat_SeqAIJ*)(c->B)->data;
929:   for (i=0; i<pn; i++) {
930:     cmaxr = PetscMax(cmaxr,(cd->i[i+1]-cd->i[i])+(co->i[i+1]-co->i[i]));
931:   }
932:   PetscCalloc4(cmaxr,&apindices,cmaxr,&apvalues,cmaxr,&apvaluestmp,pon,&c_rmtc);
933:   PetscHMapIVCreate(&hmap);
934:   PetscHMapIVResize(hmap,cmaxr);
935:   ISGetIndices(map,&mappingindices);
936:   for (i=0; i<am && (pon || pn); i++) {
937:     PetscHMapIVClear(hmap);
938:     offset = i%dof;
939:     ii     = i/dof;
940:     nzi  = po->i[ii+1] - po->i[ii];
941:     dnzi = pd->i[ii+1] - pd->i[ii];
942:     if (!nzi && !dnzi) continue;
943:     MatPtAPNumericComputeOneRowOfAP_private(A,P,ptap->P_oth,mappingindices,dof,i,hmap);
944:     voff = 0;
945:     PetscHMapIVGetPairs(hmap,&voff,apindices,apvalues);
946:     if (!voff) continue;

948:     /* Form remote C(ii, :) */
949:     poj = po->j + po->i[ii];
950:     poa = po->a + po->i[ii];
951:     for (j=0; j<nzi; j++) {
952:       pocol = poj[j]*dof+offset;
953:       c_rmtjj = c_rmtj + ptap->c_rmti[pocol];
954:       c_rmtaa = c_rmta + ptap->c_rmti[pocol];
955:       for (jj=0; jj<voff; jj++) {
956:         apvaluestmp[jj] = apvalues[jj]*poa[j];
957:         /* If the row is empty */
958:         if (!c_rmtc[pocol]) {
959:           c_rmtjj[jj] = apindices[jj];
960:           c_rmtaa[jj] = apvaluestmp[jj];
961:           c_rmtc[pocol]++;
962:         } else {
963:           PetscFindInt(apindices[jj],c_rmtc[pocol],c_rmtjj,&loc);
964:           if (loc>=0){ /* hit */
965:             c_rmtaa[loc] += apvaluestmp[jj];
966:             PetscLogFlops(1.0);
967:           } else { /* new element */
968:             loc = -(loc+1);
969:             /* Move data backward */
970:             for (kk=c_rmtc[pocol]; kk>loc; kk--) {
971:               c_rmtjj[kk] = c_rmtjj[kk-1];
972:               c_rmtaa[kk] = c_rmtaa[kk-1];
973:             }/* End kk */
974:             c_rmtjj[loc] = apindices[jj];
975:             c_rmtaa[loc] = apvaluestmp[jj];
976:             c_rmtc[pocol]++;
977:           }
978:         }
979:       } /* End jj */
980:       PetscLogFlops(voff);
981:     } /* End j */

983:     /* Form local C(ii, :) */
984:     pdj = pd->j + pd->i[ii];
985:     pda = pd->a + pd->i[ii];
986:     for (j=0; j<dnzi; j++) {
987:       row = pcstart + pdj[j] * dof + offset;
988:       for (jj=0; jj<voff; jj++) {
989:         apvaluestmp[jj] = apvalues[jj]*pda[j];
990:       }/* End jj */
991:       PetscLogFlops(voff);
992:       MatSetValues(C,1,&row,voff,apindices,apvaluestmp,ADD_VALUES);
993:     }/* End j */
994:   } /* End i */

996:   ISRestoreIndices(map,&mappingindices);
997:   PetscFree4(apindices,apvalues,apvaluestmp,c_rmtc);
998:   PetscHMapIVDestroy(&hmap);
999:   PetscCalloc2(ptap->c_othi[pn],&c_othj,ptap->c_othi[pn],&c_otha);

1001:   PetscSFReduceBegin(ptap->sf,MPIU_INT,c_rmtj,c_othj,MPIU_REPLACE);
1002:   PetscSFReduceBegin(ptap->sf,MPIU_SCALAR,c_rmta,c_otha,MPIU_REPLACE);
1003:   PetscSFReduceEnd(ptap->sf,MPIU_INT,c_rmtj,c_othj,MPIU_REPLACE);
1004:   PetscSFReduceEnd(ptap->sf,MPIU_SCALAR,c_rmta,c_otha,MPIU_REPLACE);
1005:   PetscFree2(c_rmtj,c_rmta);

1007:   /* Add contributions from remote */
1008:   for (i = 0; i < pn; i++) {
1009:     row = i + pcstart;
1010:     MatSetValues(C,1,&row,ptap->c_othi[i+1]-ptap->c_othi[i],c_othj+ptap->c_othi[i],c_otha+ptap->c_othi[i],ADD_VALUES);
1011:   }
1012:   PetscFree2(c_othj,c_otha);

1014:   MatAssemblyBegin(C,MAT_FINAL_ASSEMBLY);
1015:   MatAssemblyEnd(C,MAT_FINAL_ASSEMBLY);

1017:   ptap->reuse = MAT_REUSE_MATRIX;
1018:   return(0);
1019: }

1021: PetscErrorCode MatPtAPNumeric_MPIAIJ_MPIAIJ_allatonce_merged(Mat A,Mat P,Mat C)
1022: {
1023:   PetscErrorCode      ierr;


1027:   MatPtAPNumeric_MPIAIJ_MPIXAIJ_allatonce_merged(A,P,1,C);
1028:   return(0);
1029: }

1031: /* TODO: move algorithm selection to MatProductSetFromOptions */
1032: PetscErrorCode MatPtAPSymbolic_MPIAIJ_MPIXAIJ_allatonce(Mat A,Mat P,PetscInt dof,PetscReal fill,Mat Cmpi)
1033: {
1034:   Mat_APMPI           *ptap;
1035:   Mat_MPIAIJ          *p=(Mat_MPIAIJ*)P->data;
1036:   MPI_Comm            comm;
1037:   Mat_SeqAIJ          *pd=(Mat_SeqAIJ*)p->A->data,*po=(Mat_SeqAIJ*)p->B->data;
1038:   MatType             mtype;
1039:   PetscSF             sf;
1040:   PetscSFNode         *iremote;
1041:   PetscInt            rootspacesize,*rootspace,*rootspaceoffsets,nleaves;
1042:   const PetscInt      *rootdegrees;
1043:   PetscHSetI          ht,oht,*hta,*hto;
1044:   PetscInt            pn,pon,*c_rmtc,i,j,nzi,htsize,htosize,*c_rmtj,off,*c_othj,rcvncols,sendncols,*c_rmtoffsets;
1045:   PetscInt            lidx,*rdj,col,pcstart,pcend,*dnz,*onz,am,arstart,arend,*poj,*pdj;
1046:   PetscInt            nalg=2,alg=0,offset,ii;
1047:   PetscMPIInt         owner;
1048:   const PetscInt      *mappingindices;
1049:   PetscBool           flg;
1050:   const char          *algTypes[2] = {"overlapping","merged"};
1051:   IS                  map;
1052:   PetscErrorCode      ierr;

1055:   MatCheckProduct(Cmpi,5);
1056:   if (Cmpi->product->data) SETERRQ(PetscObjectComm((PetscObject)Cmpi),PETSC_ERR_PLIB,"Product data not empty");
1057:   PetscObjectGetComm((PetscObject)A,&comm);

1059:   /* Create symbolic parallel matrix Cmpi */
1060:   MatGetLocalSize(P,NULL,&pn);
1061:   pn *= dof;
1062:   MatGetType(A,&mtype);
1063:   MatSetType(Cmpi,mtype);
1064:   MatSetSizes(Cmpi,pn,pn,PETSC_DETERMINE,PETSC_DETERMINE);

1066:   PetscNew(&ptap);
1067:   ptap->reuse = MAT_INITIAL_MATRIX;
1068:   ptap->algType = 2;

1070:   /* Get P_oth by taking rows of P (= non-zero cols of local A) from other processors */
1071:   MatGetBrowsOfAcols_MPIXAIJ(A,P,dof,MAT_INITIAL_MATRIX,&ptap->P_oth);
1072:   PetscObjectQuery((PetscObject)ptap->P_oth,"aoffdiagtopothmapping",(PetscObject*)&map);
1073:   /* This equals the number of offdiag columns in P */
1074:   MatGetLocalSize(p->B,NULL,&pon);
1075:   pon *= dof;
1076:   /* offsets */
1077:   PetscMalloc1(pon+1,&ptap->c_rmti);
1078:   /* The number of columns we will send to remote ranks */
1079:   PetscMalloc1(pon,&c_rmtc);
1080:   PetscMalloc1(pon,&hta);
1081:   for (i=0; i<pon; i++) {
1082:     PetscHSetICreate(&hta[i]);
1083:   }
1084:   MatGetLocalSize(A,&am,NULL);
1085:   MatGetOwnershipRange(A,&arstart,&arend);
1086:   /* Create hash table to merge all columns for C(i, :) */
1087:   PetscHSetICreate(&ht);

1089:   ISGetIndices(map,&mappingindices);
1090:   ptap->c_rmti[0] = 0;
1091:   /* 2) Pass 1: calculate the size of C_rmt (a matrix that needs to be sent to other processors) */
1092:   for (i=0; i<am && pon; i++) {
1093:     /* Form one row of AP */
1094:     PetscHSetIClear(ht);
1095:     offset = i%dof;
1096:     ii     = i/dof;
1097:     /* If the off diag is empty, we should not do any calculation */
1098:     nzi = po->i[ii+1] - po->i[ii];
1099:     if (!nzi) continue;

1101:     MatPtAPSymbolicComputeOneRowOfAP_private(A,P,ptap->P_oth,mappingindices,dof,i,ht,ht);
1102:     PetscHSetIGetSize(ht,&htsize);
1103:     /* If AP is empty, just continue */
1104:     if (!htsize) continue;
1105:     /* Form C(ii, :) */
1106:     poj = po->j + po->i[ii];
1107:     for (j=0; j<nzi; j++) {
1108:       PetscHSetIUpdate(hta[poj[j]*dof+offset],ht);
1109:     }
1110:   }

1112:   for (i=0; i<pon; i++) {
1113:     PetscHSetIGetSize(hta[i],&htsize);
1114:     ptap->c_rmti[i+1] = ptap->c_rmti[i] + htsize;
1115:     c_rmtc[i] = htsize;
1116:   }

1118:   PetscMalloc1(ptap->c_rmti[pon],&c_rmtj);

1120:   for (i=0; i<pon; i++) {
1121:     off = 0;
1122:     PetscHSetIGetElems(hta[i],&off,c_rmtj+ptap->c_rmti[i]);
1123:     PetscHSetIDestroy(&hta[i]);
1124:   }
1125:   PetscFree(hta);

1127:   PetscMalloc1(pon,&iremote);
1128:   for (i=0; i<pon; i++) {
1129:     owner = 0; lidx = 0;
1130:     offset = i%dof;
1131:     ii     = i/dof;
1132:     PetscLayoutFindOwnerIndex(P->cmap,p->garray[ii],&owner,&lidx);
1133:     iremote[i].index = lidx*dof + offset;
1134:     iremote[i].rank  = owner;
1135:   }

1137:   PetscSFCreate(comm,&sf);
1138:   PetscSFSetGraph(sf,pn,pon,NULL,PETSC_OWN_POINTER,iremote,PETSC_OWN_POINTER);
1139:   /* Reorder ranks properly so that the data handled by gather and scatter have the same order */
1140:   PetscSFSetRankOrder(sf,PETSC_TRUE);
1141:   PetscSFSetFromOptions(sf);
1142:   PetscSFSetUp(sf);
1143:   /* How many neighbors have contributions to my rows? */
1144:   PetscSFComputeDegreeBegin(sf,&rootdegrees);
1145:   PetscSFComputeDegreeEnd(sf,&rootdegrees);
1146:   rootspacesize = 0;
1147:   for (i = 0; i < pn; i++) {
1148:     rootspacesize += rootdegrees[i];
1149:   }
1150:   PetscMalloc1(rootspacesize,&rootspace);
1151:   PetscMalloc1(rootspacesize+1,&rootspaceoffsets);
1152:   /* Get information from the leaves:
1153:    * the number of columns other ranks contribute to my rows
1154:    */
1155:   PetscSFGatherBegin(sf,MPIU_INT,c_rmtc,rootspace);
1156:   PetscSFGatherEnd(sf,MPIU_INT,c_rmtc,rootspace);
1157:   PetscFree(c_rmtc);
1158:   PetscCalloc1(pn+1,&ptap->c_othi);
1159:   /* The number of columns received for each row */
1160:   ptap->c_othi[0] = 0;
1161:   rootspacesize = 0;
1162:   rootspaceoffsets[0] = 0;
1163:   for (i = 0; i < pn; i++) {
1164:     rcvncols = 0;
1165:     for (j = 0; j<rootdegrees[i]; j++) {
1166:       rcvncols += rootspace[rootspacesize];
1167:       rootspaceoffsets[rootspacesize+1] = rootspaceoffsets[rootspacesize] + rootspace[rootspacesize];
1168:       rootspacesize++;
1169:     }
1170:     ptap->c_othi[i+1] = ptap->c_othi[i] + rcvncols;
1171:   }
1172:   PetscFree(rootspace);

1174:   PetscMalloc1(pon,&c_rmtoffsets);
1175:   PetscSFScatterBegin(sf,MPIU_INT,rootspaceoffsets,c_rmtoffsets);
1176:   PetscSFScatterEnd(sf,MPIU_INT,rootspaceoffsets,c_rmtoffsets);
1177:   PetscSFDestroy(&sf);
1178:   PetscFree(rootspaceoffsets);

1180:   PetscCalloc1(ptap->c_rmti[pon],&iremote);
1181:   nleaves = 0;
1182:   for (i = 0; i<pon; i++) {
1183:     owner = 0;
1184:     ii = i/dof;
1185:     PetscLayoutFindOwnerIndex(P->cmap,p->garray[ii],&owner,NULL);
1186:     sendncols = ptap->c_rmti[i+1] - ptap->c_rmti[i];
1187:     for (j=0; j<sendncols; j++) {
1188:       iremote[nleaves].rank = owner;
1189:       iremote[nleaves++].index = c_rmtoffsets[i] + j;
1190:     }
1191:   }
1192:   PetscFree(c_rmtoffsets);
1193:   PetscCalloc1(ptap->c_othi[pn],&c_othj);

1195:   PetscSFCreate(comm,&ptap->sf);
1196:   PetscSFSetGraph(ptap->sf,ptap->c_othi[pn],nleaves,NULL,PETSC_OWN_POINTER,iremote,PETSC_OWN_POINTER);
1197:   PetscSFSetFromOptions(ptap->sf);
1198:   /* One to one map */
1199:   PetscSFReduceBegin(ptap->sf,MPIU_INT,c_rmtj,c_othj,MPIU_REPLACE);

1201:   PetscMalloc2(pn,&dnz,pn,&onz);
1202:   PetscHSetICreate(&oht);
1203:   MatGetOwnershipRangeColumn(P,&pcstart,&pcend);
1204:   pcstart *= dof;
1205:   pcend   *= dof;
1206:   PetscMalloc2(pn,&hta,pn,&hto);
1207:   for (i=0; i<pn; i++) {
1208:     PetscHSetICreate(&hta[i]);
1209:     PetscHSetICreate(&hto[i]);
1210:   }
1211:   /* Work on local part */
1212:   /* 4) Pass 1: Estimate memory for C_loc */
1213:   for (i=0; i<am && pn; i++) {
1214:     PetscHSetIClear(ht);
1215:     PetscHSetIClear(oht);
1216:     offset = i%dof;
1217:     ii     = i/dof;
1218:     nzi = pd->i[ii+1] - pd->i[ii];
1219:     if (!nzi) continue;

1221:     MatPtAPSymbolicComputeOneRowOfAP_private(A,P,ptap->P_oth,mappingindices,dof,i,ht,oht);
1222:     PetscHSetIGetSize(ht,&htsize);
1223:     PetscHSetIGetSize(oht,&htosize);
1224:     if (!(htsize+htosize)) continue;
1225:     /* Form C(ii, :) */
1226:     pdj = pd->j + pd->i[ii];
1227:     for (j=0; j<nzi; j++) {
1228:       PetscHSetIUpdate(hta[pdj[j]*dof+offset],ht);
1229:       PetscHSetIUpdate(hto[pdj[j]*dof+offset],oht);
1230:     }
1231:   }

1233:   ISRestoreIndices(map,&mappingindices);

1235:   PetscHSetIDestroy(&ht);
1236:   PetscHSetIDestroy(&oht);

1238:   /* Get remote data */
1239:   PetscSFReduceEnd(ptap->sf,MPIU_INT,c_rmtj,c_othj,MPIU_REPLACE);
1240:   PetscFree(c_rmtj);

1242:   for (i = 0; i < pn; i++) {
1243:     nzi = ptap->c_othi[i+1] - ptap->c_othi[i];
1244:     rdj = c_othj + ptap->c_othi[i];
1245:     for (j = 0; j < nzi; j++) {
1246:       col = rdj[j];
1247:       /* diag part */
1248:       if (col>=pcstart && col<pcend) {
1249:         PetscHSetIAdd(hta[i],col);
1250:       } else { /* off diag */
1251:         PetscHSetIAdd(hto[i],col);
1252:       }
1253:     }
1254:     PetscHSetIGetSize(hta[i],&htsize);
1255:     dnz[i] = htsize;
1256:     PetscHSetIDestroy(&hta[i]);
1257:     PetscHSetIGetSize(hto[i],&htsize);
1258:     onz[i] = htsize;
1259:     PetscHSetIDestroy(&hto[i]);
1260:   }

1262:   PetscFree2(hta,hto);
1263:   PetscFree(c_othj);

1265:   /* local sizes and preallocation */
1266:   MatSetSizes(Cmpi,pn,pn,PETSC_DETERMINE,PETSC_DETERMINE);
1267:   MatSetBlockSizes(Cmpi,dof>1? dof: P->cmap->bs,dof>1? dof: P->cmap->bs);
1268:   MatMPIAIJSetPreallocation(Cmpi,0,dnz,0,onz);
1269:   MatSetUp(Cmpi);
1270:   PetscFree2(dnz,onz);

1272:   /* attach the supporting struct to Cmpi for reuse */
1273:   Cmpi->product->data    = ptap;
1274:   Cmpi->product->destroy = MatDestroy_MPIAIJ_PtAP;
1275:   Cmpi->product->view    = MatView_MPIAIJ_PtAP;

1277:   /* Cmpi is not ready for use - assembly will be done by MatPtAPNumeric() */
1278:   Cmpi->assembled = PETSC_FALSE;
1279:   /* pick an algorithm */
1280:   PetscOptionsBegin(PetscObjectComm((PetscObject)A),((PetscObject)A)->prefix,"MatPtAP","Mat");
1281:   alg = 0;
1282:   PetscOptionsEList("-matptap_allatonce_via","PtAP allatonce numeric approach","MatPtAP",algTypes,nalg,algTypes[alg],&alg,&flg);
1283:   PetscOptionsEnd();
1284:   switch (alg) {
1285:     case 0:
1286:       Cmpi->ops->ptapnumeric = MatPtAPNumeric_MPIAIJ_MPIAIJ_allatonce;
1287:       break;
1288:     case 1:
1289:       Cmpi->ops->ptapnumeric = MatPtAPNumeric_MPIAIJ_MPIAIJ_allatonce_merged;
1290:       break;
1291:     default:
1292:       SETERRQ(PETSC_COMM_SELF,PETSC_ERR_ARG_WRONG,"Unsupported allatonce numerical algorithm\n");
1293:   }
1294:   return(0);
1295: }
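/*
  Runtime selection of the all-at-once numeric kernel, following the PetscOptionsEList()
  call above (any options prefix of A goes in front of the option name):

      -matptap_allatonce_via overlapping    MatPtAPNumeric_MPIAIJ_MPIAIJ_allatonce (default)
      -matptap_allatonce_via merged         MatPtAPNumeric_MPIAIJ_MPIAIJ_allatonce_merged
*/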

1297: PetscErrorCode MatPtAPSymbolic_MPIAIJ_MPIAIJ_allatonce(Mat A,Mat P,PetscReal fill,Mat C)
1298: {

1302:   MatPtAPSymbolic_MPIAIJ_MPIXAIJ_allatonce(A,P,1,fill,C);
1303:   return(0);
1304: }

1306: PetscErrorCode MatPtAPSymbolic_MPIAIJ_MPIXAIJ_allatonce_merged(Mat A,Mat P,PetscInt dof,PetscReal fill,Mat Cmpi)
1307: {
1308:   Mat_APMPI           *ptap;
1309:   Mat_MPIAIJ          *p=(Mat_MPIAIJ*)P->data;
1310:   MPI_Comm            comm;
1311:   Mat_SeqAIJ          *pd=(Mat_SeqAIJ*)p->A->data,*po=(Mat_SeqAIJ*)p->B->data;
1312:   MatType             mtype;
1313:   PetscSF             sf;
1314:   PetscSFNode         *iremote;
1315:   PetscInt            rootspacesize,*rootspace,*rootspaceoffsets,nleaves;
1316:   const PetscInt      *rootdegrees;
1317:   PetscHSetI          ht,oht,*hta,*hto,*htd;
1318:   PetscInt            pn,pon,*c_rmtc,i,j,nzi,dnzi,htsize,htosize,*c_rmtj,off,*c_othj,rcvncols,sendncols,*c_rmtoffsets;
1319:   PetscInt            lidx,*rdj,col,pcstart,pcend,*dnz,*onz,am,arstart,arend,*poj,*pdj;
1320:   PetscInt            nalg=2,alg=0,offset,ii;
1321:   PetscMPIInt         owner;
1322:   PetscBool           flg;
1323:   const char          *algTypes[2] = {"merged","overlapping"};
1324:   const PetscInt      *mappingindices;
1325:   IS                  map;
1326:   PetscErrorCode      ierr;

1329:   MatCheckProduct(Cmpi,5);
1330:   if (Cmpi->product->data) SETERRQ(PetscObjectComm((PetscObject)Cmpi),PETSC_ERR_PLIB,"Product data not empty");
1331:   PetscObjectGetComm((PetscObject)A,&comm);

1333:   /* Create symbolic parallel matrix Cmpi */
1334:   MatGetLocalSize(P,NULL,&pn);
1335:   pn *= dof;
1336:   MatGetType(A,&mtype);
1337:   MatSetType(Cmpi,mtype);
1338:   MatSetSizes(Cmpi,pn,pn,PETSC_DETERMINE,PETSC_DETERMINE);

1340:   PetscNew(&ptap);
1341:   ptap->reuse = MAT_INITIAL_MATRIX;
1342:   ptap->algType = 3;

1344:   /* 0) Get P_oth by taking rows of P (= non-zero cols of local A) from other processors */
1345:   MatGetBrowsOfAcols_MPIXAIJ(A,P,dof,MAT_INITIAL_MATRIX,&ptap->P_oth);
1346:   PetscObjectQuery((PetscObject)ptap->P_oth,"aoffdiagtopothmapping",(PetscObject*)&map);

1348:   /* This equals the number of offdiag columns in P */
1349:   MatGetLocalSize(p->B,NULL,&pon);
1350:   pon *= dof;
1351:   /* offsets */
1352:   PetscMalloc1(pon+1,&ptap->c_rmti);
1353:   /* The number of columns we will send to remote ranks */
1354:   PetscMalloc1(pon,&c_rmtc);
1355:   PetscMalloc1(pon,&hta);
1356:   for (i=0; i<pon; i++) {
1357:     PetscHSetICreate(&hta[i]);
1358:   }
1359:   MatGetLocalSize(A,&am,NULL);
1360:   MatGetOwnershipRange(A,&arstart,&arend);
1361:   /* Create hash table to merge all columns for C(i, :) */
1362:   PetscHSetICreate(&ht);
1363:   PetscHSetICreate(&oht);
1364:   PetscMalloc2(pn,&htd,pn,&hto);
1365:   for (i=0; i<pn; i++) {
1366:     PetscHSetICreate(&htd[i]);
1367:     PetscHSetICreate(&hto[i]);
1368:   }

1370:   ISGetIndices(map,&mappingindices);
1371:   ptap->c_rmti[0] = 0;
1372:   /* 2) Pass 1: calculate the size of C_rmt (a matrix that needs to be sent to other processors) */
1373:   for (i=0; i<am && (pon || pn); i++) {
1374:     /* Form one row of AP */
1375:     PetscHSetIClear(ht);
1376:     PetscHSetIClear(oht);
1377:     offset = i%dof;
1378:     ii     = i/dof;
1379:     /* If both the diagonal and off-diagonal parts are empty, we should not do any calculation */
1380:     nzi = po->i[ii+1] - po->i[ii];
1381:     dnzi = pd->i[ii+1] - pd->i[ii];
1382:     if (!nzi && !dnzi) continue;

1384:     MatPtAPSymbolicComputeOneRowOfAP_private(A,P,ptap->P_oth,mappingindices,dof,i,ht,oht);
1385:     PetscHSetIGetSize(ht,&htsize);
1386:     PetscHSetIGetSize(oht,&htosize);
1387:     /* If AP is empty, just continue */
1388:     if (!(htsize+htosize)) continue;

1390:     /* Form remote C(ii, :) */
1391:     poj = po->j + po->i[ii];
1392:     for (j=0; j<nzi; j++) {
1393:       PetscHSetIUpdate(hta[poj[j]*dof+offset],ht);
1394:       PetscHSetIUpdate(hta[poj[j]*dof+offset],oht);
1395:     }

1397:     /* Form local C(ii, :) */
1398:     pdj = pd->j + pd->i[ii];
1399:     for (j=0; j<dnzi; j++) {
1400:       PetscHSetIUpdate(htd[pdj[j]*dof+offset],ht);
1401:       PetscHSetIUpdate(hto[pdj[j]*dof+offset],oht);
1402:     }
1403:   }

1405:   ISRestoreIndices(map,&mappingindices);

1407:   PetscHSetIDestroy(&ht);
1408:   PetscHSetIDestroy(&oht);

1410:   for (i=0; i<pon; i++) {
1411:     PetscHSetIGetSize(hta[i],&htsize);
1412:     ptap->c_rmti[i+1] = ptap->c_rmti[i] + htsize;
1413:     c_rmtc[i] = htsize;
1414:   }

1416:   PetscMalloc1(ptap->c_rmti[pon],&c_rmtj);

1418:   for (i=0; i<pon; i++) {
1419:     off = 0;
1420:     PetscHSetIGetElems(hta[i],&off,c_rmtj+ptap->c_rmti[i]);
1421:     PetscHSetIDestroy(&hta[i]);
1422:   }
1423:   PetscFree(hta);

1425:   PetscMalloc1(pon,&iremote);
1426:   for (i=0; i<pon; i++) {
1427:     owner = 0; lidx = 0;
1428:     offset = i%dof;
1429:     ii     = i/dof;
1430:     PetscLayoutFindOwnerIndex(P->cmap,p->garray[ii],&owner,&lidx);
1431:     iremote[i].index = lidx*dof+offset;
1432:     iremote[i].rank  = owner;
1433:   }

1435:   PetscSFCreate(comm,&sf);
1436:   PetscSFSetGraph(sf,pn,pon,NULL,PETSC_OWN_POINTER,iremote,PETSC_OWN_POINTER);
1437:   /* Reorder ranks properly so that the data handled by gather and scatter have the same order */
1438:   PetscSFSetRankOrder(sf,PETSC_TRUE);
1439:   PetscSFSetFromOptions(sf);
1440:   PetscSFSetUp(sf);
1441:   /* How many neighbors have contributions to my rows? */
1442:   PetscSFComputeDegreeBegin(sf,&rootdegrees);
1443:   PetscSFComputeDegreeEnd(sf,&rootdegrees);
1444:   rootspacesize = 0;
1445:   for (i = 0; i < pn; i++) {
1446:     rootspacesize += rootdegrees[i];
1447:   }
1448:   PetscMalloc1(rootspacesize,&rootspace);
1449:   PetscMalloc1(rootspacesize+1,&rootspaceoffsets);
1450:   /* Get information from the leaves:
1451:    * the number of columns other ranks contribute to my rows
1452:    */
1453:   PetscSFGatherBegin(sf,MPIU_INT,c_rmtc,rootspace);
1454:   PetscSFGatherEnd(sf,MPIU_INT,c_rmtc,rootspace);
1455:   PetscFree(c_rmtc);
1456:   PetscMalloc1(pn+1,&ptap->c_othi);
1457:   /* The number of columns received for each row */
1458:   ptap->c_othi[0]     = 0;
1459:   rootspacesize       = 0;
1460:   rootspaceoffsets[0] = 0;
1461:   for (i = 0; i < pn; i++) {
1462:     rcvncols = 0;
1463:     for (j = 0; j<rootdegrees[i]; j++) {
1464:       rcvncols += rootspace[rootspacesize];
1465:       rootspaceoffsets[rootspacesize+1] = rootspaceoffsets[rootspacesize] + rootspace[rootspacesize];
1466:       rootspacesize++;
1467:     }
1468:     ptap->c_othi[i+1] = ptap->c_othi[i] + rcvncols;
1469:   }
1470:   PetscFree(rootspace);
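  /* In this star forest the pon off-diagonal columns of P (times dof) are the leaves and the pn locally
     owned rows of C are the roots: PetscSFComputeDegree() tells how many remote contributors each owned row
     has, PetscSFGather() collects the per-leaf counts c_rmtc[] into rootspace[] ordered root by root, and
     the loop above sums them so that ptap->c_othi[] becomes a CSR-style offset array for the column indices
     to be received for each owned row. */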

1472:   PetscMalloc1(pon,&c_rmtoffsets);
1473:   PetscSFScatterBegin(sf,MPIU_INT,rootspaceoffsets,c_rmtoffsets);
1474:   PetscSFScatterEnd(sf,MPIU_INT,rootspaceoffsets,c_rmtoffsets);
1475:   PetscSFDestroy(&sf);
1476:   PetscFree(rootspaceoffsets);

1478:   PetscCalloc1(ptap->c_rmti[pon],&iremote);
1479:   nleaves = 0;
1480:   for (i = 0; i<pon; i++) {
1481:     owner = 0;
1482:     ii    = i/dof;
1483:     PetscLayoutFindOwnerIndex(P->cmap,p->garray[ii],&owner,NULL);
1484:     sendncols = ptap->c_rmti[i+1] - ptap->c_rmti[i];
1485:     for (j=0; j<sendncols; j++) {
1486:       iremote[nleaves].rank    = owner;
1487:       iremote[nleaves++].index = c_rmtoffsets[i] + j;
1488:     }
1489:   }
1490:   PetscFree(c_rmtoffsets);
1491:   PetscCalloc1(ptap->c_othi[pn],&c_othj);

1493:   PetscSFCreate(comm,&ptap->sf);
1494:   PetscSFSetGraph(ptap->sf,ptap->c_othi[pn],nleaves,NULL,PETSC_OWN_POINTER,iremote,PETSC_OWN_POINTER);
1495:   PetscSFSetFromOptions(ptap->sf);
1496:   /* One to one map */
1497:   PetscSFReduceBegin(ptap->sf,MPIU_INT,c_rmtj,c_othj,MPIU_REPLACE);
1498:   /* Get remote data */
1499:   PetscSFReduceEnd(ptap->sf,MPIU_INT,c_rmtj,c_othj,MPIU_REPLACE);
1500:   PetscFree(c_rmtj);
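  /* The second star forest has one leaf per column index that must leave this rank (ptap->c_rmti[pon] of
     them) and ptap->c_othi[pn] root slots on the receiving side.  Every leaf targets a distinct root slot,
     computed from the offsets scattered back in c_rmtoffsets[], so PetscSFReduce() with MPIU_REPLACE simply
     delivers the remote column indices c_rmtj[] into c_othj[] on the owning ranks. */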
1501:   PetscMalloc2(pn,&dnz,pn,&onz);
1502:   MatGetOwnershipRangeColumn(P,&pcstart,&pcend);
1503:   pcstart *= dof;
1504:   pcend   *= dof;
1505:   for (i = 0; i < pn; i++) {
1506:     nzi = ptap->c_othi[i+1] - ptap->c_othi[i];
1507:     rdj = c_othj + ptap->c_othi[i];
1508:     for (j = 0; j < nzi; j++) {
1509:       col =  rdj[j];
1510:       /* diag part */
1511:       if (col>=pcstart && col<pcend) {
1512:         PetscHSetIAdd(htd[i],col);
1513:       } else { /* off diag */
1514:         PetscHSetIAdd(hto[i],col);
1515:       }
1516:     }
1517:     PetscHSetIGetSize(htd[i],&htsize);
1518:     dnz[i] = htsize;
1519:     PetscHSetIDestroy(&htd[i]);
1520:     PetscHSetIGetSize(hto[i],&htsize);
1521:     onz[i] = htsize;
1522:     PetscHSetIDestroy(&hto[i]);
1523:   }

1525:   PetscFree2(htd,hto);
1526:   PetscFree(c_othj);

1528:   /* local sizes and preallocation */
1529:   MatSetSizes(Cmpi,pn,pn,PETSC_DETERMINE,PETSC_DETERMINE);
1530:   MatSetBlockSizes(Cmpi, dof>1? dof: P->cmap->bs,dof>1? dof: P->cmap->bs);
1531:   MatMPIAIJSetPreallocation(Cmpi,0,dnz,0,onz);
1532:   PetscFree2(dnz,onz);

1534:   /* attach the supporting struct to Cmpi for reuse */
1535:   Cmpi->product->data    = ptap;
1536:   Cmpi->product->destroy = MatDestroy_MPIAIJ_PtAP;
1537:   Cmpi->product->view    = MatView_MPIAIJ_PtAP;

1539:   /* Cmpi is not ready for use - assembly will be done by MatPtAPNumeric() */
1540:   Cmpi->assembled = PETSC_FALSE;
1541:   /* pick an algorithm */
1542:   PetscOptionsBegin(PetscObjectComm((PetscObject)A),((PetscObject)A)->prefix,"MatPtAP","Mat");
1543:   alg = 0;
1544:   PetscOptionsEList("-matptap_allatonce_via","PtAP allatonce numeric approach","MatPtAP",algTypes,nalg,algTypes[alg],&alg,&flg);
1545:   PetscOptionsEnd();
1546:   switch (alg) {
1547:     case 0:
1548:       Cmpi->ops->ptapnumeric = MatPtAPNumeric_MPIAIJ_MPIAIJ_allatonce_merged;
1549:       break;
1550:     case 1:
1551:       Cmpi->ops->ptapnumeric = MatPtAPNumeric_MPIAIJ_MPIAIJ_allatonce;
1552:       break;
1553:     default:
1554:       SETERRQ(PETSC_COMM_SELF,PETSC_ERR_ARG_WRONG,"Unsupported allatonce numerical algorithm");
1555:   }
1556:   return(0);
1557: }
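/* The numeric kernel for the all-at-once symbolic phase above is therefore selectable at run time with
   -matptap_allatonce_via, whose admissible values are the strings in algTypes[] defined earlier in this
   file: index 0 selects the merged all-at-once routine and index 1 the plain all-at-once routine. */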

1559: PetscErrorCode MatPtAPSymbolic_MPIAIJ_MPIAIJ_allatonce_merged(Mat A,Mat P,PetscReal fill,Mat C)
1560: {

1564:   MatPtAPSymbolic_MPIAIJ_MPIXAIJ_allatonce_merged(A,P,1,fill,C);
1565:   return(0);
1566: }

1568: PetscErrorCode MatPtAPSymbolic_MPIAIJ_MPIAIJ(Mat A,Mat P,PetscReal fill,Mat Cmpi)
1569: {
1570:   PetscErrorCode      ierr;
1571:   Mat_APMPI           *ptap;
1572:   Mat_MPIAIJ          *a=(Mat_MPIAIJ*)A->data,*p=(Mat_MPIAIJ*)P->data;
1573:   MPI_Comm            comm;
1574:   PetscMPIInt         size,rank;
1575:   PetscFreeSpaceList  free_space=NULL,current_space=NULL;
1576:   PetscInt            am=A->rmap->n,pm=P->rmap->n,pN=P->cmap->N,pn=P->cmap->n;
1577:   PetscInt            *lnk,i,k,pnz,row,nsend;
1578:   PetscBT             lnkbt;
1579:   PetscMPIInt         tagi,tagj,*len_si,*len_s,*len_ri,icompleted=0,nrecv;
1580:   PetscInt            **buf_rj,**buf_ri,**buf_ri_k;
1581:   PetscInt            len,proc,*dnz,*onz,*owners,nzi,nspacedouble;
1582:   PetscInt            nrows,*buf_s,*buf_si,*buf_si_i,**nextrow,**nextci;
1583:   MPI_Request         *swaits,*rwaits;
1584:   MPI_Status          *sstatus,rstatus;
1585:   PetscLayout         rowmap;
1586:   PetscInt            *owners_co,*coi,*coj;    /* i and j array of (p->B)^T*A*P - used in the communication */
1587:   PetscMPIInt         *len_r,*id_r;    /* array of length of comm->size, store send/recv matrix values */
1588:   PetscInt            *api,*apj,*Jptr,apnz,*prmap=p->garray,con,j,ap_rmax=0,Crmax,*aj,*ai,*pi;
1589:   Mat_SeqAIJ          *p_loc,*p_oth=NULL,*ad=(Mat_SeqAIJ*)(a->A)->data,*ao=NULL,*c_loc,*c_oth;
1590:   PetscScalar         *apv;
1591:   PetscTable          ta;
1592:   MatType             mtype;
1593:   const char          *prefix;
1594: #if defined(PETSC_USE_INFO)
1595:   PetscReal           apfill;
1596: #endif

1599:   MatCheckProduct(Cmpi,4);
1600:   if (Cmpi->product->data) SETERRQ(PetscObjectComm((PetscObject)Cmpi),PETSC_ERR_PLIB,"Product data not empty");
1601:   PetscObjectGetComm((PetscObject)A,&comm);
1602:   MPI_Comm_size(comm,&size);
1603:   MPI_Comm_rank(comm,&rank);

1605:   if (size > 1) ao = (Mat_SeqAIJ*)(a->B)->data;

1607:   /* create symbolic parallel matrix Cmpi */
1608:   MatGetType(A,&mtype);
1609:   MatSetType(Cmpi,mtype);

1611:   /* Do dense axpy in MatPtAPNumeric_MPIAIJ_MPIAIJ() */
1612:   Cmpi->ops->ptapnumeric = MatPtAPNumeric_MPIAIJ_MPIAIJ;

1614:   /* create struct Mat_APMPI and attach it to C later */
1615:   PetscNew(&ptap);
1616:   ptap->reuse = MAT_INITIAL_MATRIX;
1617:   ptap->algType = 1;

1619:   /* get P_oth by taking rows of P (= non-zero cols of local A) from other processors */
1620:   MatGetBrowsOfAoCols_MPIAIJ(A,P,MAT_INITIAL_MATRIX,&ptap->startsj_s,&ptap->startsj_r,&ptap->bufa,&ptap->P_oth);
1621:   /* get P_loc by taking all local rows of P */
1622:   MatMPIAIJGetLocalMat(P,MAT_INITIAL_MATRIX,&ptap->P_loc);

1624:   /* (0) compute Rd = Pd^T, Ro = Po^T  */
1625:   /* --------------------------------- */
1626:   MatTranspose(p->A,MAT_INITIAL_MATRIX,&ptap->Rd);
1627:   MatTranspose(p->B,MAT_INITIAL_MATRIX,&ptap->Ro);

1629:   /* (1) compute symbolic AP = A_loc*P = Ad*P_loc + Ao*P_oth (api,apj) */
1630:   /* ----------------------------------------------------------------- */
1631:   p_loc  = (Mat_SeqAIJ*)(ptap->P_loc)->data;
1632:   if (ptap->P_oth) p_oth  = (Mat_SeqAIJ*)(ptap->P_oth)->data;

1634:   /* create and initialize a linked list */
1635:   PetscTableCreate(pn,pN,&ta); /* for computing AP_loc and Cmpi */
1636:   MatRowMergeMax_SeqAIJ(p_loc,ptap->P_loc->rmap->N,ta);
1637:   MatRowMergeMax_SeqAIJ(p_oth,ptap->P_oth->rmap->N,ta);
1638:   PetscTableGetCount(ta,&Crmax); /* Crmax = nnz(sum of Prows) */
1639:   /* printf("[%d] est %d, Crmax %d; pN %d\n",rank,5*(p_loc->rmax+p_oth->rmax + (PetscInt)(1.e-2*pN)),Crmax,pN); */

1641:   PetscLLCondensedCreate(Crmax,pN,&lnk,&lnkbt);

1643:   /* Initial FreeSpace size is fill*(nnz(A) + nnz(P)) */
1644:   if (ao) {
1645:     PetscFreeSpaceGet(PetscRealIntMultTruncate(fill,PetscIntSumTruncate(ad->i[am],PetscIntSumTruncate(ao->i[am],p_loc->i[pm]))),&free_space);
1646:   } else {
1647:     PetscFreeSpaceGet(PetscRealIntMultTruncate(fill,PetscIntSumTruncate(ad->i[am],p_loc->i[pm])),&free_space);
1648:   }
1649:   current_space = free_space;
1650:   nspacedouble  = 0;

1652:   PetscMalloc1(am+1,&api);
1653:   api[0] = 0;
1654:   for (i=0; i<am; i++) {
1655:     /* diagonal portion: Ad[i,:]*P */
1656:     ai = ad->i; pi = p_loc->i;
1657:     nzi = ai[i+1] - ai[i];
1658:     aj  = ad->j + ai[i];
1659:     for (j=0; j<nzi; j++) {
1660:       row  = aj[j];
1661:       pnz  = pi[row+1] - pi[row];
1662:       Jptr = p_loc->j + pi[row];
1663:       /* add non-zero cols of P into the sorted linked list lnk */
1664:       PetscLLCondensedAddSorted(pnz,Jptr,lnk,lnkbt);
1665:     }
1666:     /* off-diagonal portion: Ao[i,:]*P */
1667:     if (ao) {
1668:       ai = ao->i; pi = p_oth->i;
1669:       nzi = ai[i+1] - ai[i];
1670:       aj  = ao->j + ai[i];
1671:       for (j=0; j<nzi; j++) {
1672:         row  = aj[j];
1673:         pnz  = pi[row+1] - pi[row];
1674:         Jptr = p_oth->j + pi[row];
1675:         PetscLLCondensedAddSorted(pnz,Jptr,lnk,lnkbt);
1676:       }
1677:     }
1678:     apnz     = lnk[0];
1679:     api[i+1] = api[i] + apnz;
1680:     if (ap_rmax < apnz) ap_rmax = apnz;

1682:     /* if free space is not available, double the total space in the list */
1683:     if (current_space->local_remaining<apnz) {
1684:       PetscFreeSpaceGet(PetscIntSumTruncate(apnz,current_space->total_array_size),&current_space);
1685:       nspacedouble++;
1686:     }

1688:     /* Copy data into free space, then initialize lnk */
1689:     PetscLLCondensedClean(pN,apnz,current_space->array,lnk,lnkbt);

1691:     current_space->array           += apnz;
1692:     current_space->local_used      += apnz;
1693:     current_space->local_remaining -= apnz;
1694:   }
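  /* Each iteration above forms row i of AP symbolically: the sorted column lists of the P_loc/P_oth rows
     selected by the nonzeros of A[i,:] are merged into the condensed linked list lnk, which keeps the union
     sorted and duplicate free; lnk[0] is the length of the merged row, and PetscLLCondensedClean() copies
     the indices into the free-space buffer while resetting the list for the next row. */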
1695:   /* Allocate space for apj and apv, initialize apj, and */
1696:   /* destroy list of free space and other temporary array(s) */
1697:   PetscMalloc2(api[am],&apj,api[am],&apv);
1698:   PetscFreeSpaceContiguous(&free_space,apj);
1699:   PetscLLDestroy(lnk,lnkbt);

1701:   /* Create AP_loc for reuse */
1702:   MatCreateSeqAIJWithArrays(PETSC_COMM_SELF,am,pN,api,apj,apv,&ptap->AP_loc);

1704: #if defined(PETSC_USE_INFO)
1705:   if (ao) {
1706:     apfill = (PetscReal)api[am]/(ad->i[am]+ao->i[am]+p_loc->i[pm]+1);
1707:   } else {
1708:     apfill = (PetscReal)api[am]/(ad->i[am]+p_loc->i[pm]+1);
1709:   }
1710:   ptap->AP_loc->info.mallocs           = nspacedouble;
1711:   ptap->AP_loc->info.fill_ratio_given  = fill;
1712:   ptap->AP_loc->info.fill_ratio_needed = apfill;

1714:   if (api[am]) {
1715:     PetscInfo3(ptap->AP_loc,"Nonscalable algorithm, AP_loc reallocs %D; Fill ratio: given %g needed %g.\n",nspacedouble,(double)fill,(double)apfill);
1716:     PetscInfo1(ptap->AP_loc,"Use MatPtAP(A,B,MatReuse,%g,&C) for best AP_loc performance.\n",(double)apfill);
1717:   } else {
1718:     PetscInfo(ptap->AP_loc,"Nonscalable algorithm, AP_loc is empty \n");
1719:   }
1720: #endif

1722:   /* (2-1) compute symbolic Co = Ro*AP_loc  */
1723:   /* ------------------------------------ */
1724:   MatGetOptionsPrefix(A,&prefix);
1725:   MatSetOptionsPrefix(ptap->Ro,prefix);
1726:   MatAppendOptionsPrefix(ptap->Ro,"inner_offdiag_");

1728:   MatProductCreate(ptap->Ro,ptap->AP_loc,NULL,&ptap->C_oth);
1729:   MatGetOptionsPrefix(Cmpi,&prefix);
1730:   MatSetOptionsPrefix(ptap->C_oth,prefix);
1731:   MatAppendOptionsPrefix(ptap->C_oth,"inner_C_oth_");

1733:   MatProductSetType(ptap->C_oth,MATPRODUCT_AB);
1734:   MatProductSetAlgorithm(ptap->C_oth,"default");
1735:   MatProductSetFill(ptap->C_oth,fill);
1736:   MatProductSetFromOptions(ptap->C_oth);
1737:   MatProductSymbolic(ptap->C_oth);

1739:   /* (3) send coj of C_oth to other processors  */
1740:   /* ------------------------------------------ */
1741:   /* determine row ownership */
1742:   PetscLayoutCreate(comm,&rowmap);
1743:   rowmap->n  = pn;
1744:   rowmap->bs = 1;
1745:   PetscLayoutSetUp(rowmap);
1746:   owners = rowmap->range;

1748:   /* determine the number of messages to send, their lengths */
1749:   PetscMalloc4(size,&len_s,size,&len_si,size,&sstatus,size+2,&owners_co);
1750:   PetscArrayzero(len_s,size);
1751:   PetscArrayzero(len_si,size);

1753:   c_oth = (Mat_SeqAIJ*)ptap->C_oth->data;
1754:   coi   = c_oth->i; coj = c_oth->j;
1755:   con   = ptap->C_oth->rmap->n;
1756:   proc  = 0;
1757:   for (i=0; i<con; i++) {
1758:     while (prmap[i] >= owners[proc+1]) proc++;
1759:     len_si[proc]++;               /* num of rows in Co(=Pt*AP) to be sent to [proc] */
1760:     len_s[proc] += coi[i+1] - coi[i]; /* num of nonzeros in Co to be sent to [proc] */
1761:   }

1763:   len          = 0; /* total length of buf_s[], see (4) */
1764:   owners_co[0] = 0;
1765:   nsend        = 0;
1766:   for (proc=0; proc<size; proc++) {
1767:     owners_co[proc+1] = owners_co[proc] + len_si[proc];
1768:     if (len_s[proc]) {
1769:       nsend++;
1770:       len_si[proc] = 2*(len_si[proc] + 1); /* length of buf_si to be sent to [proc] */
1771:       len         += len_si[proc];
1772:     }
1773:   }
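  /* len_si[proc] = 2*(nrows+1) because each i-structure message carries a one-entry header (nrows), the
     nrows local row indices, and the nrows+1 CSR offsets for those rows; see the packing loop in step (4)
     below. */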

1775:   /* determine the number and length of messages to receive for coi and coj  */
1776:   PetscGatherNumberOfMessages(comm,NULL,len_s,&nrecv);
1777:   PetscGatherMessageLengths2(comm,nsend,nrecv,len_s,len_si,&id_r,&len_r,&len_ri);

1779:   /* post the Irecv and Isend of coj */
1780:   PetscCommGetNewTag(comm,&tagj);
1781:   PetscPostIrecvInt(comm,tagj,nrecv,id_r,len_r,&buf_rj,&rwaits);
1782:   PetscMalloc1(nsend+1,&swaits);
1783:   for (proc=0, k=0; proc<size; proc++) {
1784:     if (!len_s[proc]) continue;
1785:     i    = owners_co[proc];
1786:     MPI_Isend(coj+coi[i],len_s[proc],MPIU_INT,proc,tagj,comm,swaits+k);
1787:     k++;
1788:   }

1790:   /* (2-2) compute symbolic C_loc = Rd*AP_loc */
1791:   /* ---------------------------------------- */
1792:   MatSetOptionsPrefix(ptap->Rd,prefix);
1793:   MatAppendOptionsPrefix(ptap->Rd,"inner_diag_");

1795:   MatProductCreate(ptap->Rd,ptap->AP_loc,NULL,&ptap->C_loc);
1796:   MatGetOptionsPrefix(Cmpi,&prefix);
1797:   MatSetOptionsPrefix(ptap->C_loc,prefix);
1798:   MatAppendOptionsPrefix(ptap->C_loc,"inner_C_loc_");

1800:   MatProductSetType(ptap->C_loc,MATPRODUCT_AB);
1801:   MatProductSetAlgorithm(ptap->C_loc,"default");
1802:   MatProductSetFill(ptap->C_loc,fill);
1803:   MatProductSetFromOptions(ptap->C_loc);
1804:   MatProductSymbolic(ptap->C_loc);

1806:   c_loc = (Mat_SeqAIJ*)ptap->C_loc->data;

1808:   /* receives coj are complete */
1809:   for (i=0; i<nrecv; i++) {
1810:     MPI_Waitany(nrecv,rwaits,&icompleted,&rstatus);
1811:   }
1812:   PetscFree(rwaits);
1813:   if (nsend) {MPI_Waitall(nsend,swaits,sstatus);}

1815:   /* add received column indices into ta to update Crmax */
1816:   for (k=0; k<nrecv; k++) {/* k-th received message */
1817:     Jptr = buf_rj[k];
1818:     for (j=0; j<len_r[k]; j++) {
1819:       PetscTableAdd(ta,*(Jptr+j)+1,1,INSERT_VALUES);
1820:     }
1821:   }
1822:   PetscTableGetCount(ta,&Crmax);
1823:   PetscTableDestroy(&ta);

1825:   /* (4) send and recv coi */
1826:   /*-----------------------*/
1827:   PetscCommGetNewTag(comm,&tagi);
1828:   PetscPostIrecvInt(comm,tagi,nrecv,id_r,len_ri,&buf_ri,&rwaits);
1829:   PetscMalloc1(len+1,&buf_s);
1830:   buf_si = buf_s;  /* points to the beginning of k-th msg to be sent */
1831:   for (proc=0,k=0; proc<size; proc++) {
1832:     if (!len_s[proc]) continue;
1833:     /* form outgoing message for i-structure:
1834:          buf_si[0]:                 nrows to be sent
1835:                [1:nrows]:           row index (global)
1836:                [nrows+1:2*nrows+1]: i-structure index
1837:     */
1838:     /*-------------------------------------------*/
1839:     nrows       = len_si[proc]/2 - 1; /* num of rows in Co to be sent to [proc] */
1840:     buf_si_i    = buf_si + nrows+1;
1841:     buf_si[0]   = nrows;
1842:     buf_si_i[0] = 0;
1843:     nrows       = 0;
1844:     for (i=owners_co[proc]; i<owners_co[proc+1]; i++) {
1845:       nzi = coi[i+1] - coi[i];
1846:       buf_si_i[nrows+1] = buf_si_i[nrows] + nzi;  /* i-structure */
1847:       buf_si[nrows+1]   = prmap[i] -owners[proc]; /* local row index */
1848:       nrows++;
1849:     }
1850:     MPI_Isend(buf_si,len_si[proc],MPIU_INT,proc,tagi,comm,swaits+k);
1851:     k++;
1852:     buf_si += len_si[proc];
1853:   }
1854:   for (i=0; i<nrecv; i++) {
1855:     MPI_Waitany(nrecv,rwaits,&icompleted,&rstatus);
1856:   }
1857:   PetscFree(rwaits);
1858:   if (nsend) {MPI_Waitall(nsend,swaits,sstatus);}

1860:   PetscFree4(len_s,len_si,sstatus,owners_co);
1861:   PetscFree(len_ri);
1862:   PetscFree(swaits);
1863:   PetscFree(buf_s);

1865:   /* (5) compute the local portion of Cmpi      */
1866:   /* ------------------------------------------ */
1867:   /* set initial free space to be Crmax, sufficient for holding nonzeros in each row of Cmpi */
1868:   PetscFreeSpaceGet(Crmax,&free_space);
1869:   current_space = free_space;

1871:   PetscMalloc3(nrecv,&buf_ri_k,nrecv,&nextrow,nrecv,&nextci);
1872:   for (k=0; k<nrecv; k++) {
1873:     buf_ri_k[k] = buf_ri[k]; /* beginning of k-th recved i-structure */
1874:     nrows       = *buf_ri_k[k];
1875:     nextrow[k]  = buf_ri_k[k] + 1;  /* next row number of k-th recved i-structure */
1876:     nextci[k]   = buf_ri_k[k] + (nrows + 1); /* points to the i-structure (offsets) of the k-th recved message */
1877:   }

1879:   MatPreallocateInitialize(comm,pn,pn,dnz,onz);
1880:   PetscLLCondensedCreate(Crmax,pN,&lnk,&lnkbt);
1881:   for (i=0; i<pn; i++) {
1882:     /* add C_loc into Cmpi */
1883:     nzi  = c_loc->i[i+1] - c_loc->i[i];
1884:     Jptr = c_loc->j + c_loc->i[i];
1885:     PetscLLCondensedAddSorted(nzi,Jptr,lnk,lnkbt);

1887:     /* add received col data into lnk */
1888:     for (k=0; k<nrecv; k++) { /* k-th received message */
1889:       if (i == *nextrow[k]) { /* i-th row */
1890:         nzi  = *(nextci[k]+1) - *nextci[k];
1891:         Jptr = buf_rj[k] + *nextci[k];
1892:         PetscLLCondensedAddSorted(nzi,Jptr,lnk,lnkbt);
1893:         nextrow[k]++; nextci[k]++;
1894:       }
1895:     }
1896:     nzi = lnk[0];

1898:     /* copy data into free space, then initialize lnk */
1899:     PetscLLCondensedClean(pN,nzi,current_space->array,lnk,lnkbt);
1900:     MatPreallocateSet(i+owners[rank],nzi,current_space->array,dnz,onz);
1901:   }
1902:   PetscFree3(buf_ri_k,nextrow,nextci);
1903:   PetscLLDestroy(lnk,lnkbt);
1904:   PetscFreeSpaceDestroy(free_space);

1906:   /* local sizes and preallocation */
1907:   MatSetSizes(Cmpi,pn,pn,PETSC_DETERMINE,PETSC_DETERMINE);
1908:   if (P->cmap->bs > 0) {
1909:     PetscLayoutSetBlockSize(Cmpi->rmap,P->cmap->bs);
1910:     PetscLayoutSetBlockSize(Cmpi->cmap,P->cmap->bs);
1911:   }
1912:   MatMPIAIJSetPreallocation(Cmpi,0,dnz,0,onz);
1913:   MatPreallocateFinalize(dnz,onz);

1915:   /* members in merge */
1916:   PetscFree(id_r);
1917:   PetscFree(len_r);
1918:   PetscFree(buf_ri[0]);
1919:   PetscFree(buf_ri);
1920:   PetscFree(buf_rj[0]);
1921:   PetscFree(buf_rj);
1922:   PetscLayoutDestroy(&rowmap);

1924:   PetscCalloc1(pN,&ptap->apa);

1926:   /* attach the supporting struct to Cmpi for reuse */
1927:   Cmpi->product->data    = ptap;
1928:   Cmpi->product->destroy = MatDestroy_MPIAIJ_PtAP;
1929:   Cmpi->product->view    = MatView_MPIAIJ_PtAP;

1931:   /* Cmpi is not ready for use - assembly will be done by MatPtAPNumeric() */
1932:   Cmpi->assembled = PETSC_FALSE;
1933:   return(0);
1934: }

1936: PetscErrorCode MatPtAPNumeric_MPIAIJ_MPIAIJ(Mat A,Mat P,Mat C)
1937: {
1938:   PetscErrorCode    ierr;
1939:   Mat_MPIAIJ        *a=(Mat_MPIAIJ*)A->data,*p=(Mat_MPIAIJ*)P->data;
1940:   Mat_SeqAIJ        *ad=(Mat_SeqAIJ*)(a->A)->data,*ao=(Mat_SeqAIJ*)(a->B)->data;
1941:   Mat_SeqAIJ        *ap,*p_loc,*p_oth=NULL,*c_seq;
1942:   Mat_APMPI         *ptap;
1943:   Mat               AP_loc,C_loc,C_oth;
1944:   PetscInt          i,rstart,rend,cm,ncols,row;
1945:   PetscInt          *api,*apj,am = A->rmap->n,j,col,apnz;
1946:   PetscScalar       *apa;
1947:   const PetscInt    *cols;
1948:   const PetscScalar *vals;

1951:   MatCheckProduct(C,3);
1952:   ptap = (Mat_APMPI*)C->product->data;
1953:   if (!ptap) SETERRQ(PetscObjectComm((PetscObject)C),PETSC_ERR_ARG_WRONGSTATE,"PtAP cannot be computed. Missing data");
1954:   if (!ptap->AP_loc) SETERRQ(PetscObjectComm((PetscObject)C),PETSC_ERR_ARG_WRONGSTATE,"PtAP cannot be reused. Do not call MatProductClear()");

1956:   MatZeroEntries(C);
1957:   /* 1) get R = Pd^T,Ro = Po^T */
1958:   if (ptap->reuse == MAT_REUSE_MATRIX) {
1959:     MatTranspose(p->A,MAT_REUSE_MATRIX,&ptap->Rd);
1960:     MatTranspose(p->B,MAT_REUSE_MATRIX,&ptap->Ro);
1961:   }

1963:   /* 2) get AP_loc */
1964:   AP_loc = ptap->AP_loc;
1965:   ap = (Mat_SeqAIJ*)AP_loc->data;

1967:   /* 2-1) get P_oth = ptap->P_oth  and P_loc = ptap->P_loc */
1968:   /*-----------------------------------------------------*/
1969:   if (ptap->reuse == MAT_REUSE_MATRIX) {
1970:     /* P_oth and P_loc are obtained in MatPtAPSymbolic() when reuse == MAT_INITIAL_MATRIX */
1971:     MatGetBrowsOfAoCols_MPIAIJ(A,P,MAT_REUSE_MATRIX,&ptap->startsj_s,&ptap->startsj_r,&ptap->bufa,&ptap->P_oth);
1972:     MatMPIAIJGetLocalMat(P,MAT_REUSE_MATRIX,&ptap->P_loc);
1973:   }

1975:   /* 2-2) compute numeric A_loc*P - dominating part */
1976:   /* ---------------------------------------------- */
1977:   /* get data from symbolic products */
1978:   p_loc = (Mat_SeqAIJ*)(ptap->P_loc)->data;
1979:   if (ptap->P_oth) {
1980:     p_oth = (Mat_SeqAIJ*)(ptap->P_oth)->data;
1981:   }
1982:   apa   = ptap->apa;
1983:   api   = ap->i;
1984:   apj   = ap->j;
1985:   for (i=0; i<am; i++) {
1986:     /* AP[i,:] = A[i,:]*P = Ad[i,:]*P_loc + Ao[i,:]*P_oth */
1987:     AProw_nonscalable(i,ad,ao,p_loc,p_oth,apa);
1988:     apnz = api[i+1] - api[i];
1989:     for (j=0; j<apnz; j++) {
1990:       col = apj[j+api[i]];
1991:       ap->a[j+ap->i[i]] = apa[col];
1992:       apa[col] = 0.0;
1993:     }
1994:   }
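  /* apa is a dense work array of global column length pN, allocated in the symbolic phase: for each row,
     AProw_nonscalable() scatters the products of A[i,:] with P_loc/P_oth into it, the loop above gathers
     the values back into the CSR arrays of AP_loc following the column pattern fixed symbolically, and
     each used entry is zeroed again so the array is clean for the next row.  This dense accumulator is the
     main reason this variant is labeled nonscalable: the work space grows with the global number of columns of P. */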
1995:   /* We have modified the contents of local matrix AP_loc and must increase its ObjectState, since we are not doing AssemblyBegin/End on it. */
1996:   PetscObjectStateIncrease((PetscObject)AP_loc);

1998:   /* 3) C_loc = Rd*AP_loc, C_oth = Ro*AP_loc */
1999:   ((ptap->C_loc)->ops->matmultnumeric)(ptap->Rd,AP_loc,ptap->C_loc);
2000:   ((ptap->C_oth)->ops->matmultnumeric)(ptap->Ro,AP_loc,ptap->C_oth);
2001:   C_loc = ptap->C_loc;
2002:   C_oth = ptap->C_oth;

2004:   /* add C_loc and C_oth to C */
2005:   MatGetOwnershipRange(C,&rstart,&rend);

2007:   /* C_loc -> C */
2008:   cm    = C_loc->rmap->N;
2009:   c_seq = (Mat_SeqAIJ*)C_loc->data;
2010:   cols = c_seq->j;
2011:   vals = c_seq->a;


2014:   /* The (fast) MatSetValues_MPIAIJ_CopyFromCSRFormat function can only be used when C->was_assembled is PETSC_FALSE and */
2015:   /* when there are no off-processor parts. */
2016:   /* If was_assembled is true, the assignment aj[rowstart_diag+dnz_row] = mat_j[col] - cstart; in MatSetValues_MPIAIJ_CopyFromCSRFormat */
2017:   /* is no longer valid, so the more general MatSetValues_MPIAIJ() has to be used, where each column index is looked up in */
2018:   /* a table and additional bookkeeping is performed. */
2019:   if (C->assembled) {
2020:     C->was_assembled = PETSC_TRUE;
2021:     C->assembled     = PETSC_FALSE;
2022:   }
2023:   if (C->was_assembled) {
2024:     for (i=0; i<cm; i++) {
2025:       ncols = c_seq->i[i+1] - c_seq->i[i];
2026:       row = rstart + i;
2027:       MatSetValues_MPIAIJ(C,1,&row,ncols,cols,vals,ADD_VALUES);
2028:       cols += ncols; vals += ncols;
2029:     }
2030:   } else {
2031:     MatSetValues_MPIAIJ_CopyFromCSRFormat(C,c_seq->j,c_seq->i,c_seq->a);
2032:   }

2034:   /* Co -> C, off-processor part */
2035:   cm = C_oth->rmap->N;
2036:   c_seq = (Mat_SeqAIJ*)C_oth->data;
2037:   cols = c_seq->j;
2038:   vals = c_seq->a;
2039:   for (i=0; i<cm; i++) {
2040:     ncols = c_seq->i[i+1] - c_seq->i[i];
2041:     row = p->garray[i];
2042:     MatSetValues(C,1,&row,ncols,cols,vals,ADD_VALUES);
2043:     cols += ncols; vals += ncols;
2044:   }

2046:   MatAssemblyBegin(C,MAT_FINAL_ASSEMBLY);
2047:   MatAssemblyEnd(C,MAT_FINAL_ASSEMBLY);

2049:   ptap->reuse = MAT_REUSE_MATRIX;
2050:   return(0);
2051: }

2053: PETSC_INTERN PetscErrorCode MatProductSymbolic_PtAP_MPIAIJ_MPIAIJ(Mat C)
2054: {
2055:   PetscErrorCode      ierr;
2056:   Mat_Product         *product = C->product;
2057:   Mat                 A=product->A,P=product->B;
2058:   MatProductAlgorithm alg=product->alg;
2059:   PetscReal           fill=product->fill;
2060:   PetscBool           flg;

2063:   /* scalable: do R=P^T locally, then C=R*A*P */
2064:   PetscStrcmp(alg,"scalable",&flg);
2065:   if (flg) {
2066:     MatPtAPSymbolic_MPIAIJ_MPIAIJ_scalable(A,P,product->fill,C);
2067:     C->ops->productnumeric = MatProductNumeric_PtAP;
2068:     goto next;
2069:   }

2071:   /* nonscalable: do R=P^T locally, then C=R*A*P */
2072:   PetscStrcmp(alg,"nonscalable",&flg);
2073:   if (flg) {
2074:     MatPtAPSymbolic_MPIAIJ_MPIAIJ(A,P,fill,C);
2075:     goto next;
2076:   }

2078:   /* allatonce */
2079:   PetscStrcmp(alg,"allatonce",&flg);
2080:   if (flg) {
2081:     MatPtAPSymbolic_MPIAIJ_MPIAIJ_allatonce(A,P,fill,C);
2082:     goto next;
2083:   }

2085:   /* allatonce_merged */
2086:   PetscStrcmp(alg,"allatonce_merged",&flg);
2087:   if (flg) {
2088:     MatPtAPSymbolic_MPIAIJ_MPIAIJ_allatonce_merged(A,P,fill,C);
2089:     goto next;
2090:   }

2092:   /* hypre */
2093: #if defined(PETSC_HAVE_HYPRE)
2094:   PetscStrcmp(alg,"hypre",&flg);
2095:   if (flg) {
2096:     MatPtAPSymbolic_AIJ_AIJ_wHYPRE(A,P,fill,C);
2097:     C->ops->productnumeric = MatProductNumeric_PtAP;
2098:     return(0);
2099:   }
2100: #endif
2101:   SETERRQ(PetscObjectComm((PetscObject)C),PETSC_ERR_SUP,"Mat Product Algorithm is not supported");

2103: next:
2104:   C->ops->productnumeric = MatProductNumeric_PtAP;
2105:   return(0);
2106: }
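/*
   For reference, a minimal sketch of how a caller reaches the dispatcher above through the MatProduct API;
   the fill value 2.0 is illustrative and error checking is omitted:

     Mat C;
     MatProductCreate(A,P,NULL,&C);
     MatProductSetType(C,MATPRODUCT_PtAP);
     MatProductSetAlgorithm(C,"allatonce");    (or "scalable", "nonscalable", "allatonce_merged", "hypre")
     MatProductSetFill(C,2.0);
     MatProductSetFromOptions(C);
     MatProductSymbolic(C);                    (ends up in MatProductSymbolic_PtAP_MPIAIJ_MPIAIJ() above for MPIAIJ A and P)
     MatProductNumeric(C);

   The convenience routine MatPtAP(A,P,MAT_INITIAL_MATRIX,2.0,&C) performs an equivalent sequence.
*/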