Actual source code: matelem.cxx

petsc-master 2020-07-09
#include <petsc/private/petscelemental.h>

/*
    The variable Petsc_Elemental_keyval is used to indicate an MPI attribute that
  is attached to a communicator; in this case the attribute is a Mat_Elemental_Grid.
*/
static PetscMPIInt Petsc_Elemental_keyval = MPI_KEYVAL_INVALID;
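/*
   A sketch of the attribute-caching lifecycle this keyval supports (the real logic
   lives in MatCreate_Elemental() and MatDestroy_Elemental() below; shown here only
   for orientation):

     if (Petsc_Elemental_keyval == MPI_KEYVAL_INVALID) {            // once per process
       MPI_Comm_create_keyval(MPI_COMM_NULL_COPY_FN,MPI_COMM_NULL_DELETE_FN,&Petsc_Elemental_keyval,(void*)0);
     }
     MPI_Comm_get_attr(icomm,Petsc_Elemental_keyval,(void**)&commgrid,&flg);
     if (!flg) {                        // first MATELEMENTAL Mat on this communicator:
       ...                              // allocate commgrid, create its El::Grid, refct = 1
       MPI_Comm_set_attr(icomm,Petsc_Elemental_keyval,(void*)commgrid);
     } else commgrid->grid_refct++;     // later Mats share the cached grid
*/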

static PetscErrorCode MatView_Elemental(Mat A,PetscViewer viewer)
{
  Mat_Elemental  *a = (Mat_Elemental*)A->data;
  PetscBool      iascii;

  PetscObjectTypeCompare((PetscObject)viewer,PETSCVIEWERASCII,&iascii);
  if (iascii) {
    PetscViewerFormat format;
    PetscViewerGetFormat(viewer,&format);
    if (format == PETSC_VIEWER_ASCII_INFO) {
      /* call elemental viewing function */
      PetscViewerASCIIPrintf(viewer,"Elemental run parameters:\n");
      PetscViewerASCIIPrintf(viewer,"  allocated entries=%d\n",(*a->emat).AllocatedMemory());
      PetscViewerASCIIPrintf(viewer,"  grid height=%d, grid width=%d\n",(*a->emat).Grid().Height(),(*a->emat).Grid().Width());
      if (format == PETSC_VIEWER_ASCII_FACTOR_INFO) {
        /* call elemental viewing function */
        PetscPrintf(PetscObjectComm((PetscObject)viewer),"test matview_elemental 2\n");
      }

    } else if (format == PETSC_VIEWER_DEFAULT) {
      PetscViewerASCIIUseTabs(viewer,PETSC_FALSE);
      El::Print( *a->emat, "Elemental matrix (cyclic ordering)" );
      PetscViewerASCIIUseTabs(viewer,PETSC_TRUE);
      if (A->factortype == MAT_FACTOR_NONE){
        Mat Adense;
        MatConvert(A,MATDENSE,MAT_INITIAL_MATRIX,&Adense);
        MatView(Adense,viewer);
        MatDestroy(&Adense);
      }
    } else SETERRQ(PetscObjectComm((PetscObject)viewer),PETSC_ERR_SUP,"Format");
  } else {
    /* convert to dense format and call MatView() */
    Mat Adense;
    MatConvert(A,MATDENSE,MAT_INITIAL_MATRIX,&Adense);
    MatView(Adense,viewer);
    MatDestroy(&Adense);
  }
  return(0);
}

static PetscErrorCode MatGetInfo_Elemental(Mat A,MatInfoType flag,MatInfo *info)
{
  Mat_Elemental  *a = (Mat_Elemental*)A->data;

  info->block_size = 1.0;

  if (flag == MAT_LOCAL) {
    info->nz_allocated   = (*a->emat).AllocatedMemory(); /* locally allocated */
    info->nz_used        = info->nz_allocated;
  } else if (flag == MAT_GLOBAL_MAX) {
    /* not implemented yet; see MatGetInfo_MPIAIJ() for how to MPIU_Allreduce() the global info->nz_allocated */
  } else if (flag == MAT_GLOBAL_SUM) {
    /* not implemented yet; returns the locally allocated values instead */
    info->nz_allocated   = (*a->emat).AllocatedMemory(); /* locally allocated */
    info->nz_used        = info->nz_allocated; /* assume Elemental does accurate allocation */
  }

  info->nz_unneeded       = 0.0;
  info->assemblies        = A->num_ass;
  info->mallocs           = 0;
  info->memory            = ((PetscObject)A)->mem;
  info->fill_ratio_given  = 0; /* determined by Elemental */
  info->fill_ratio_needed = 0;
  info->factor_mallocs    = 0;
  return(0);
}

PetscErrorCode MatSetOption_Elemental(Mat A,MatOption op,PetscBool flg)
{
  Mat_Elemental  *a = (Mat_Elemental*)A->data;

  switch (op) {
  case MAT_NEW_NONZERO_LOCATIONS:
  case MAT_NEW_NONZERO_LOCATION_ERR:
  case MAT_NEW_NONZERO_ALLOCATION_ERR:
  case MAT_SYMMETRIC:
  case MAT_SORTED_FULL:
  case MAT_HERMITIAN:
    break;
  case MAT_ROW_ORIENTED:
    a->roworiented = flg;
    break;
  default:
    SETERRQ1(PETSC_COMM_SELF,PETSC_ERR_SUP,"unknown option %s",MatOptions[op]);
  }
  return(0);
}

static PetscErrorCode MatSetValues_Elemental(Mat A,PetscInt nr,const PetscInt *rows,PetscInt nc,const PetscInt *cols,const PetscScalar *vals,InsertMode imode)
{
  Mat_Elemental  *a = (Mat_Elemental*)A->data;
  PetscInt       i,j,rrank,ridx,crank,cidx,erow,ecol,numQueues=0;

  // TODO: Initialize matrix to all zeros?

  // Count the number of queues from this process
  if (a->roworiented) {
    for (i=0; i<nr; i++) {
      if (rows[i] < 0) continue;
      P2RO(A,0,rows[i],&rrank,&ridx);
      RO2E(A,0,rrank,ridx,&erow);
      if (rrank < 0 || ridx < 0 || erow < 0) SETERRQ(PetscObjectComm((PetscObject)A),PETSC_ERR_PLIB,"Incorrect row translation");
      for (j=0; j<nc; j++) {
        if (cols[j] < 0) continue;
        P2RO(A,1,cols[j],&crank,&cidx);
        RO2E(A,1,crank,cidx,&ecol);
        if (crank < 0 || cidx < 0 || ecol < 0) SETERRQ(PetscObjectComm((PetscObject)A),PETSC_ERR_PLIB,"Incorrect col translation");
        if (!a->emat->IsLocal(erow,ecol)) { /* off-process entry: will be queued for a remote update */
          if (imode != ADD_VALUES) SETERRQ(PETSC_COMM_SELF,PETSC_ERR_SUP,"Only ADD_VALUES to off-processor entry is supported");
          ++numQueues;
          continue;
        }
        switch (imode) {
        case INSERT_VALUES: a->emat->Set(erow,ecol,(PetscElemScalar)vals[i*nc+j]); break;
        case ADD_VALUES: a->emat->Update(erow,ecol,(PetscElemScalar)vals[i*nc+j]); break;
        default: SETERRQ1(PetscObjectComm((PetscObject)A),PETSC_ERR_SUP,"No support for InsertMode %d",(int)imode);
        }
      }
    }

    a->emat->Reserve(numQueues);
    for (i=0; i<nr; i++) {
      if (rows[i] < 0) continue;
      P2RO(A,0,rows[i],&rrank,&ridx);
      RO2E(A,0,rrank,ridx,&erow);
      for (j=0; j<nc; j++) {
        if (cols[j] < 0) continue;
        P2RO(A,1,cols[j],&crank,&cidx);
        RO2E(A,1,crank,cidx,&ecol);
        if (!a->emat->IsLocal(erow,ecol)) { /* queue the update of the off-process entry */
          a->emat->QueueUpdate(erow,ecol,vals[i*nc+j]);
        }
      }
    }
  } else { /* column-oriented */
    for (j=0; j<nc; j++) {
      if (cols[j] < 0) continue;
      P2RO(A,1,cols[j],&crank,&cidx);
      RO2E(A,1,crank,cidx,&ecol);
      if (crank < 0 || cidx < 0 || ecol < 0) SETERRQ(PetscObjectComm((PetscObject)A),PETSC_ERR_PLIB,"Incorrect col translation");
      for (i=0; i<nr; i++) {
        if (rows[i] < 0) continue;
        P2RO(A,0,rows[i],&rrank,&ridx);
        RO2E(A,0,rrank,ridx,&erow);
        if (rrank < 0 || ridx < 0 || erow < 0) SETERRQ(PetscObjectComm((PetscObject)A),PETSC_ERR_PLIB,"Incorrect row translation");
        if (!a->emat->IsLocal(erow,ecol)) { /* off-process entry: will be queued for a remote update */
          if (imode != ADD_VALUES) SETERRQ(PETSC_COMM_SELF,PETSC_ERR_SUP,"Only ADD_VALUES to off-processor entry is supported");
          ++numQueues;
          continue;
        }
        switch (imode) {
        case INSERT_VALUES: a->emat->Set(erow,ecol,(PetscElemScalar)vals[i+j*nr]); break;
        case ADD_VALUES: a->emat->Update(erow,ecol,(PetscElemScalar)vals[i+j*nr]); break;
        default: SETERRQ1(PetscObjectComm((PetscObject)A),PETSC_ERR_SUP,"No support for InsertMode %d",(int)imode);
        }
      }
    }

    a->emat->Reserve(numQueues);
    for (j=0; j<nc; j++) {
      if (cols[j] < 0) continue;
      P2RO(A,1,cols[j],&crank,&cidx);
      RO2E(A,1,crank,cidx,&ecol);

      for (i=0; i<nr; i++) {
        if (rows[i] < 0) continue;
        P2RO(A,0,rows[i],&rrank,&ridx);
        RO2E(A,0,rrank,ridx,&erow);
        if (!a->emat->IsLocal(erow,ecol)) { /* queue the update of the off-process entry */
          a->emat->QueueUpdate(erow,ecol,vals[i+j*nr]);
        }
      }
    }
  }
  return(0);
}
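/*
   A minimal caller-side sketch (standard PETSc API; error checking omitted):
   local entries may use INSERT_VALUES or ADD_VALUES, but entries owned by
   another process are queued and therefore must use ADD_VALUES:

     PetscInt    i = 0, j = 3;
     PetscScalar v = 2.0;
     MatSetValues(A,1,&i,1,&j,&v,ADD_VALUES);   // safe for any (i,j)
     MatAssemblyBegin(A,MAT_FINAL_ASSEMBLY);    // flushes the queued updates
     MatAssemblyEnd(A,MAT_FINAL_ASSEMBLY);
*/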

static PetscErrorCode MatMult_Elemental(Mat A,Vec X,Vec Y)
{
  Mat_Elemental         *a = (Mat_Elemental*)A->data;
  const PetscElemScalar *x;
  PetscElemScalar       *y;
  PetscElemScalar       one = 1,zero = 0;

  VecGetArrayRead(X,(const PetscScalar **)&x);
  VecGetArray(Y,(PetscScalar **)&y);
  { /* scoped so that xe and ye are destroyed before the arrays are restored */
    El::DistMatrix<PetscElemScalar,El::VC,El::STAR> xe, ye;
    xe.LockedAttach(A->cmap->N,1,*a->grid,0,0,x,A->cmap->n);
    ye.Attach(A->rmap->N,1,*a->grid,0,0,y,A->rmap->n);
    El::Gemv(El::NORMAL,one,*a->emat,xe,zero,ye);
  }
  VecRestoreArrayRead(X,(const PetscScalar **)&x);
  VecRestoreArray(Y,(PetscScalar **)&y);
  return(0);
}

static PetscErrorCode MatMultTranspose_Elemental(Mat A,Vec X,Vec Y)
{
  Mat_Elemental         *a = (Mat_Elemental*)A->data;
  const PetscElemScalar *x;
  PetscElemScalar       *y;
  PetscElemScalar       one = 1,zero = 0;

  VecGetArrayRead(X,(const PetscScalar **)&x);
  VecGetArray(Y,(PetscScalar **)&y);
  { /* scoped so that xe and ye are destroyed before the arrays are restored */
    El::DistMatrix<PetscElemScalar,El::VC,El::STAR> xe, ye;
    xe.LockedAttach(A->rmap->N,1,*a->grid,0,0,x,A->rmap->n);
    ye.Attach(A->cmap->N,1,*a->grid,0,0,y,A->cmap->n);
    El::Gemv(El::TRANSPOSE,one,*a->emat,xe,zero,ye);
  }
  VecRestoreArrayRead(X,(const PetscScalar **)&x);
  VecRestoreArray(Y,(PetscScalar **)&y);
  return(0);
}

static PetscErrorCode MatMultAdd_Elemental(Mat A,Vec X,Vec Y,Vec Z)
{
  Mat_Elemental         *a = (Mat_Elemental*)A->data;
  const PetscElemScalar *x;
  PetscElemScalar       *z;
  PetscElemScalar       one = 1;

  if (Y != Z) {VecCopy(Y,Z);}
  VecGetArrayRead(X,(const PetscScalar **)&x);
  VecGetArray(Z,(PetscScalar **)&z);
  { /* scoped so that xe and ze are destroyed before the arrays are restored */
    El::DistMatrix<PetscElemScalar,El::VC,El::STAR> xe, ze;
    xe.LockedAttach(A->cmap->N,1,*a->grid,0,0,x,A->cmap->n);
    ze.Attach(A->rmap->N,1,*a->grid,0,0,z,A->rmap->n);
    El::Gemv(El::NORMAL,one,*a->emat,xe,one,ze);
  }
  VecRestoreArrayRead(X,(const PetscScalar **)&x);
  VecRestoreArray(Z,(PetscScalar **)&z);
  return(0);
}

static PetscErrorCode MatMultTransposeAdd_Elemental(Mat A,Vec X,Vec Y,Vec Z)
{
  Mat_Elemental         *a = (Mat_Elemental*)A->data;
  const PetscElemScalar *x;
  PetscElemScalar       *z;
  PetscElemScalar       one = 1;

  if (Y != Z) {VecCopy(Y,Z);}
  VecGetArrayRead(X,(const PetscScalar **)&x);
  VecGetArray(Z,(PetscScalar **)&z);
  { /* scoped so that xe and ze are destroyed before the arrays are restored */
    El::DistMatrix<PetscElemScalar,El::VC,El::STAR> xe, ze;
    xe.LockedAttach(A->rmap->N,1,*a->grid,0,0,x,A->rmap->n);
    ze.Attach(A->cmap->N,1,*a->grid,0,0,z,A->cmap->n);
    El::Gemv(El::TRANSPOSE,one,*a->emat,xe,one,ze);
  }
  VecRestoreArrayRead(X,(const PetscScalar **)&x);
  VecRestoreArray(Z,(PetscScalar **)&z);
  return(0);
}

PetscErrorCode MatMatMultNumeric_Elemental(Mat A,Mat B,Mat C)
{
  Mat_Elemental    *a = (Mat_Elemental*)A->data;
  Mat_Elemental    *b = (Mat_Elemental*)B->data;
  Mat_Elemental    *c = (Mat_Elemental*)C->data;
  PetscElemScalar  one = 1,zero = 0;

  El::Gemm(El::NORMAL,El::NORMAL,one,*a->emat,*b->emat,zero,*c->emat);
  C->assembled = PETSC_TRUE;
  return(0);
}

PetscErrorCode MatMatMultSymbolic_Elemental(Mat A,Mat B,PetscReal fill,Mat Ce)
{
  MatSetSizes(Ce,A->rmap->n,B->cmap->n,PETSC_DECIDE,PETSC_DECIDE);
  MatSetType(Ce,MATELEMENTAL);
  MatSetUp(Ce);
  Ce->ops->matmultnumeric = MatMatMultNumeric_Elemental;
  return(0);
}

static PetscErrorCode MatMatTransposeMultNumeric_Elemental(Mat A,Mat B,Mat C)
{
  Mat_Elemental      *a = (Mat_Elemental*)A->data;
  Mat_Elemental      *b = (Mat_Elemental*)B->data;
  Mat_Elemental      *c = (Mat_Elemental*)C->data;
  PetscElemScalar    one = 1,zero = 0;

  El::Gemm(El::NORMAL,El::TRANSPOSE,one,*a->emat,*b->emat,zero,*c->emat);
  C->assembled = PETSC_TRUE;
  return(0);
}

static PetscErrorCode MatMatTransposeMultSymbolic_Elemental(Mat A,Mat B,PetscReal fill,Mat C)
{
  MatSetSizes(C,A->rmap->n,B->rmap->n,PETSC_DECIDE,PETSC_DECIDE);
  MatSetType(C,MATELEMENTAL);
  MatSetUp(C);
  return(0);
}

/* --------------------------------------- */
static PetscErrorCode MatProductSetFromOptions_Elemental_AB(Mat C)
{
  C->ops->matmultsymbolic = MatMatMultSymbolic_Elemental;
  C->ops->productsymbolic = MatProductSymbolic_AB;
  return(0);
}

static PetscErrorCode MatProductSetFromOptions_Elemental_ABt(Mat C)
{
  C->ops->mattransposemultsymbolic = MatMatTransposeMultSymbolic_Elemental;
  C->ops->productsymbolic          = MatProductSymbolic_ABt;
  return(0);
}

PETSC_INTERN PetscErrorCode MatProductSetFromOptions_Elemental(Mat C)
{
  Mat_Product    *product = C->product;

  switch (product->type) {
  case MATPRODUCT_AB:
    MatProductSetFromOptions_Elemental_AB(C);
    break;
  case MATPRODUCT_ABt:
    MatProductSetFromOptions_Elemental_ABt(C);
    break;
  default:
    break;
  }
  return(0);
}
/* --------------------------------------- */
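/*
   A sketch of driving these product kernels through the public API (standard
   PETSc calls; error checking omitted). MatMatMult() routes through
   MatProductSetFromOptions_Elemental() when both operands are MATELEMENTAL:

     Mat A,B,C;
     // ... create and assemble A and B as MATELEMENTAL ...
     MatMatMult(A,B,MAT_INITIAL_MATRIX,PETSC_DEFAULT,&C);   // C = A*B

   or, with the MatProduct interface used above:

     Mat D;
     MatProductCreate(A,B,NULL,&D);
     MatProductSetType(D,MATPRODUCT_ABt);                   // D = A*B^T
     MatProductSetFromOptions(D);
     MatProductSymbolic(D);
     MatProductNumeric(D);
*/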

static PetscErrorCode MatGetDiagonal_Elemental(Mat A,Vec D)
{
  PetscInt        i,nrows,ncols,nD,rrank,ridx,crank,cidx;
  Mat_Elemental   *a = (Mat_Elemental*)A->data;
  PetscElemScalar v;
  MPI_Comm        comm;

  PetscObjectGetComm((PetscObject)A,&comm);
  MatGetSize(A,&nrows,&ncols);
  nD = nrows>ncols ? ncols : nrows;
  for (i=0; i<nD; i++) {
    PetscInt erow,ecol;
    P2RO(A,0,i,&rrank,&ridx);
    RO2E(A,0,rrank,ridx,&erow);
    if (rrank < 0 || ridx < 0 || erow < 0) SETERRQ(comm,PETSC_ERR_PLIB,"Incorrect row translation");
    P2RO(A,1,i,&crank,&cidx);
    RO2E(A,1,crank,cidx,&ecol);
    if (crank < 0 || cidx < 0 || ecol < 0) SETERRQ(comm,PETSC_ERR_PLIB,"Incorrect col translation");
    v = a->emat->Get(erow,ecol);
    VecSetValues(D,1,&i,(PetscScalar*)&v,INSERT_VALUES);
  }
  VecAssemblyBegin(D);
  VecAssemblyEnd(D);
  return(0);
}

static PetscErrorCode MatDiagonalScale_Elemental(Mat X,Vec L,Vec R)
{
  Mat_Elemental         *x = (Mat_Elemental*)X->data;
  const PetscElemScalar *d;

  if (R) {
    VecGetArrayRead(R,(const PetscScalar **)&d);
    El::DistMatrix<PetscElemScalar,El::VC,El::STAR> de;
    de.LockedAttach(X->cmap->N,1,*x->grid,0,0,d,X->cmap->n);
    El::DiagonalScale(El::RIGHT,El::NORMAL,de,*x->emat);
    VecRestoreArrayRead(R,(const PetscScalar **)&d);
  }
  if (L) {
    VecGetArrayRead(L,(const PetscScalar **)&d);
    El::DistMatrix<PetscElemScalar,El::VC,El::STAR> de;
    de.LockedAttach(X->rmap->N,1,*x->grid,0,0,d,X->rmap->n);
    El::DiagonalScale(El::LEFT,El::NORMAL,de,*x->emat);
    VecRestoreArrayRead(L,(const PetscScalar **)&d);
  }
  return(0);
}

static PetscErrorCode MatMissingDiagonal_Elemental(Mat A,PetscBool *missing,PetscInt *d)
{
  *missing = PETSC_FALSE;
  return(0);
}

static PetscErrorCode MatScale_Elemental(Mat X,PetscScalar a)
{
  Mat_Elemental  *x = (Mat_Elemental*)X->data;

  El::Scale((PetscElemScalar)a,*x->emat);
  return(0);
}

/*
  MatAXPY - Computes Y = a*X + Y.
*/
static PetscErrorCode MatAXPY_Elemental(Mat Y,PetscScalar a,Mat X,MatStructure str)
{
  Mat_Elemental  *x = (Mat_Elemental*)X->data;
  Mat_Elemental  *y = (Mat_Elemental*)Y->data;

  El::Axpy((PetscElemScalar)a,*x->emat,*y->emat);
  PetscObjectStateIncrease((PetscObject)Y);
  return(0);
}

static PetscErrorCode MatCopy_Elemental(Mat A,Mat B,MatStructure str)
{
  Mat_Elemental *a=(Mat_Elemental*)A->data;
  Mat_Elemental *b=(Mat_Elemental*)B->data;

  El::Copy(*a->emat,*b->emat);
  PetscObjectStateIncrease((PetscObject)B);
  return(0);
}

static PetscErrorCode MatDuplicate_Elemental(Mat A,MatDuplicateOption op,Mat *B)
{
  Mat            Be;
  MPI_Comm       comm;
  Mat_Elemental  *a=(Mat_Elemental*)A->data;

  PetscObjectGetComm((PetscObject)A,&comm);
  MatCreate(comm,&Be);
  MatSetSizes(Be,A->rmap->n,A->cmap->n,PETSC_DECIDE,PETSC_DECIDE);
  MatSetType(Be,MATELEMENTAL);
  MatSetUp(Be);
  *B = Be;
  if (op == MAT_COPY_VALUES) {
    Mat_Elemental *b=(Mat_Elemental*)Be->data;
    El::Copy(*a->emat,*b->emat);
  }
  Be->assembled = PETSC_TRUE;
  return(0);
}

static PetscErrorCode MatTranspose_Elemental(Mat A,MatReuse reuse,Mat *B)
{
  Mat            Be = *B;
  MPI_Comm       comm;
  Mat_Elemental  *a = (Mat_Elemental*)A->data, *b;

  PetscObjectGetComm((PetscObject)A,&comm);
  /* Only out-of-place supported */
  if (reuse == MAT_INPLACE_MATRIX) SETERRQ(comm,PETSC_ERR_SUP,"Only out-of-place supported");
  if (reuse == MAT_INITIAL_MATRIX) {
    MatCreate(comm,&Be);
    MatSetSizes(Be,A->cmap->n,A->rmap->n,PETSC_DECIDE,PETSC_DECIDE);
    MatSetType(Be,MATELEMENTAL);
    MatSetUp(Be);
    *B = Be;
  }
  b = (Mat_Elemental*)Be->data;
  El::Transpose(*a->emat,*b->emat);
  Be->assembled = PETSC_TRUE;
  return(0);
}

static PetscErrorCode MatConjugate_Elemental(Mat A)
{
  Mat_Elemental  *a = (Mat_Elemental*)A->data;

  El::Conjugate(*a->emat);
  return(0);
}

static PetscErrorCode MatHermitianTranspose_Elemental(Mat A,MatReuse reuse,Mat *B)
{
  Mat            Be = *B;
  MPI_Comm       comm;
  Mat_Elemental  *a = (Mat_Elemental*)A->data, *b;

  PetscObjectGetComm((PetscObject)A,&comm);
  /* Only out-of-place supported */
  if (reuse == MAT_INPLACE_MATRIX) SETERRQ(comm,PETSC_ERR_SUP,"Only out-of-place supported");
  if (reuse == MAT_INITIAL_MATRIX) {
    MatCreate(comm,&Be);
    MatSetSizes(Be,A->cmap->n,A->rmap->n,PETSC_DECIDE,PETSC_DECIDE);
    MatSetType(Be,MATELEMENTAL);
    MatSetUp(Be);
    *B = Be;
  }
  b = (Mat_Elemental*)Be->data;
  El::Adjoint(*a->emat,*b->emat);
  Be->assembled = PETSC_TRUE;
  return(0);
}

static PetscErrorCode MatSolve_Elemental(Mat A,Vec B,Vec X)
{
  Mat_Elemental     *a = (Mat_Elemental*)A->data;
  PetscElemScalar   *x;
  PetscInt          pivoting = a->pivoting;

  VecCopy(B,X);
  VecGetArray(X,(PetscScalar **)&x);

  El::DistMatrix<PetscElemScalar,El::VC,El::STAR> xe;
  xe.Attach(A->rmap->N,1,*a->grid,0,0,x,A->rmap->n);
  El::DistMatrix<PetscElemScalar,El::MC,El::MR> xer(xe);
  switch (A->factortype) {
  case MAT_FACTOR_LU:
    if (pivoting == 0) {
      El::lu::SolveAfter(El::NORMAL,*a->emat,xer);
    } else if (pivoting == 1) {
      El::lu::SolveAfter(El::NORMAL,*a->emat,*a->P,xer);
    } else { /* pivoting == 2 */
      El::lu::SolveAfter(El::NORMAL,*a->emat,*a->P,*a->Q,xer);
    }
    break;
  case MAT_FACTOR_CHOLESKY:
    El::cholesky::SolveAfter(El::UPPER,El::NORMAL,*a->emat,xer);
    break;
  default:
    SETERRQ(PETSC_COMM_SELF,PETSC_ERR_SUP,"Unfactored Matrix or Unsupported MatFactorType");
    break;
  }
  El::Copy(xer,xe);

  VecRestoreArray(X,(PetscScalar **)&x);
  return(0);
}

static PetscErrorCode MatSolveAdd_Elemental(Mat A,Vec B,Vec Y,Vec X)
{
  MatSolve_Elemental(A,B,X);
  VecAXPY(X,1,Y);
  return(0);
}

static PetscErrorCode MatMatSolve_Elemental(Mat A,Mat B,Mat X)
{
  Mat_Elemental *a=(Mat_Elemental*)A->data;
  Mat_Elemental *b=(Mat_Elemental*)B->data;
  Mat_Elemental *x=(Mat_Elemental*)X->data;
  PetscInt      pivoting = a->pivoting;

  El::Copy(*b->emat,*x->emat);
  switch (A->factortype) {
  case MAT_FACTOR_LU:
    if (pivoting == 0) {
      El::lu::SolveAfter(El::NORMAL,*a->emat,*x->emat);
    } else if (pivoting == 1) {
      El::lu::SolveAfter(El::NORMAL,*a->emat,*a->P,*x->emat);
    } else {
      El::lu::SolveAfter(El::NORMAL,*a->emat,*a->P,*a->Q,*x->emat);
    }
    break;
  case MAT_FACTOR_CHOLESKY:
    El::cholesky::SolveAfter(El::UPPER,El::NORMAL,*a->emat,*x->emat);
    break;
  default:
    SETERRQ(PETSC_COMM_SELF,PETSC_ERR_SUP,"Unfactored Matrix or Unsupported MatFactorType");
    break;
  }
  return(0);
}

static PetscErrorCode MatLUFactor_Elemental(Mat A,IS row,IS col,const MatFactorInfo *info)
{
  Mat_Elemental  *a = (Mat_Elemental*)A->data;
  PetscInt       pivoting = a->pivoting;

  if (pivoting == 0) {
    El::LU(*a->emat);
  } else if (pivoting == 1) {
    El::LU(*a->emat,*a->P);
  } else {
    El::LU(*a->emat,*a->P,*a->Q);
  }
  A->factortype = MAT_FACTOR_LU;
  A->assembled  = PETSC_TRUE;

  PetscFree(A->solvertype);
  PetscStrallocpy(MATSOLVERELEMENTAL,&A->solvertype);
  return(0);
}

static PetscErrorCode MatLUFactorNumeric_Elemental(Mat F,Mat A,const MatFactorInfo *info)
{
  MatCopy(A,F,SAME_NONZERO_PATTERN);
  MatLUFactor_Elemental(F,NULL,NULL,info);
  return(0);
}

static PetscErrorCode MatLUFactorSymbolic_Elemental(Mat F,Mat A,IS r,IS c,const MatFactorInfo *info)
{
  /* F is created and allocated by MatGetFactor_elemental_elemental(); nothing to do here */
  return(0);
}

static PetscErrorCode MatCholeskyFactor_Elemental(Mat A,IS perm,const MatFactorInfo *info)
{
  Mat_Elemental  *a = (Mat_Elemental*)A->data;

  El::Cholesky(El::UPPER,*a->emat);
  A->factortype = MAT_FACTOR_CHOLESKY;
  A->assembled  = PETSC_TRUE;

  PetscFree(A->solvertype);
  PetscStrallocpy(MATSOLVERELEMENTAL,&A->solvertype);
  return(0);
}

static PetscErrorCode MatCholeskyFactorNumeric_Elemental(Mat F,Mat A,const MatFactorInfo *info)
{
  MatCopy(A,F,SAME_NONZERO_PATTERN);
  MatCholeskyFactor_Elemental(F,NULL,info);
  return(0);
}

static PetscErrorCode MatCholeskyFactorSymbolic_Elemental(Mat F,Mat A,IS perm,const MatFactorInfo *info)
{
  /* F is created and allocated by MatGetFactor_elemental_elemental(); nothing to do here */
  return(0);
}

PetscErrorCode MatFactorGetSolverType_elemental_elemental(Mat A,MatSolverType *type)
{
  *type = MATSOLVERELEMENTAL;
  return(0);
}

static PetscErrorCode MatGetFactor_elemental_elemental(Mat A,MatFactorType ftype,Mat *F)
{
  Mat            B;

  /* Create the factorization matrix */
  MatCreate(PetscObjectComm((PetscObject)A),&B);
  MatSetSizes(B,A->rmap->n,A->cmap->n,PETSC_DECIDE,PETSC_DECIDE);
  MatSetType(B,MATELEMENTAL);
  MatSetUp(B);
  B->factortype = ftype;
  PetscFree(B->solvertype);
  PetscStrallocpy(MATSOLVERELEMENTAL,&B->solvertype);

  PetscObjectComposeFunction((PetscObject)B,"MatFactorGetSolverType_C",MatFactorGetSolverType_elemental_elemental);
  *F = B;
  return(0);
}

PETSC_EXTERN PetscErrorCode MatSolverTypeRegister_Elemental(void)
{
  MatSolverTypeRegister(MATSOLVERELEMENTAL,MATELEMENTAL,MAT_FACTOR_LU,MatGetFactor_elemental_elemental);
  MatSolverTypeRegister(MATSOLVERELEMENTAL,MATELEMENTAL,MAT_FACTOR_CHOLESKY,MatGetFactor_elemental_elemental);
  return(0);
}
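/*
   A sketch of using this solver from application code (standard PETSc factor
   API; error checking omitted, and NULL orderings assumed to be acceptable
   since the symbolic routines above are empty):

     Mat           A,F;
     Vec           b,x;
     MatFactorInfo info;
     // ... assemble A as MATELEMENTAL; create b and x ...
     MatFactorInfoInitialize(&info);
     MatGetFactor(A,MATSOLVERELEMENTAL,MAT_FACTOR_LU,&F);
     MatLUFactorSymbolic(F,A,NULL,NULL,&info);
     MatLUFactorNumeric(F,A,&info);
     MatSolve(F,b,x);

   The command-line route is -pc_type lu -pc_factor_mat_solver_type elemental.
*/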

static PetscErrorCode MatNorm_Elemental(Mat A,NormType type,PetscReal *nrm)
{
  Mat_Elemental *a=(Mat_Elemental*)A->data;

  switch (type){
  case NORM_1:
    *nrm = El::OneNorm(*a->emat);
    break;
  case NORM_FROBENIUS:
    *nrm = El::FrobeniusNorm(*a->emat);
    break;
  case NORM_INFINITY:
    *nrm = El::InfinityNorm(*a->emat);
    break;
  default:
    SETERRQ(PetscObjectComm((PetscObject)A),PETSC_ERR_SUP,"Unsupported norm type");
  }
  return(0);
}

static PetscErrorCode MatZeroEntries_Elemental(Mat A)
{
  Mat_Elemental *a=(Mat_Elemental*)A->data;

  El::Zero(*a->emat);
  return(0);
}

static PetscErrorCode MatGetOwnershipIS_Elemental(Mat A,IS *rows,IS *cols)
{
  Mat_Elemental  *a = (Mat_Elemental*)A->data;
  PetscInt       i,m,shift,stride,*idx;

  if (rows) {
    m = a->emat->LocalHeight();
    shift = a->emat->ColShift();
    stride = a->emat->ColStride();
    PetscMalloc1(m,&idx);
    for (i=0; i<m; i++) {
      PetscInt rank,offset;
      E2RO(A,0,shift+i*stride,&rank,&offset);
      RO2P(A,0,rank,offset,&idx[i]);
    }
    ISCreateGeneral(PETSC_COMM_SELF,m,idx,PETSC_OWN_POINTER,rows);
  }
  if (cols) {
    m = a->emat->LocalWidth();
    shift = a->emat->RowShift();
    stride = a->emat->RowStride();
    PetscMalloc1(m,&idx);
    for (i=0; i<m; i++) {
      PetscInt rank,offset;
      E2RO(A,1,shift+i*stride,&rank,&offset);
      RO2P(A,1,rank,offset,&idx[i]);
    }
    ISCreateGeneral(PETSC_COMM_SELF,m,idx,PETSC_OWN_POINTER,cols);
  }
  return(0);
}
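/*
   A sketch of querying the cyclic layout from application code (error
   checking omitted); MatGetOwnershipIS() dispatches to the routine above
   through the "MatGetOwnershipIS_C" composed function:

     IS             rows,cols;
     PetscInt       nlocal;
     const PetscInt *ridx;

     MatGetOwnershipIS(A,&rows,&cols);
     ISGetLocalSize(rows,&nlocal);      // number of locally stored rows
     ISGetIndices(rows,&ridx);          // their global PETSc indices
     // ... work with rows ridx[0..nlocal-1] ...
     ISRestoreIndices(rows,&ridx);
     ISDestroy(&rows);
     ISDestroy(&cols);
*/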

static PetscErrorCode MatConvert_Elemental_Dense(Mat A,MatType newtype,MatReuse reuse,Mat *B)
{
  Mat                Bmpi;
  Mat_Elemental      *a = (Mat_Elemental*)A->data;
  MPI_Comm           comm;
  IS                 isrows,iscols;
  PetscInt           rrank,ridx,crank,cidx,nrows,ncols,i,j,erow,ecol,elrow,elcol;
  const PetscInt     *rows,*cols;
  PetscElemScalar    v;
  const El::Grid     &grid = a->emat->Grid();

  PetscObjectGetComm((PetscObject)A,&comm);

  if (reuse == MAT_REUSE_MATRIX) {
    Bmpi = *B;
  } else {
    MatCreate(comm,&Bmpi);
    MatSetSizes(Bmpi,A->rmap->n,A->cmap->n,PETSC_DECIDE,PETSC_DECIDE);
    MatSetType(Bmpi,MATDENSE);
    MatSetUp(Bmpi);
  }

  /* Get local entries of A */
  MatGetOwnershipIS(A,&isrows,&iscols);
  ISGetLocalSize(isrows,&nrows);
  ISGetIndices(isrows,&rows);
  ISGetLocalSize(iscols,&ncols);
  ISGetIndices(iscols,&cols);

  if (a->roworiented) {
    for (i=0; i<nrows; i++) {
      P2RO(A,0,rows[i],&rrank,&ridx); /* convert indices between PETSc <-> (Rank,Offset) <-> Elemental */
      RO2E(A,0,rrank,ridx,&erow);
      if (rrank < 0 || ridx < 0 || erow < 0) SETERRQ(comm,PETSC_ERR_PLIB,"Incorrect row translation");
      for (j=0; j<ncols; j++) {
        P2RO(A,1,cols[j],&crank,&cidx);
        RO2E(A,1,crank,cidx,&ecol);
        if (crank < 0 || cidx < 0 || ecol < 0) SETERRQ(comm,PETSC_ERR_PLIB,"Incorrect col translation");

        elrow = erow / grid.MCSize(); /* Elemental local row index */
        elcol = ecol / grid.MRSize(); /* Elemental local column index */
        v = a->emat->GetLocal(elrow,elcol);
        MatSetValues(Bmpi,1,&rows[i],1,&cols[j],(PetscScalar *)&v,INSERT_VALUES);
      }
    }
  } else { /* column-oriented */
    for (j=0; j<ncols; j++) {
      P2RO(A,1,cols[j],&crank,&cidx);
      RO2E(A,1,crank,cidx,&ecol);
      if (crank < 0 || cidx < 0 || ecol < 0) SETERRQ(comm,PETSC_ERR_PLIB,"Incorrect col translation");
      for (i=0; i<nrows; i++) {
        P2RO(A,0,rows[i],&rrank,&ridx); /* convert indices between PETSc <-> (Rank,Offset) <-> Elemental */
        RO2E(A,0,rrank,ridx,&erow);
        if (rrank < 0 || ridx < 0 || erow < 0) SETERRQ(comm,PETSC_ERR_PLIB,"Incorrect row translation");

        elrow = erow / grid.MCSize(); /* Elemental local row index */
        elcol = ecol / grid.MRSize(); /* Elemental local column index */
        v = a->emat->GetLocal(elrow,elcol);
        MatSetValues(Bmpi,1,&rows[i],1,&cols[j],(PetscScalar *)&v,INSERT_VALUES);
      }
    }
  }
  MatAssemblyBegin(Bmpi,MAT_FINAL_ASSEMBLY);
  MatAssemblyEnd(Bmpi,MAT_FINAL_ASSEMBLY);
  if (reuse == MAT_INPLACE_MATRIX) {
    MatHeaderReplace(A,&Bmpi);
  } else {
    *B = Bmpi;
  }
  ISRestoreIndices(isrows,&rows);
  ISRestoreIndices(iscols,&cols);
  ISDestroy(&isrows);
  ISDestroy(&iscols);
  return(0);
}

PETSC_INTERN PetscErrorCode MatConvert_SeqAIJ_Elemental(Mat A, MatType newtype,MatReuse reuse,Mat *newmat)
{
  Mat               mat_elemental;
  PetscInt          M=A->rmap->N,N=A->cmap->N,row,ncols;
  const PetscInt    *cols;
  const PetscScalar *vals;

  if (reuse == MAT_REUSE_MATRIX) {
    mat_elemental = *newmat;
    MatZeroEntries(mat_elemental);
  } else {
    MatCreate(PetscObjectComm((PetscObject)A),&mat_elemental);
    MatSetSizes(mat_elemental,PETSC_DECIDE,PETSC_DECIDE,M,N);
    MatSetType(mat_elemental,MATELEMENTAL);
    MatSetUp(mat_elemental);
  }
  for (row=0; row<M; row++) {
    MatGetRow(A,row,&ncols,&cols,&vals);
    /* the PETSc-Elemental interface uses axpy for setting off-processor entries, so only ADD_VALUES is allowed */
    MatSetValues(mat_elemental,1,&row,ncols,cols,vals,ADD_VALUES);
    MatRestoreRow(A,row,&ncols,&cols,&vals);
  }
  MatAssemblyBegin(mat_elemental,MAT_FINAL_ASSEMBLY);
  MatAssemblyEnd(mat_elemental,MAT_FINAL_ASSEMBLY);

  if (reuse == MAT_INPLACE_MATRIX) {
    MatHeaderReplace(A,&mat_elemental);
  } else {
    *newmat = mat_elemental;
  }
  return(0);
}

PETSC_INTERN PetscErrorCode MatConvert_MPIAIJ_Elemental(Mat A, MatType newtype,MatReuse reuse,Mat *newmat)
{
  Mat               mat_elemental;
  PetscInt          row,ncols,rstart=A->rmap->rstart,rend=A->rmap->rend,j;
  const PetscInt    *cols;
  const PetscScalar *vals;

  if (reuse == MAT_REUSE_MATRIX) {
    mat_elemental = *newmat;
    MatZeroEntries(mat_elemental);
  } else {
    MatCreate(PetscObjectComm((PetscObject)A),&mat_elemental);
    MatSetSizes(mat_elemental,PETSC_DECIDE,PETSC_DECIDE,A->rmap->N,A->cmap->N);
    MatSetType(mat_elemental,MATELEMENTAL);
    MatSetUp(mat_elemental);
  }
  for (row=rstart; row<rend; row++) {
    MatGetRow(A,row,&ncols,&cols,&vals);
    for (j=0; j<ncols; j++) {
      /* the PETSc-Elemental interface uses axpy for setting off-processor entries, so only ADD_VALUES is allowed */
      MatSetValues(mat_elemental,1,&row,1,&cols[j],&vals[j],ADD_VALUES);
    }
    MatRestoreRow(A,row,&ncols,&cols,&vals);
  }
  MatAssemblyBegin(mat_elemental,MAT_FINAL_ASSEMBLY);
  MatAssemblyEnd(mat_elemental,MAT_FINAL_ASSEMBLY);

  if (reuse == MAT_INPLACE_MATRIX) {
    MatHeaderReplace(A,&mat_elemental);
  } else {
    *newmat = mat_elemental;
  }
  return(0);
}

PETSC_INTERN PetscErrorCode MatConvert_SeqSBAIJ_Elemental(Mat A, MatType newtype,MatReuse reuse,Mat *newmat)
{
  Mat               mat_elemental;
  PetscInt          M=A->rmap->N,N=A->cmap->N,row,ncols,j;
  const PetscInt    *cols;
  const PetscScalar *vals;

  if (reuse == MAT_REUSE_MATRIX) {
    mat_elemental = *newmat;
    MatZeroEntries(mat_elemental);
  } else {
    MatCreate(PetscObjectComm((PetscObject)A),&mat_elemental);
    MatSetSizes(mat_elemental,PETSC_DECIDE,PETSC_DECIDE,M,N);
    MatSetType(mat_elemental,MATELEMENTAL);
    MatSetUp(mat_elemental);
  }
  MatGetRowUpperTriangular(A);
  for (row=0; row<M; row++) {
    MatGetRow(A,row,&ncols,&cols,&vals);
    /* the PETSc-Elemental interface uses axpy for setting off-processor entries, so only ADD_VALUES is allowed */
    MatSetValues(mat_elemental,1,&row,ncols,cols,vals,ADD_VALUES);
    for (j=0; j<ncols; j++) { /* lower triangular part */
      PetscScalar v;
      if (cols[j] == row) continue;
      v = A->hermitian ? PetscConj(vals[j]) : vals[j];
      MatSetValues(mat_elemental,1,&cols[j],1,&row,&v,ADD_VALUES);
    }
    MatRestoreRow(A,row,&ncols,&cols,&vals);
  }
  MatRestoreRowUpperTriangular(A);
  MatAssemblyBegin(mat_elemental,MAT_FINAL_ASSEMBLY);
  MatAssemblyEnd(mat_elemental,MAT_FINAL_ASSEMBLY);

  if (reuse == MAT_INPLACE_MATRIX) {
    MatHeaderReplace(A,&mat_elemental);
  } else {
    *newmat = mat_elemental;
  }
  return(0);
}

PETSC_INTERN PetscErrorCode MatConvert_MPISBAIJ_Elemental(Mat A, MatType newtype,MatReuse reuse,Mat *newmat)
{
  Mat               mat_elemental;
  PetscInt          M=A->rmap->N,N=A->cmap->N,row,ncols,j,rstart=A->rmap->rstart,rend=A->rmap->rend;
  const PetscInt    *cols;
  const PetscScalar *vals;

  if (reuse == MAT_REUSE_MATRIX) {
    mat_elemental = *newmat;
    MatZeroEntries(mat_elemental);
  } else {
    MatCreate(PetscObjectComm((PetscObject)A),&mat_elemental);
    MatSetSizes(mat_elemental,PETSC_DECIDE,PETSC_DECIDE,M,N);
    MatSetType(mat_elemental,MATELEMENTAL);
    MatSetUp(mat_elemental);
  }
  MatGetRowUpperTriangular(A);
  for (row=rstart; row<rend; row++) {
    MatGetRow(A,row,&ncols,&cols,&vals);
    /* the PETSc-Elemental interface uses axpy for setting off-processor entries, so only ADD_VALUES is allowed */
    MatSetValues(mat_elemental,1,&row,ncols,cols,vals,ADD_VALUES);
    for (j=0; j<ncols; j++) { /* lower triangular part */
      PetscScalar v;
      if (cols[j] == row) continue;
      v = A->hermitian ? PetscConj(vals[j]) : vals[j];
      MatSetValues(mat_elemental,1,&cols[j],1,&row,&v,ADD_VALUES);
    }
    MatRestoreRow(A,row,&ncols,&cols,&vals);
  }
  MatRestoreRowUpperTriangular(A);
  MatAssemblyBegin(mat_elemental,MAT_FINAL_ASSEMBLY);
  MatAssemblyEnd(mat_elemental,MAT_FINAL_ASSEMBLY);

  if (reuse == MAT_INPLACE_MATRIX) {
    MatHeaderReplace(A,&mat_elemental);
  } else {
    *newmat = mat_elemental;
  }
  return(0);
}

static PetscErrorCode MatDestroy_Elemental(Mat A)
{
  Mat_Elemental      *a = (Mat_Elemental*)A->data;
  Mat_Elemental_Grid *commgrid;
  PetscBool          flg;
  MPI_Comm           icomm;

  delete a->emat;
  delete a->P;
  delete a->Q;

  El::mpi::Comm cxxcomm(PetscObjectComm((PetscObject)A));
  PetscCommDuplicate(cxxcomm.comm,&icomm,NULL);
  MPI_Comm_get_attr(icomm,Petsc_Elemental_keyval,(void**)&commgrid,(int*)&flg);
  if (--commgrid->grid_refct == 0) {
    delete commgrid->grid;
    PetscFree(commgrid);
    MPI_Comm_free_keyval(&Petsc_Elemental_keyval);
  }
  PetscCommDestroy(&icomm);
  PetscObjectComposeFunction((PetscObject)A,"MatGetOwnershipIS_C",NULL);
  PetscObjectComposeFunction((PetscObject)A,"MatFactorGetSolverType_C",NULL);
  PetscFree(A->data);
  return(0);
}

PetscErrorCode MatSetUp_Elemental(Mat A)
{
  Mat_Elemental  *a = (Mat_Elemental*)A->data;
  MPI_Comm       comm;
  PetscMPIInt    rsize,csize;
  PetscInt       n;

  PetscLayoutSetUp(A->rmap);
  PetscLayoutSetUp(A->cmap);

  /* Check that the local row and column sizes are equally distributed.
     Jed: Elemental uses "element" cyclic ordering, so the sizes need to match that exactly.
     The strategy in MatElemental is for PETSc to implicitly permute to block ordering (like
     would be returned by PetscSplitOwnership(comm,&n,&N)), at which point Elemental matrices
     can act on PETSc vectors without redistributing the vectors; a worked layout example
     appears before the return below. */
  PetscObjectGetComm((PetscObject)A,&comm);
  n = PETSC_DECIDE;
  PetscSplitOwnership(comm,&n,&A->rmap->N);
  if (n != A->rmap->n) SETERRQ1(PETSC_COMM_SELF,PETSC_ERR_ARG_INCOMP,"Local row size %D of ELEMENTAL matrix must be equally distributed",A->rmap->n);

  n = PETSC_DECIDE;
  PetscSplitOwnership(comm,&n,&A->cmap->N);
  if (n != A->cmap->n) SETERRQ1(PETSC_COMM_SELF,PETSC_ERR_ARG_INCOMP,"Local column size %D of ELEMENTAL matrix must be equally distributed",A->cmap->n);

  a->emat->Resize(A->rmap->N,A->cmap->N);
  El::Zero(*a->emat);

  MPI_Comm_size(A->rmap->comm,&rsize);
  MPI_Comm_size(A->cmap->comm,&csize);
  if (csize != rsize) SETERRQ(PetscObjectComm((PetscObject)A),PETSC_ERR_ARG_INCOMP,"Cannot use row and column communicators of different sizes");
  a->commsize = rsize;
  a->mr[0] = A->rmap->N % rsize; if (!a->mr[0]) a->mr[0] = rsize;
  a->mr[1] = A->cmap->N % csize; if (!a->mr[1]) a->mr[1] = csize;
  a->m[0]  = A->rmap->N / rsize + (a->mr[0] != rsize);
  a->m[1]  = A->cmap->N / csize + (a->mr[1] != csize);
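  /* A worked example of the layout computed above (illustrative numbers):
     N = 10 rows on rsize = 4 processes gives mr[0] = 2 and m[0] = 3, i.e. the
     first 2 ranks own 3 rows each and the remaining ranks own 2 rows each,
     matching what PetscSplitOwnership() returns. */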
  return(0);
}

PetscErrorCode MatAssemblyBegin_Elemental(Mat A, MatAssemblyType type)
{
  Mat_Elemental  *a = (Mat_Elemental*)A->data;

  /* communicate and apply all queued off-process updates */
  a->emat->ProcessQueues();
  return(0);
}

PetscErrorCode MatAssemblyEnd_Elemental(Mat A, MatAssemblyType type)
{
  /* currently does nothing */
  return(0);
}

PetscErrorCode MatLoad_Elemental(Mat newMat, PetscViewer viewer)
{
  Mat            Adense,Ae;
  MPI_Comm       comm;

  PetscObjectGetComm((PetscObject)newMat,&comm);
  MatCreate(comm,&Adense);
  MatSetType(Adense,MATDENSE);
  MatLoad(Adense,viewer);
  MatConvert(Adense,MATELEMENTAL,MAT_INITIAL_MATRIX,&Ae);
  MatDestroy(&Adense);
  MatHeaderReplace(newMat,&Ae);
  return(0);
}

/* -------------------------------------------------------------------*/
static struct _MatOps MatOps_Values = {
       MatSetValues_Elemental,
       0,
       0,
       MatMult_Elemental,
/* 4*/ MatMultAdd_Elemental,
       MatMultTranspose_Elemental,
       MatMultTransposeAdd_Elemental,
       MatSolve_Elemental,
       MatSolveAdd_Elemental,
       0,
/*10*/ 0,
       MatLUFactor_Elemental,
       MatCholeskyFactor_Elemental,
       0,
       MatTranspose_Elemental,
/*15*/ MatGetInfo_Elemental,
       0,
       MatGetDiagonal_Elemental,
       MatDiagonalScale_Elemental,
       MatNorm_Elemental,
/*20*/ MatAssemblyBegin_Elemental,
       MatAssemblyEnd_Elemental,
       MatSetOption_Elemental,
       MatZeroEntries_Elemental,
/*24*/ 0,
       MatLUFactorSymbolic_Elemental,
       MatLUFactorNumeric_Elemental,
       MatCholeskyFactorSymbolic_Elemental,
       MatCholeskyFactorNumeric_Elemental,
/*29*/ MatSetUp_Elemental,
       0,
       0,
       0,
       0,
/*34*/ MatDuplicate_Elemental,
       0,
       0,
       0,
       0,
/*39*/ MatAXPY_Elemental,
       0,
       0,
       0,
       MatCopy_Elemental,
/*44*/ 0,
       MatScale_Elemental,
       MatShift_Basic,
       0,
       0,
/*49*/ 0,
       0,
       0,
       0,
       0,
/*54*/ 0,
       0,
       0,
       0,
       0,
/*59*/ 0,
       MatDestroy_Elemental,
       MatView_Elemental,
       0,
       0,
/*64*/ 0,
       0,
       0,
       0,
       0,
/*69*/ 0,
       0,
       MatConvert_Elemental_Dense,
       0,
       0,
/*74*/ 0,
       0,
       0,
       0,
       0,
/*79*/ 0,
       0,
       0,
       0,
       MatLoad_Elemental,
/*84*/ 0,
       0,
       0,
       0,
       0,
/*89*/ 0,
       0,
       MatMatMultNumeric_Elemental,
       0,
       0,
/*94*/ 0,
       0,
       0,
       MatMatTransposeMultNumeric_Elemental,
       0,
/*99*/ MatProductSetFromOptions_Elemental,
       0,
       0,
       MatConjugate_Elemental,
       0,
/*104*/0,
       0,
       0,
       0,
       0,
/*109*/MatMatSolve_Elemental,
       0,
       0,
       0,
       MatMissingDiagonal_Elemental,
/*114*/0,
       0,
       0,
       0,
       0,
/*119*/0,
       MatHermitianTranspose_Elemental,
       0,
       0,
       0,
/*124*/0,
       0,
       0,
       0,
       0,
/*129*/0,
       0,
       0,
       0,
       0,
/*134*/0,
       0,
       0,
       0,
       0,
       0,
/*140*/0,
       0,
       0,
       0,
       0,
/*145*/0,
       0,
       0
};

/*MC
   MATELEMENTAL = "elemental" - A matrix type for dense matrices using the Elemental package

   Use ./configure --download-elemental to install PETSc to use Elemental

   Use -pc_type lu -pc_factor_mat_solver_type elemental to use this direct solver

   Options Database Keys:
+ -mat_type elemental - sets the matrix type to "elemental" during a call to MatSetFromOptions()
- -mat_elemental_grid_height - sets the grid height for the 2D cyclic ordering of the internal matrix

   Level: beginner

.seealso: MATDENSE
M*/
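/*
   A minimal lifecycle sketch for this type (standard PETSc API; error
   checking omitted):

     Mat A;
     MatCreate(PETSC_COMM_WORLD,&A);
     MatSetSizes(A,PETSC_DECIDE,PETSC_DECIDE,100,100);
     MatSetType(A,MATELEMENTAL);
     MatSetFromOptions(A);
     MatSetUp(A);
     // ... MatSetValues(...,ADD_VALUES) ...
     MatAssemblyBegin(A,MAT_FINAL_ASSEMBLY);
     MatAssemblyEnd(A,MAT_FINAL_ASSEMBLY);
     MatView(A,PETSC_VIEWER_STDOUT_WORLD);
     MatDestroy(&A);
*/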

PETSC_EXTERN PetscErrorCode MatCreate_Elemental(Mat A)
{
  Mat_Elemental      *a;
  PetscBool          flg,flg1;
  Mat_Elemental_Grid *commgrid;
  MPI_Comm           icomm;
  PetscInt           optv1;

  PetscMemcpy(A->ops,&MatOps_Values,sizeof(struct _MatOps));
  A->insertmode = NOT_SET_VALUES;

  PetscNewLog(A,&a);
  A->data = (void*)a;

  /* Set up the Elemental matrix */
  El::mpi::Comm cxxcomm(PetscObjectComm((PetscObject)A));

  /* The grid needs to be shared between multiple Mats on the same communicator; implemented by attribute caching on the MPI_Comm */
  if (Petsc_Elemental_keyval == MPI_KEYVAL_INVALID) {
    MPI_Comm_create_keyval(MPI_COMM_NULL_COPY_FN,MPI_COMM_NULL_DELETE_FN,&Petsc_Elemental_keyval,(void*)0);
  }
  PetscCommDuplicate(cxxcomm.comm,&icomm,NULL);
  MPI_Comm_get_attr(icomm,Petsc_Elemental_keyval,(void**)&commgrid,(int*)&flg);
  if (!flg) {
    PetscNewLog(A,&commgrid);

    PetscOptionsBegin(PetscObjectComm((PetscObject)A),((PetscObject)A)->prefix,"Elemental Options","Mat");
    /* displayed default grid sizes (CommSize,1) are set by us arbitrarily until El::Grid() is called */
    PetscOptionsInt("-mat_elemental_grid_height","Grid Height","None",El::mpi::Size(cxxcomm),&optv1,&flg1);
    if (flg1) {
      if (El::mpi::Size(cxxcomm) % optv1) SETERRQ2(PetscObjectComm((PetscObject)A),PETSC_ERR_ARG_INCOMP,"Grid Height %D must evenly divide CommSize %D",optv1,(PetscInt)El::mpi::Size(cxxcomm));
      commgrid->grid = new El::Grid(cxxcomm,optv1); /* use user-provided grid height */
    } else {
      commgrid->grid = new El::Grid(cxxcomm); /* use Elemental default grid sizes */
    }
    commgrid->grid_refct = 1;
    MPI_Comm_set_attr(icomm,Petsc_Elemental_keyval,(void*)commgrid);

    a->pivoting = 1;
    PetscOptionsInt("-mat_elemental_pivoting","Pivoting","None",a->pivoting,&a->pivoting,NULL);

    PetscOptionsEnd();
  } else {
    commgrid->grid_refct++;
  }
  PetscCommDestroy(&icomm);
  a->grid        = commgrid->grid;
  a->emat        = new El::DistMatrix<PetscElemScalar>(*a->grid);
  a->roworiented = PETSC_TRUE;

  PetscObjectComposeFunction((PetscObject)A,"MatGetOwnershipIS_C",MatGetOwnershipIS_Elemental);
  PetscObjectChangeTypeName((PetscObject)A,MATELEMENTAL);
  return(0);
}