Actual source code: mkl_pardiso.c

petsc-master 2019-09-20
  1: #include <../src/mat/impls/aij/seq/aij.h>        /*I "petscmat.h" I*/
  2: #include <../src/mat/impls/sbaij/seq/sbaij.h>
  3: #include <../src/mat/impls/dense/seq/dense.h>

  5: #if defined(PETSC_HAVE_MKL_INTEL_ILP64)
  6: #define MKL_ILP64
  7: #endif
  8: #include <mkl_pardiso.h>

 10: PETSC_EXTERN void PetscSetMKL_PARDISOThreads(int);

 12: /*
 13:  *  Possible mkl_pardiso phases that control the execution of the solver.
 14:  *  For more information, check the mkl_pardiso manual.
 15:  */
 16: #define JOB_ANALYSIS 11
 17: #define JOB_ANALYSIS_NUMERICAL_FACTORIZATION 12
 18: #define JOB_ANALYSIS_NUMERICAL_FACTORIZATION_SOLVE_ITERATIVE_REFINEMENT 13
 19: #define JOB_NUMERICAL_FACTORIZATION 22
 20: #define JOB_NUMERICAL_FACTORIZATION_SOLVE_ITERATIVE_REFINEMENT 23
 21: #define JOB_SOLVE_ITERATIVE_REFINEMENT 33
 22: #define JOB_SOLVE_FORWARD_SUBSTITUTION 331
 23: #define JOB_SOLVE_DIAGONAL_SUBSTITUTION 332
 24: #define JOB_SOLVE_BACKWARD_SUBSTITUTION 333
 25: #define JOB_RELEASE_OF_LU_MEMORY 0
 26: #define JOB_RELEASE_OF_ALL_MEMORY -1

 28: #define IPARM_SIZE 64

 30: #if defined(PETSC_USE_64BIT_INDICES)
 31:  #if defined(PETSC_HAVE_MKL_INTEL_ILP64)
 32:   #define INT_TYPE long long int
 33:   #define MKL_PARDISO pardiso
 34:   #define MKL_PARDISO_INIT pardisoinit
 35:  #else
 36:   /* this is the case where the MKL BLAS/LAPACK use 32 bit integers but the 64 bit integer version
 37:      of the Pardiso code is used; hence the need for the _64 below */
 38:   #define INT_TYPE long long int
 39:   #define MKL_PARDISO pardiso_64
 40:   #define MKL_PARDISO_INIT pardiso_64init
 41: void pardiso_64init(void *pt, INT_TYPE *mtype, INT_TYPE iparm [])
 42: {
 43:   int iparm_copy[IPARM_SIZE], mtype_copy, i;

 45:   mtype_copy = *mtype;
 46:   pardisoinit(pt, &mtype_copy, iparm_copy);
 47:   for (i=0; i<IPARM_SIZE; i++) iparm[i] = iparm_copy[i];
 48: }
 49:  #endif
 50: #else
 51:  #define INT_TYPE int
 52:  #define MKL_PARDISO pardiso
 53:  #define MKL_PARDISO_INIT pardisoinit
 54: #endif


 57: /*
 58:  *  Internal data structure.
 59:  *  For more information, check the mkl_pardiso manual.
 60:  */
 61: typedef struct {

 63:   /* Configuration vector*/
 64:   INT_TYPE     iparm[IPARM_SIZE];

 66:   /*
 67:    * Internal mkl_pardiso memory location.
 68:    * After the first call to mkl_pardiso do not modify pt, as that could cause a serious memory leak.
 69:    */
 70:   void         *pt[IPARM_SIZE];

 72:   /* Basic mkl_pardiso info*/
 73:   INT_TYPE     phase, maxfct, mnum, mtype, n, nrhs, msglvl, err;

 75:   /* Matrix structure*/
 76:   void         *a;
 77:   INT_TYPE     *ia, *ja;

 79:   /* Number of non-zero elements*/
 80:   INT_TYPE     nz;

 83:   /* Row permutation vector */
 83:   INT_TYPE     *perm;

 86:   /* Indicates whether the matrix preserves its sparsity structure. */
 86:   MatStructure matstruc;

 88:   PetscBool    needsym;
 89:   PetscBool    freeaij;

 91:   /* Schur complement */
 92:   PetscScalar  *schur;
 93:   PetscInt     schur_size;
 94:   PetscInt     *schur_idxs;
 95:   PetscScalar  *schur_work;
 96:   PetscBLASInt schur_work_size;
 97:   PetscBool    solve_interior;

 99:   /* True if mkl_pardiso functions have been used. */
100:   PetscBool CleanUp;

102:   /* Conversion to a format suitable for MKL */
103:   PetscErrorCode (*Convert)(Mat, PetscBool, MatReuse, PetscBool*, INT_TYPE*, INT_TYPE**, INT_TYPE**, PetscScalar**);
104: } Mat_MKL_PARDISO;

106: PetscErrorCode MatMKLPardiso_Convert_seqsbaij(Mat A,PetscBool sym,MatReuse reuse,PetscBool *free,INT_TYPE *nnz,INT_TYPE **r,INT_TYPE **c,PetscScalar **v)
107: {
108:   Mat_SeqSBAIJ   *aa = (Mat_SeqSBAIJ*)A->data;
109:   PetscInt       bs  = A->rmap->bs,i;

113:   if (!sym) {
114:     SETERRQ(PetscObjectComm((PetscObject)A),PETSC_ERR_PLIB,"This should not happen");
115:   }
116:   *v      = aa->a;
117:   if (bs == 1) { /* already in the correct format */
118:     /* though PetscInt and INT_TYPE are the same size, since they are defined as different types the Intel compiler requires a cast */
119:     *r    = (INT_TYPE*)aa->i;
120:     *c    = (INT_TYPE*)aa->j;
121:     *nnz  = (INT_TYPE)aa->nz;
122:     *free = PETSC_FALSE;
123:   } else if (reuse == MAT_INITIAL_MATRIX) {
124:     PetscInt m = A->rmap->n,nz = aa->nz;
125:     PetscInt *row,*col;
126:     PetscMalloc2(m+1,&row,nz,&col);
127:     for (i=0; i<m+1; i++) {
128:       row[i] = aa->i[i]+1;
129:     }
130:     for (i=0; i<nz; i++) {
131:       col[i] = aa->j[i]+1;
132:     }
133:     *r    = (INT_TYPE*)row;
134:     *c    = (INT_TYPE*)col;
135:     *nnz  = (INT_TYPE)nz;
136:     *free = PETSC_TRUE;
137:   }
138:   return(0);
139: }

141: PetscErrorCode MatMKLPardiso_Convert_seqbaij(Mat A,PetscBool sym,MatReuse reuse,PetscBool *free,INT_TYPE *nnz,INT_TYPE **r,INT_TYPE **c,PetscScalar **v)
142: {
143:   Mat_SeqBAIJ    *aa = (Mat_SeqBAIJ*)A->data;
144:   PetscInt       bs  = A->rmap->bs,i;

148:   if (!sym) {
149:     *v      = aa->a;
150:     if (bs == 1) { /* already in the correct format */
151:       /* though PetscInt and INT_TYPE are the same size, since they are defined as different types the Intel compiler requires a cast */
152:       *r    = (INT_TYPE*)aa->i;
153:       *c    = (INT_TYPE*)aa->j;
154:       *nnz  = (INT_TYPE)aa->nz;
155:       *free = PETSC_FALSE;
156:       return(0);
157:     } else if (reuse == MAT_INITIAL_MATRIX) {
158:       PetscInt m = A->rmap->n,nz = aa->nz;
159:       PetscInt *row,*col;
160:       PetscMalloc2(m+1,&row,nz,&col);
161:       for (i=0; i<m+1; i++) {
162:         row[i] = aa->i[i]+1;
163:       }
164:       for (i=0; i<nz; i++) {
165:         col[i] = aa->j[i]+1;
166:       }
167:       *r    = (INT_TYPE*)row;
168:       *c    = (INT_TYPE*)col;
169:       *nnz  = (INT_TYPE)nz;
170:     }
171:     *free = PETSC_TRUE;
172:   } else {
173:     SETERRQ(PetscObjectComm((PetscObject)A),PETSC_ERR_PLIB,"This should not happen");
174:   }
175:   return(0);
176: }

178: PetscErrorCode MatMKLPardiso_Convert_seqaij(Mat A,PetscBool sym,MatReuse reuse,PetscBool *free,INT_TYPE *nnz,INT_TYPE **r,INT_TYPE **c,PetscScalar **v)
179: {
180:   Mat_SeqAIJ     *aa = (Mat_SeqAIJ*)A->data;

184:   if (!sym) { /* already in the correct format */
185:     *v    = aa->a;
186:     *r    = (INT_TYPE*)aa->i;
187:     *c    = (INT_TYPE*)aa->j;
188:     *nnz  = (INT_TYPE)aa->nz;
189:     *free = PETSC_FALSE;
190:     return(0);
191:   }
192:   /* need to get the triangular part */
193:   if (reuse == MAT_INITIAL_MATRIX) {
194:     PetscScalar *vals,*vv;
195:     PetscInt    *row,*col,*jj;
196:     PetscInt    m = A->rmap->n,nz,i;

198:     nz = 0;
199:     for (i=0; i<m; i++) {
200:       nz += aa->i[i+1] - aa->diag[i];
201:     }
202:     PetscMalloc2(m+1,&row,nz,&col);
203:     PetscMalloc1(nz,&vals);
204:     jj = col;
205:     vv = vals;

207:     row[0] = 0;
208:     for (i=0; i<m; i++) {
209:       PetscInt    *aj = aa->j + aa->diag[i];
210:       PetscScalar *av = aa->a + aa->diag[i];
211:       PetscInt    rl = aa->i[i+1] - aa->diag[i],j;
212:       for (j=0; j<rl; j++) {
213:         *jj = *aj; jj++; aj++;
214:         *vv = *av; vv++; av++;
215:       }
216:       row[i+1]    = row[i] + rl;
217:     }
218:     *v    = vals;
219:     *r    = (INT_TYPE*)row;
220:     *c    = (INT_TYPE*)col;
221:     *nnz  = (INT_TYPE)nz;
222:   } else {
223:     PetscScalar *vv;
224:     PetscInt    m = A->rmap->n,i;

226:     vv = *v;
227:     for (i=0; i<m; i++) {
228:       PetscScalar *av = aa->a + aa->diag[i];
229:       PetscInt    rl = aa->i[i+1] - aa->diag[i],j;
230:       for (j=0; j<rl; j++) {
231:         *vv = *av; vv++; av++;
232:       }
233:     }
234:   }
235:   *free = PETSC_TRUE;
236:   return(0);
237: }


240: static PetscErrorCode MatMKLPardisoSolveSchur_Private(Mat F, PetscScalar *B, PetscScalar *X)
241: {
242:   Mat_MKL_PARDISO      *mpardiso = (Mat_MKL_PARDISO*)F->data;
243:   Mat                  S,Xmat,Bmat;
244:   MatFactorSchurStatus schurstatus;
245:   PetscErrorCode       ierr;

248:   MatFactorFactorizeSchurComplement(F);
249:   MatFactorGetSchurComplement(F,&S,&schurstatus);
250:   if (X == B && schurstatus == MAT_FACTOR_SCHUR_INVERTED) SETERRQ(PETSC_COMM_SELF,PETSC_ERR_SUP,"X and B cannot point to the same address");
251:   MatCreateSeqDense(PETSC_COMM_SELF,mpardiso->schur_size,mpardiso->nrhs,B,&Bmat);
252:   MatCreateSeqDense(PETSC_COMM_SELF,mpardiso->schur_size,mpardiso->nrhs,X,&Xmat);
253:   if (X != B) { /* using MatMatSolve */
254:     MatCopy(Bmat,Xmat,SAME_NONZERO_PATTERN);
255:   }

257: #if defined(PETSC_USE_COMPLEX)
258:   if (mpardiso->iparm[12-1] == 1) SETERRQ(PetscObjectComm((PetscObject)F),PETSC_ERR_SUP,"Hermitian solve not implemented yet");
259: #endif

261:   switch (schurstatus) {
262:   case MAT_FACTOR_SCHUR_FACTORED:
263:     if (!mpardiso->iparm[12-1]) {
264:       MatMatSolve(S,Bmat,Xmat);
265:     } else { /* transpose solve */
266:       MatMatSolveTranspose(S,Bmat,Xmat);
267:     }
268:     break;
269:   case MAT_FACTOR_SCHUR_INVERTED:
270:     if (!mpardiso->iparm[12-1]) {
271:       MatMatMult(S,Bmat,MAT_REUSE_MATRIX,PETSC_DEFAULT,&Xmat);
272:     } else { /* transpose solve */
273:       MatTransposeMatMult(S,Bmat,MAT_REUSE_MATRIX,PETSC_DEFAULT,&Xmat);
274:     }
275:     break;
276:   default:
277:     SETERRQ1(PetscObjectComm((PetscObject)F),PETSC_ERR_SUP,"Unhandled MatFactorSchurStatus %D",F->schur_status);
278:     break;
279:   }
280:   MatFactorRestoreSchurComplement(F,&S,schurstatus);
281:   MatDestroy(&Bmat);
282:   MatDestroy(&Xmat);
283:   return(0);
284: }

286: PetscErrorCode MatFactorSetSchurIS_MKL_PARDISO(Mat F, IS is)
287: {
288:   Mat_MKL_PARDISO *mpardiso = (Mat_MKL_PARDISO*)F->data;
289:   const PetscInt  *idxs;
290:   PetscInt        size,i;
291:   PetscMPIInt     csize;
292:   PetscBool       sorted;
293:   PetscErrorCode  ierr;

296:   MPI_Comm_size(PetscObjectComm((PetscObject)F),&csize);
297:   if (csize > 1) SETERRQ(PETSC_COMM_SELF,PETSC_ERR_SUP,"MKL_PARDISO parallel Schur complements not yet supported from PETSc");
298:   ISSorted(is,&sorted);
299:   if (!sorted) {
300:     SETERRQ(PETSC_COMM_SELF,PETSC_ERR_SUP,"IS for MKL_PARDISO Schur complements needs to be sorted");
301:   }
302:   ISGetLocalSize(is,&size);
303:   if (mpardiso->schur_size != size) {
304:     mpardiso->schur_size = size;
305:     PetscFree2(mpardiso->schur,mpardiso->schur_work);
306:     PetscFree(mpardiso->schur_idxs);
307:     PetscBLASIntCast(PetscMax(mpardiso->n,2*size),&mpardiso->schur_work_size);
308:     PetscMalloc2(size*size,&mpardiso->schur,mpardiso->schur_work_size,&mpardiso->schur_work);
309:     PetscMalloc1(size,&mpardiso->schur_idxs);
310:   }
311:   MatCreateSeqDense(PETSC_COMM_SELF,mpardiso->schur_size,mpardiso->schur_size,mpardiso->schur,&F->schur);
312:   if (mpardiso->mtype == 2) {
313:     MatSetOption(F->schur,MAT_SPD,PETSC_TRUE);
314:   }

316:   PetscArrayzero(mpardiso->perm,mpardiso->n);
317:   ISGetIndices(is,&idxs);
318:   PetscArraycpy(mpardiso->schur_idxs,idxs,size);
319:   for (i=0;i<size;i++) mpardiso->perm[idxs[i]] = 1;
320:   ISRestoreIndices(is,&idxs);
321:   if (size) { /* turn on Schur switch if the set of indices is not empty */
322:     mpardiso->iparm[36-1] = 2;
323:   }
324:   return(0);
325: }

327: PetscErrorCode MatDestroy_MKL_PARDISO(Mat A)
328: {
329:   Mat_MKL_PARDISO *mat_mkl_pardiso=(Mat_MKL_PARDISO*)A->data;
330:   PetscErrorCode  ierr;

333:   if (mat_mkl_pardiso->CleanUp) {
334:     mat_mkl_pardiso->phase = JOB_RELEASE_OF_ALL_MEMORY;

336:     MKL_PARDISO (mat_mkl_pardiso->pt,
337:       &mat_mkl_pardiso->maxfct,
338:       &mat_mkl_pardiso->mnum,
339:       &mat_mkl_pardiso->mtype,
340:       &mat_mkl_pardiso->phase,
341:       &mat_mkl_pardiso->n,
342:       NULL,
343:       NULL,
344:       NULL,
345:       NULL,
346:       &mat_mkl_pardiso->nrhs,
347:       mat_mkl_pardiso->iparm,
348:       &mat_mkl_pardiso->msglvl,
349:       NULL,
350:       NULL,
351:       &mat_mkl_pardiso->err);
352:   }
353:   PetscFree(mat_mkl_pardiso->perm);
354:   PetscFree2(mat_mkl_pardiso->schur,mat_mkl_pardiso->schur_work);
355:   PetscFree(mat_mkl_pardiso->schur_idxs);
356:   if (mat_mkl_pardiso->freeaij) {
357:     PetscFree2(mat_mkl_pardiso->ia,mat_mkl_pardiso->ja);
358:     if (mat_mkl_pardiso->iparm[34] == 1) {
359:       PetscFree(mat_mkl_pardiso->a);
360:     }
361:   }
362:   PetscFree(A->data);

364:   /* clear composed functions */
365:   PetscObjectComposeFunction((PetscObject)A,"MatFactorGetSolverType_C",NULL);
366:   PetscObjectComposeFunction((PetscObject)A,"MatFactorSetSchurIS_C",NULL);
367:   PetscObjectComposeFunction((PetscObject)A,"MatMkl_PardisoSetCntl_C",NULL);
368:   return(0);
369: }

371: static PetscErrorCode MatMKLPardisoScatterSchur_Private(Mat_MKL_PARDISO *mpardiso, PetscScalar *whole, PetscScalar *schur, PetscBool reduce)
372: {
374:   if (reduce) { /* data given for the whole matrix */
375:     PetscInt i,m=0,p=0;
376:     for (i=0;i<mpardiso->nrhs;i++) {
377:       PetscInt j;
378:       for (j=0;j<mpardiso->schur_size;j++) {
379:         schur[p+j] = whole[m+mpardiso->schur_idxs[j]];
380:       }
381:       m += mpardiso->n;
382:       p += mpardiso->schur_size;
383:     }
384:   } else { /* from Schur to whole */
385:     PetscInt i,m=0,p=0;
386:     for (i=0;i<mpardiso->nrhs;i++) {
387:       PetscInt j;
388:       for (j=0;j<mpardiso->schur_size;j++) {
389:         whole[m+mpardiso->schur_idxs[j]] = schur[p+j];
390:       }
391:       m += mpardiso->n;
392:       p += mpardiso->schur_size;
393:     }
394:   }
395:   return(0);
396: }

398: PetscErrorCode MatSolve_MKL_PARDISO(Mat A,Vec b,Vec x)
399: {
400:   Mat_MKL_PARDISO   *mat_mkl_pardiso=(Mat_MKL_PARDISO*)A->data;
401:   PetscErrorCode    ierr;
402:   PetscScalar       *xarray;
403:   const PetscScalar *barray;

406:   mat_mkl_pardiso->nrhs = 1;
407:   VecGetArray(x,&xarray);
408:   VecGetArrayRead(b,&barray);

410:   if (!mat_mkl_pardiso->schur) mat_mkl_pardiso->phase = JOB_SOLVE_ITERATIVE_REFINEMENT;
411:   else mat_mkl_pardiso->phase = JOB_SOLVE_FORWARD_SUBSTITUTION;

413:   if (barray == xarray) { /* if the two vectors share the same memory */
414:     PetscScalar *work;
415:     if (!mat_mkl_pardiso->schur_work) {
416:       PetscMalloc1(mat_mkl_pardiso->n,&work);
417:     } else {
418:       work = mat_mkl_pardiso->schur_work;
419:     }
420:     mat_mkl_pardiso->iparm[6-1] = 1;
421:     MKL_PARDISO (mat_mkl_pardiso->pt,
422:       &mat_mkl_pardiso->maxfct,
423:       &mat_mkl_pardiso->mnum,
424:       &mat_mkl_pardiso->mtype,
425:       &mat_mkl_pardiso->phase,
426:       &mat_mkl_pardiso->n,
427:       mat_mkl_pardiso->a,
428:       mat_mkl_pardiso->ia,
429:       mat_mkl_pardiso->ja,
430:       NULL,
431:       &mat_mkl_pardiso->nrhs,
432:       mat_mkl_pardiso->iparm,
433:       &mat_mkl_pardiso->msglvl,
434:       (void*)xarray,
435:       (void*)work,
436:       &mat_mkl_pardiso->err);
437:     if (!mat_mkl_pardiso->schur_work) {
438:       PetscFree(work);
439:     }
440:   } else {
441:     mat_mkl_pardiso->iparm[6-1] = 0;
442:     MKL_PARDISO (mat_mkl_pardiso->pt,
443:       &mat_mkl_pardiso->maxfct,
444:       &mat_mkl_pardiso->mnum,
445:       &mat_mkl_pardiso->mtype,
446:       &mat_mkl_pardiso->phase,
447:       &mat_mkl_pardiso->n,
448:       mat_mkl_pardiso->a,
449:       mat_mkl_pardiso->ia,
450:       mat_mkl_pardiso->ja,
451:       mat_mkl_pardiso->perm,
452:       &mat_mkl_pardiso->nrhs,
453:       mat_mkl_pardiso->iparm,
454:       &mat_mkl_pardiso->msglvl,
455:       (void*)barray,
456:       (void*)xarray,
457:       &mat_mkl_pardiso->err);
458:   }
459:   VecRestoreArrayRead(b,&barray);

461:   if (mat_mkl_pardiso->err < 0) SETERRQ1(PETSC_COMM_SELF,PETSC_ERR_LIB,"Error reported by MKL_PARDISO: err=%d. Please check manual",mat_mkl_pardiso->err);

463:   if (mat_mkl_pardiso->schur) { /* solve Schur complement and expand solution */
464:     PetscInt shift = mat_mkl_pardiso->schur_size;

466:     /* if inverted, uses BLAS *MM subroutines, otherwise LAPACK *TRS */
467:     if (A->schur_status != MAT_FACTOR_SCHUR_INVERTED) {
468:       shift = 0;
469:     }

471:     if (!mat_mkl_pardiso->solve_interior) {
472:       /* solve Schur complement */
473:       MatMKLPardisoScatterSchur_Private(mat_mkl_pardiso,xarray,mat_mkl_pardiso->schur_work,PETSC_TRUE);
474:       MatMKLPardisoSolveSchur_Private(A,mat_mkl_pardiso->schur_work,mat_mkl_pardiso->schur_work+shift);
475:       MatMKLPardisoScatterSchur_Private(mat_mkl_pardiso,xarray,mat_mkl_pardiso->schur_work+shift,PETSC_FALSE);
476:     } else { /* if we are solving for the interior problem, any value in barray[schur] forward-substituted to xarray[schur] will be neglected */
477:       PetscInt i;
478:       for (i=0;i<mat_mkl_pardiso->schur_size;i++) {
479:         xarray[mat_mkl_pardiso->schur_idxs[i]] = 0.;
480:       }
481:     }

483:     /* expansion phase */
484:     mat_mkl_pardiso->iparm[6-1] = 1;
485:     mat_mkl_pardiso->phase = JOB_SOLVE_BACKWARD_SUBSTITUTION;
486:     MKL_PARDISO (mat_mkl_pardiso->pt,
487:       &mat_mkl_pardiso->maxfct,
488:       &mat_mkl_pardiso->mnum,
489:       &mat_mkl_pardiso->mtype,
490:       &mat_mkl_pardiso->phase,
491:       &mat_mkl_pardiso->n,
492:       mat_mkl_pardiso->a,
493:       mat_mkl_pardiso->ia,
494:       mat_mkl_pardiso->ja,
495:       mat_mkl_pardiso->perm,
496:       &mat_mkl_pardiso->nrhs,
497:       mat_mkl_pardiso->iparm,
498:       &mat_mkl_pardiso->msglvl,
499:       (void*)xarray,
500:       (void*)mat_mkl_pardiso->schur_work, /* according to the specs, the solution vector is always used */
501:       &mat_mkl_pardiso->err);

503:     if (mat_mkl_pardiso->err < 0) SETERRQ1(PETSC_COMM_SELF,PETSC_ERR_LIB,"Error reported by MKL_PARDISO: err=%d. Please check manual",mat_mkl_pardiso->err);
504:     mat_mkl_pardiso->iparm[6-1] = 0;
505:   }
506:   VecRestoreArray(x,&xarray);
507:   mat_mkl_pardiso->CleanUp = PETSC_TRUE;
508:   return(0);
509: }

511: PetscErrorCode MatSolveTranspose_MKL_PARDISO(Mat A,Vec b,Vec x)
512: {
513:   Mat_MKL_PARDISO *mat_mkl_pardiso=(Mat_MKL_PARDISO*)A->data;
514:   PetscInt        oiparm12;
515:   PetscErrorCode  ierr;

518:   oiparm12 = mat_mkl_pardiso->iparm[12 - 1];
519:   mat_mkl_pardiso->iparm[12 - 1] = 2;
520:   MatSolve_MKL_PARDISO(A,b,x);
521:   mat_mkl_pardiso->iparm[12 - 1] = oiparm12;
522:   return(0);
523: }

525: PetscErrorCode MatMatSolve_MKL_PARDISO(Mat A,Mat B,Mat X)
526: {
527:   Mat_MKL_PARDISO   *mat_mkl_pardiso=(Mat_MKL_PARDISO*)(A)->data;
528:   PetscErrorCode    ierr;
529:   const PetscScalar *barray;
530:   PetscScalar       *xarray;
531:   PetscBool         flg;

534:   PetscObjectBaseTypeCompare((PetscObject)B,MATSEQDENSE,&flg);
535:   if (!flg) SETERRQ(PetscObjectComm((PetscObject)A),PETSC_ERR_ARG_WRONG,"Matrix B must be MATSEQDENSE matrix");
536:   PetscObjectBaseTypeCompare((PetscObject)X,MATSEQDENSE,&flg);
537:   if (!flg) SETERRQ(PetscObjectComm((PetscObject)A),PETSC_ERR_ARG_WRONG,"Matrix X must be MATSEQDENSE matrix");

539:   MatGetSize(B,NULL,(PetscInt*)&mat_mkl_pardiso->nrhs);

541:   if (mat_mkl_pardiso->nrhs > 0) {
542:     MatDenseGetArrayRead(B,&barray);
543:     MatDenseGetArray(X,&xarray);

545:     if (barray == xarray) SETERRQ(PETSC_COMM_SELF,PETSC_ERR_SUP,"B and X cannot share the same memory location");
546:     if (!mat_mkl_pardiso->schur) mat_mkl_pardiso->phase = JOB_SOLVE_ITERATIVE_REFINEMENT;
547:     else mat_mkl_pardiso->phase = JOB_SOLVE_FORWARD_SUBSTITUTION;
548:     mat_mkl_pardiso->iparm[6-1] = 0;

550:     MKL_PARDISO (mat_mkl_pardiso->pt,
551:       &mat_mkl_pardiso->maxfct,
552:       &mat_mkl_pardiso->mnum,
553:       &mat_mkl_pardiso->mtype,
554:       &mat_mkl_pardiso->phase,
555:       &mat_mkl_pardiso->n,
556:       mat_mkl_pardiso->a,
557:       mat_mkl_pardiso->ia,
558:       mat_mkl_pardiso->ja,
559:       mat_mkl_pardiso->perm,
560:       &mat_mkl_pardiso->nrhs,
561:       mat_mkl_pardiso->iparm,
562:       &mat_mkl_pardiso->msglvl,
563:       (void*)barray,
564:       (void*)xarray,
565:       &mat_mkl_pardiso->err);
566:     if (mat_mkl_pardiso->err < 0) SETERRQ1(PETSC_COMM_SELF,PETSC_ERR_LIB,"Error reported by MKL_PARDISO: err=%d. Please check manual",mat_mkl_pardiso->err);

568:     MatDenseRestoreArrayRead(B,&barray);
569:     if (mat_mkl_pardiso->schur) { /* solve Schur complement and expand solution */
570:       PetscScalar *o_schur_work = NULL;
571:       PetscInt    shift = mat_mkl_pardiso->schur_size*mat_mkl_pardiso->nrhs,scale;
572:       PetscInt    mem = mat_mkl_pardiso->n*mat_mkl_pardiso->nrhs;

574:       /* allocate extra memory if it is needed */
575:       scale = 1;
576:       if (A->schur_status == MAT_FACTOR_SCHUR_INVERTED) scale = 2;

578:       mem *= scale;
579:       if (mem > mat_mkl_pardiso->schur_work_size) {
580:         o_schur_work = mat_mkl_pardiso->schur_work;
581:         PetscMalloc1(mem,&mat_mkl_pardiso->schur_work);
582:       }

584:       /* if inverted, uses BLAS *MM subroutines, otherwise LAPACK *TRS */
585:       if (A->schur_status != MAT_FACTOR_SCHUR_INVERTED) shift = 0;

587:       /* solve Schur complement */
588:       if (!mat_mkl_pardiso->solve_interior) {
589:         MatMKLPardisoScatterSchur_Private(mat_mkl_pardiso,xarray,mat_mkl_pardiso->schur_work,PETSC_TRUE);
590:         MatMKLPardisoSolveSchur_Private(A,mat_mkl_pardiso->schur_work,mat_mkl_pardiso->schur_work+shift);
591:         MatMKLPardisoScatterSchur_Private(mat_mkl_pardiso,xarray,mat_mkl_pardiso->schur_work+shift,PETSC_FALSE);
592:       } else { /* if we are solving for the interior problem, any value in barray[schur,n] forward-substituted to xarray[schur,n] will be neglected */
593:         PetscInt i,n,m=0;
594:         for (n=0;n<mat_mkl_pardiso->nrhs;n++) {
595:           for (i=0;i<mat_mkl_pardiso->schur_size;i++) {
596:             xarray[mat_mkl_pardiso->schur_idxs[i]+m] = 0.;
597:           }
598:           m += mat_mkl_pardiso->n;
599:         }
600:       }

602:       /* expansion phase */
603:       mat_mkl_pardiso->iparm[6-1] = 1;
604:       mat_mkl_pardiso->phase = JOB_SOLVE_BACKWARD_SUBSTITUTION;
605:       MKL_PARDISO (mat_mkl_pardiso->pt,
606:         &mat_mkl_pardiso->maxfct,
607:         &mat_mkl_pardiso->mnum,
608:         &mat_mkl_pardiso->mtype,
609:         &mat_mkl_pardiso->phase,
610:         &mat_mkl_pardiso->n,
611:         mat_mkl_pardiso->a,
612:         mat_mkl_pardiso->ia,
613:         mat_mkl_pardiso->ja,
614:         mat_mkl_pardiso->perm,
615:         &mat_mkl_pardiso->nrhs,
616:         mat_mkl_pardiso->iparm,
617:         &mat_mkl_pardiso->msglvl,
618:         (void*)xarray,
619:         (void*)mat_mkl_pardiso->schur_work, /* according to the specs, the solution vector is always used */
620:         &mat_mkl_pardiso->err);
621:       if (o_schur_work) { /* restore original schur_work (minimal size) */
622:         PetscFree(mat_mkl_pardiso->schur_work);
623:         mat_mkl_pardiso->schur_work = o_schur_work;
624:       }
625:       if (mat_mkl_pardiso->err < 0) SETERRQ1(PETSC_COMM_SELF,PETSC_ERR_LIB,"Error reported by MKL_PARDISO: err=%d. Please check manual",mat_mkl_pardiso->err);
626:       mat_mkl_pardiso->iparm[6-1] = 0;
627:     }
628:   }
629:   mat_mkl_pardiso->CleanUp = PETSC_TRUE;
630:   return(0);
631: }

633: PetscErrorCode MatFactorNumeric_MKL_PARDISO(Mat F,Mat A,const MatFactorInfo *info)
634: {
635:   Mat_MKL_PARDISO *mat_mkl_pardiso=(Mat_MKL_PARDISO*)(F)->data;
636:   PetscErrorCode  ierr;

639:   mat_mkl_pardiso->matstruc = SAME_NONZERO_PATTERN;
640:   (*mat_mkl_pardiso->Convert)(A,mat_mkl_pardiso->needsym,MAT_REUSE_MATRIX,&mat_mkl_pardiso->freeaij,&mat_mkl_pardiso->nz,&mat_mkl_pardiso->ia,&mat_mkl_pardiso->ja,(PetscScalar**)&mat_mkl_pardiso->a);

642:   mat_mkl_pardiso->phase = JOB_NUMERICAL_FACTORIZATION;
643:   MKL_PARDISO (mat_mkl_pardiso->pt,
644:     &mat_mkl_pardiso->maxfct,
645:     &mat_mkl_pardiso->mnum,
646:     &mat_mkl_pardiso->mtype,
647:     &mat_mkl_pardiso->phase,
648:     &mat_mkl_pardiso->n,
649:     mat_mkl_pardiso->a,
650:     mat_mkl_pardiso->ia,
651:     mat_mkl_pardiso->ja,
652:     mat_mkl_pardiso->perm,
653:     &mat_mkl_pardiso->nrhs,
654:     mat_mkl_pardiso->iparm,
655:     &mat_mkl_pardiso->msglvl,
656:     NULL,
657:     (void*)mat_mkl_pardiso->schur,
658:     &mat_mkl_pardiso->err);
659:   if (mat_mkl_pardiso->err < 0) SETERRQ1(PETSC_COMM_SELF,PETSC_ERR_LIB,"Error reported by MKL_PARDISO: err=%d. Please check manual",mat_mkl_pardiso->err);

661:   if (F->schur) { /* schur output from pardiso is in row major format */
662:     MatFactorRestoreSchurComplement(F,NULL,MAT_FACTOR_SCHUR_UNFACTORED);
663:     MatTranspose(F->schur,MAT_INPLACE_MATRIX,&F->schur);
664:   }
665:   mat_mkl_pardiso->matstruc = SAME_NONZERO_PATTERN;
666:   mat_mkl_pardiso->CleanUp  = PETSC_TRUE;
667:   return(0);
668: }

670: PetscErrorCode PetscSetMKL_PARDISOFromOptions(Mat F, Mat A)
671: {
672:   Mat_MKL_PARDISO     *mat_mkl_pardiso = (Mat_MKL_PARDISO*)F->data;
673:   PetscErrorCode      ierr;
674:   PetscInt            icntl,threads=1;
675:   PetscBool           flg;

678:   PetscOptionsBegin(PetscObjectComm((PetscObject)A),((PetscObject)A)->prefix,"MKL_PARDISO Options","Mat");

680:   PetscOptionsInt("-mat_mkl_pardiso_65","Number of threads to use within PARDISO","None",threads,&threads,&flg);
681:   if (flg) PetscSetMKL_PARDISOThreads((int)threads);

683:   PetscOptionsInt("-mat_mkl_pardiso_66","Maximum number of factors with identical sparsity structure that must be kept in memory at the same time","None",mat_mkl_pardiso->maxfct,&icntl,&flg);
684:   if (flg) mat_mkl_pardiso->maxfct = icntl;

686:   PetscOptionsInt("-mat_mkl_pardiso_67","Indicates the actual matrix for the solution phase","None",mat_mkl_pardiso->mnum,&icntl,&flg);
687:   if (flg) mat_mkl_pardiso->mnum = icntl;

689:   PetscOptionsInt("-mat_mkl_pardiso_68","Message level information","None",mat_mkl_pardiso->msglvl,&icntl,&flg);
690:   if (flg) mat_mkl_pardiso->msglvl = icntl;

692:   PetscOptionsInt("-mat_mkl_pardiso_69","Defines the matrix type","None",mat_mkl_pardiso->mtype,&icntl,&flg);
693:   if (flg) {
694:     void *pt[IPARM_SIZE];
695:     mat_mkl_pardiso->mtype = icntl;
696:     MKL_PARDISO_INIT(pt, &mat_mkl_pardiso->mtype, mat_mkl_pardiso->iparm);
697: #if defined(PETSC_USE_REAL_SINGLE)
698:     mat_mkl_pardiso->iparm[27] = 1;
699: #else
700:     mat_mkl_pardiso->iparm[27] = 0;
701: #endif
702:     mat_mkl_pardiso->iparm[34] = 1; /* use 0-based indexing */
703:   }
704:   PetscOptionsInt("-mat_mkl_pardiso_1","Use default values","None",mat_mkl_pardiso->iparm[0],&icntl,&flg);

706:   if (flg && icntl != 0) {
707:     PetscOptionsInt("-mat_mkl_pardiso_2","Fill-in reducing ordering for the input matrix","None",mat_mkl_pardiso->iparm[1],&icntl,&flg);
708:     if (flg) mat_mkl_pardiso->iparm[1] = icntl;

710:     PetscOptionsInt("-mat_mkl_pardiso_4","Preconditioned CGS/CG","None",mat_mkl_pardiso->iparm[3],&icntl,&flg);
711:     if (flg) mat_mkl_pardiso->iparm[3] = icntl;

713:     PetscOptionsInt("-mat_mkl_pardiso_5","User permutation","None",mat_mkl_pardiso->iparm[4],&icntl,&flg);
714:     if (flg) mat_mkl_pardiso->iparm[4] = icntl;

716:     PetscOptionsInt("-mat_mkl_pardiso_6","Write solution on x","None",mat_mkl_pardiso->iparm[5],&icntl,&flg);
717:     if (flg) mat_mkl_pardiso->iparm[5] = icntl;

719:     PetscOptionsInt("-mat_mkl_pardiso_8","Iterative refinement step","None",mat_mkl_pardiso->iparm[7],&icntl,&flg);
720:     if (flg) mat_mkl_pardiso->iparm[7] = icntl;

722:     PetscOptionsInt("-mat_mkl_pardiso_10","Pivoting perturbation","None",mat_mkl_pardiso->iparm[9],&icntl,&flg);
723:     if (flg) mat_mkl_pardiso->iparm[9] = icntl;

725:     PetscOptionsInt("-mat_mkl_pardiso_11","Scaling vectors","None",mat_mkl_pardiso->iparm[10],&icntl,&flg);
726:     if (flg) mat_mkl_pardiso->iparm[10] = icntl;

728:     PetscOptionsInt("-mat_mkl_pardiso_12","Solve with transposed or conjugate transposed matrix A","None",mat_mkl_pardiso->iparm[11],&icntl,&flg);
729:     if (flg) mat_mkl_pardiso->iparm[11] = icntl;

731:     PetscOptionsInt("-mat_mkl_pardiso_13","Improved accuracy using (non-) symmetric weighted matching","None",mat_mkl_pardiso->iparm[12],&icntl,&flg);
732:     if (flg) mat_mkl_pardiso->iparm[12] = icntl;

734:     PetscOptionsInt("-mat_mkl_pardiso_18","Numbers of non-zero elements","None",mat_mkl_pardiso->iparm[17],&icntl,&flg);
735:     if (flg) mat_mkl_pardiso->iparm[17] = icntl;

737:     PetscOptionsInt("-mat_mkl_pardiso_19","Report number of floating point operations","None",mat_mkl_pardiso->iparm[18],&icntl,&flg);
738:     if (flg) mat_mkl_pardiso->iparm[18] = icntl;

740:     PetscOptionsInt("-mat_mkl_pardiso_21","Pivoting for symmetric indefinite matrices","None",mat_mkl_pardiso->iparm[20],&icntl,&flg);
741:     if (flg) mat_mkl_pardiso->iparm[20] = icntl;

743:     PetscOptionsInt("-mat_mkl_pardiso_24","Parallel factorization control","None",mat_mkl_pardiso->iparm[23],&icntl,&flg);
744:     if (flg) mat_mkl_pardiso->iparm[23] = icntl;

746:     PetscOptionsInt("-mat_mkl_pardiso_25","Parallel forward/backward solve control","None",mat_mkl_pardiso->iparm[24],&icntl,&flg);
747:     if (flg) mat_mkl_pardiso->iparm[24] = icntl;

749:     PetscOptionsInt("-mat_mkl_pardiso_27","Matrix checker","None",mat_mkl_pardiso->iparm[26],&icntl,&flg);
750:     if (flg) mat_mkl_pardiso->iparm[26] = icntl;

752:     PetscOptionsInt("-mat_mkl_pardiso_31","Partial solve and computing selected components of the solution vectors","None",mat_mkl_pardiso->iparm[30],&icntl,&flg);
753:     if (flg) mat_mkl_pardiso->iparm[30] = icntl;

755:     PetscOptionsInt("-mat_mkl_pardiso_34","Optimal number of threads for conditional numerical reproducibility (CNR) mode","None",mat_mkl_pardiso->iparm[33],&icntl,&flg);
756:     if (flg) mat_mkl_pardiso->iparm[33] = icntl;

758:     PetscOptionsInt("-mat_mkl_pardiso_60","Intel MKL_PARDISO mode","None",mat_mkl_pardiso->iparm[59],&icntl,&flg);
759:     if (flg) mat_mkl_pardiso->iparm[59] = icntl;
760:   }
761:   PetscOptionsEnd();
762:   return(0);
763: }

765: PetscErrorCode MatFactorMKL_PARDISOInitialize_Private(Mat A, MatFactorType ftype, Mat_MKL_PARDISO *mat_mkl_pardiso)
766: {
768:   PetscInt       i,bs;
769:   PetscBool      match;

772:   for (i=0; i<IPARM_SIZE; i++) mat_mkl_pardiso->iparm[i] = 0;
773:   for (i=0; i<IPARM_SIZE; i++) mat_mkl_pardiso->pt[i] = 0;
774:   /* Default options for both sym and unsym */
 775:   mat_mkl_pardiso->iparm[ 0] =  1; /* Solver default parameters overridden with those provided in iparm */
776:   mat_mkl_pardiso->iparm[ 1] =  2; /* Metis reordering */
777:   mat_mkl_pardiso->iparm[ 5] =  0; /* Write solution into x */
778:   mat_mkl_pardiso->iparm[ 7] =  0; /* Max number of iterative refinement steps */
779:   mat_mkl_pardiso->iparm[17] = -1; /* Output: Number of nonzeros in the factor LU */
780:   mat_mkl_pardiso->iparm[18] = -1; /* Output: Mflops for LU factorization */
781: #if 0
782:   mat_mkl_pardiso->iparm[23] =  1; /* Parallel factorization control*/
783: #endif
784:   PetscObjectTypeCompareAny((PetscObject)A,&match,MATSEQBAIJ,MATSEQSBAIJ,"");
785:   MatGetBlockSize(A,&bs);
786:   if (!match || bs == 1) {
 787:     mat_mkl_pardiso->iparm[34] = 1; /* Cluster Sparse Solver uses C-style indexing for ia and ja arrays */
788:     mat_mkl_pardiso->n         = A->rmap->N;
789:   } else {
 790:     mat_mkl_pardiso->iparm[34] = 0; /* Cluster Sparse Solver uses Fortran-style indexing for ia and ja arrays */
791:     mat_mkl_pardiso->iparm[36] = bs;
792:     mat_mkl_pardiso->n         = A->rmap->N/bs;
793:   }
794:   mat_mkl_pardiso->iparm[39] =  0; /* Input: matrix/rhs/solution stored on master */

796:   mat_mkl_pardiso->CleanUp   = PETSC_FALSE;
797:   mat_mkl_pardiso->maxfct    = 1; /* Maximum number of numerical factorizations. */
798:   mat_mkl_pardiso->mnum      = 1; /* Which factorization to use. */
 799:   mat_mkl_pardiso->msglvl    = 0; /* 0: do not print; 1: print statistical information to file */
800:   mat_mkl_pardiso->phase     = -1;
801:   mat_mkl_pardiso->err       = 0;

803:   mat_mkl_pardiso->nrhs      = 1;
804:   mat_mkl_pardiso->err       = 0;
805:   mat_mkl_pardiso->phase     = -1;

807:   if (ftype == MAT_FACTOR_LU) {
808:     mat_mkl_pardiso->iparm[ 9] = 13; /* Perturb the pivot elements with 1E-13 */
809:     mat_mkl_pardiso->iparm[10] =  1; /* Use nonsymmetric permutation and scaling MPS */
810:     mat_mkl_pardiso->iparm[12] =  1; /* Switch on Maximum Weighted Matching algorithm (default for non-symmetric) */

812:   } else {
813:     mat_mkl_pardiso->iparm[ 9] = 13; /* Perturb the pivot elements with 1E-13 */
814:     mat_mkl_pardiso->iparm[10] = 0; /* Use nonsymmetric permutation and scaling MPS */
815:     mat_mkl_pardiso->iparm[12] = 1; /* Switch on Maximum Weighted Matching algorithm (default for non-symmetric) */
816: /*    mat_mkl_pardiso->iparm[20] =  1; */ /* Apply 1x1 and 2x2 Bunch-Kaufman pivoting during the factorization process */
817: #if defined(PETSC_USE_DEBUG)
818:     mat_mkl_pardiso->iparm[26] = 1; /* Matrix checker */
819: #endif
820:   }
 821:   PetscMalloc1(A->rmap->N, &mat_mkl_pardiso->perm); /* PetscMalloc1() takes a count of elements, not bytes */
822:   for (i=0; i<A->rmap->N; i++) {
823:     mat_mkl_pardiso->perm[i] = 0;
824:   }
825:   mat_mkl_pardiso->schur_size = 0;
826:   return(0);
827: }

829: PetscErrorCode MatFactorSymbolic_AIJMKL_PARDISO_Private(Mat F,Mat A,const MatFactorInfo *info)
830: {
831:   Mat_MKL_PARDISO *mat_mkl_pardiso = (Mat_MKL_PARDISO*)F->data;
832:   PetscErrorCode  ierr;

835:   mat_mkl_pardiso->matstruc = DIFFERENT_NONZERO_PATTERN;
836:   PetscSetMKL_PARDISOFromOptions(F,A);

838:   /* throw away any previously computed structure */
839:   if (mat_mkl_pardiso->freeaij) {
840:     PetscFree2(mat_mkl_pardiso->ia,mat_mkl_pardiso->ja);
841:     if (mat_mkl_pardiso->iparm[34] == 1) {
842:       PetscFree(mat_mkl_pardiso->a);
843:     }
844:   }
845:   (*mat_mkl_pardiso->Convert)(A,mat_mkl_pardiso->needsym,MAT_INITIAL_MATRIX,&mat_mkl_pardiso->freeaij,&mat_mkl_pardiso->nz,&mat_mkl_pardiso->ia,&mat_mkl_pardiso->ja,(PetscScalar**)&mat_mkl_pardiso->a);
846:   if (mat_mkl_pardiso->iparm[34] == 1) mat_mkl_pardiso->n = A->rmap->N;
847:   else mat_mkl_pardiso->n = A->rmap->N/A->rmap->bs;

849:   mat_mkl_pardiso->phase = JOB_ANALYSIS;

851:   MKL_PARDISO (mat_mkl_pardiso->pt,
852:     &mat_mkl_pardiso->maxfct,
853:     &mat_mkl_pardiso->mnum,
854:     &mat_mkl_pardiso->mtype,
855:     &mat_mkl_pardiso->phase,
856:     &mat_mkl_pardiso->n,
857:     mat_mkl_pardiso->a,
858:     mat_mkl_pardiso->ia,
859:     mat_mkl_pardiso->ja,
860:     mat_mkl_pardiso->perm,
861:     &mat_mkl_pardiso->nrhs,
862:     mat_mkl_pardiso->iparm,
863:     &mat_mkl_pardiso->msglvl,
864:     NULL,
865:     NULL,
866:     &mat_mkl_pardiso->err);
867:   if (mat_mkl_pardiso->err < 0) SETERRQ1(PETSC_COMM_SELF,PETSC_ERR_LIB,"Error reported by MKL_PARDISO: err=%d. Please check manual",mat_mkl_pardiso->err);

869:   mat_mkl_pardiso->CleanUp = PETSC_TRUE;

871:   if (F->factortype == MAT_FACTOR_LU) F->ops->lufactornumeric = MatFactorNumeric_MKL_PARDISO;
872:   else F->ops->choleskyfactornumeric = MatFactorNumeric_MKL_PARDISO;

874:   F->ops->solve           = MatSolve_MKL_PARDISO;
875:   F->ops->solvetranspose  = MatSolveTranspose_MKL_PARDISO;
876:   F->ops->matsolve        = MatMatSolve_MKL_PARDISO;
877:   return(0);
878: }

880: PetscErrorCode MatLUFactorSymbolic_AIJMKL_PARDISO(Mat F,Mat A,IS r,IS c,const MatFactorInfo *info)
881: {

885:   MatFactorSymbolic_AIJMKL_PARDISO_Private(F, A, info);
886:   return(0);
887: }

889: #if !defined(PETSC_USE_COMPLEX)
890: PetscErrorCode MatGetInertia_MKL_PARDISO(Mat F,PetscInt *nneg,PetscInt *nzero,PetscInt *npos)
891: {
892:   Mat_MKL_PARDISO   *mat_mkl_pardiso=(Mat_MKL_PARDISO*)F->data;

895:   if (nneg) *nneg = mat_mkl_pardiso->iparm[22];
896:   if (npos) *npos = mat_mkl_pardiso->iparm[21];
897:   if (nzero) *nzero = F->rmap->N - (mat_mkl_pardiso->iparm[22] + mat_mkl_pardiso->iparm[21]);
898:   return(0);
899: }
900: #endif

902: PetscErrorCode MatCholeskyFactorSymbolic_AIJMKL_PARDISO(Mat F,Mat A,IS r,const MatFactorInfo *info)
903: {

907:   MatFactorSymbolic_AIJMKL_PARDISO_Private(F, A, info);
908: #if defined(PETSC_USE_COMPLEX)
909:   F->ops->getinertia = NULL;
910: #else
911:   F->ops->getinertia = MatGetInertia_MKL_PARDISO;
912: #endif
913:   return(0);
914: }

916: PetscErrorCode MatView_MKL_PARDISO(Mat A, PetscViewer viewer)
917: {
918:   PetscErrorCode    ierr;
919:   PetscBool         iascii;
920:   PetscViewerFormat format;
921:   Mat_MKL_PARDISO   *mat_mkl_pardiso=(Mat_MKL_PARDISO*)A->data;
922:   PetscInt          i;

925:   if (A->ops->solve != MatSolve_MKL_PARDISO) return(0);

927:   PetscObjectTypeCompare((PetscObject)viewer,PETSCVIEWERASCII,&iascii);
928:   if (iascii) {
929:     PetscViewerGetFormat(viewer,&format);
930:     if (format == PETSC_VIEWER_ASCII_INFO) {
931:       PetscViewerASCIIPrintf(viewer,"MKL_PARDISO run parameters:\n");
932:       PetscViewerASCIIPrintf(viewer,"MKL_PARDISO phase:             %d \n",mat_mkl_pardiso->phase);
933:       for (i=1; i<=64; i++) {
934:         PetscViewerASCIIPrintf(viewer,"MKL_PARDISO iparm[%d]:     %d \n",i, mat_mkl_pardiso->iparm[i - 1]);
935:       }
936:       PetscViewerASCIIPrintf(viewer,"MKL_PARDISO maxfct:     %d \n", mat_mkl_pardiso->maxfct);
937:       PetscViewerASCIIPrintf(viewer,"MKL_PARDISO mnum:     %d \n", mat_mkl_pardiso->mnum);
938:       PetscViewerASCIIPrintf(viewer,"MKL_PARDISO mtype:     %d \n", mat_mkl_pardiso->mtype);
939:       PetscViewerASCIIPrintf(viewer,"MKL_PARDISO n:     %d \n", mat_mkl_pardiso->n);
940:       PetscViewerASCIIPrintf(viewer,"MKL_PARDISO nrhs:     %d \n", mat_mkl_pardiso->nrhs);
941:       PetscViewerASCIIPrintf(viewer,"MKL_PARDISO msglvl:     %d \n", mat_mkl_pardiso->msglvl);
942:     }
943:   }
944:   return(0);
945: }


948: PetscErrorCode MatGetInfo_MKL_PARDISO(Mat A, MatInfoType flag, MatInfo *info)
949: {
950:   Mat_MKL_PARDISO *mat_mkl_pardiso = (Mat_MKL_PARDISO*)A->data;

953:   info->block_size        = 1.0;
954:   info->nz_used           = mat_mkl_pardiso->nz;
955:   info->nz_allocated      = mat_mkl_pardiso->nz;
956:   info->nz_unneeded       = 0.0;
957:   info->assemblies        = 0.0;
958:   info->mallocs           = 0.0;
959:   info->memory            = 0.0;
960:   info->fill_ratio_given  = 0;
961:   info->fill_ratio_needed = 0;
962:   info->factor_mallocs    = 0;
963:   return(0);
964: }

966: PetscErrorCode MatMkl_PardisoSetCntl_MKL_PARDISO(Mat F,PetscInt icntl,PetscInt ival)
967: {
968:   Mat_MKL_PARDISO *mat_mkl_pardiso = (Mat_MKL_PARDISO*)F->data;

971:   if (icntl <= 64) {
972:     mat_mkl_pardiso->iparm[icntl - 1] = ival;
973:   } else {
974:     if (icntl == 65) PetscSetMKL_PARDISOThreads(ival);
975:     else if (icntl == 66) mat_mkl_pardiso->maxfct = ival;
976:     else if (icntl == 67) mat_mkl_pardiso->mnum = ival;
977:     else if (icntl == 68) mat_mkl_pardiso->msglvl = ival;
978:     else if (icntl == 69) {
979:       void *pt[IPARM_SIZE];
980:       mat_mkl_pardiso->mtype = ival;
981:       MKL_PARDISO_INIT(pt, &mat_mkl_pardiso->mtype, mat_mkl_pardiso->iparm);
982: #if defined(PETSC_USE_REAL_SINGLE)
983:       mat_mkl_pardiso->iparm[27] = 1;
984: #else
985:       mat_mkl_pardiso->iparm[27] = 0;
986: #endif
987:       mat_mkl_pardiso->iparm[34] = 1;
988:     } else if (icntl==70) mat_mkl_pardiso->solve_interior = (PetscBool)!!ival;
989:   }
990:   return(0);
991: }

993: /*@
994:   MatMkl_PardisoSetCntl - Set Mkl_Pardiso parameters

996:    Logically Collective on Mat

998:    Input Parameters:
999: +  F - the factored matrix obtained by calling MatGetFactor()
1000: .  icntl - index of Mkl_Pardiso parameter
1001: -  ival - value of Mkl_Pardiso parameter

1003:   Options Database:
1004: .   -mat_mkl_pardiso_<icntl> <ival>

1006:    Level: beginner

1008:    References:
1009: .      Mkl_Pardiso Users' Guide

1011: .seealso: MatGetFactor()
1012: @*/
1013: PetscErrorCode MatMkl_PardisoSetCntl(Mat F,PetscInt icntl,PetscInt ival)
1014: {

1018:   PetscTryMethod(F,"MatMkl_PardisoSetCntl_C",(Mat,PetscInt,PetscInt),(F,icntl,ival));
1019:   return(0);
1020: }
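
As a usage illustration for MatMkl_PardisoSetCntl(), the following is a minimal sketch of driving the factorization directly through the Mat interface; it assumes an already assembled MATSEQAIJ matrix A and compatible vectors b and x, and omits the usual error checking for brevity:

    Mat           F;
    MatFactorInfo info;

    MatFactorInfoInitialize(&info);
    MatGetFactor(A,MATSOLVERMKL_PARDISO,MAT_FACTOR_LU,&F);   /* request an MKL_PARDISO LU factorization */
    MatMkl_PardisoSetCntl(F,65,4);                           /* icntl 65: number of threads used inside PARDISO */
    MatMkl_PardisoSetCntl(F,68,1);                           /* icntl 68: message level (msglvl) */
    MatLUFactorSymbolic(F,A,NULL,NULL,&info);
    MatLUFactorNumeric(F,A,&info);
    MatSolve(F,b,x);
    MatDestroy(&F);

The same two settings are also reachable from the options database as -mat_mkl_pardiso_65 and -mat_mkl_pardiso_68 (see the solver description below).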

1022: /*MC
1023:   MATSOLVERMKL_PARDISO - A matrix type providing direct solvers (LU and Cholesky) for
1024:   sequential matrices via the external package MKL_PARDISO.

1026:   Works with MATSEQAIJ, MATSEQBAIJ, and MATSEQSBAIJ matrices

1028:   Use -pc_type lu -pc_factor_mat_solver_type mkl_pardiso to use this direct solver

1030:   Options Database Keys:
1031: + -mat_mkl_pardiso_65 - Number of threads to use within MKL_PARDISO
1032: . -mat_mkl_pardiso_66 - Maximum number of factors with identical sparsity structure that must be kept in memory at the same time
1033: . -mat_mkl_pardiso_67 - Indicates the actual matrix for the solution phase
1034: . -mat_mkl_pardiso_68 - Message level information
1035: . -mat_mkl_pardiso_69 - Defines the matrix type. IMPORTANT: When you set this flag, the iparm parameters are reset to the defaults for that matrix type
1036: . -mat_mkl_pardiso_1  - Use default values
1037: . -mat_mkl_pardiso_2  - Fill-in reducing ordering for the input matrix
1038: . -mat_mkl_pardiso_4  - Preconditioned CGS/CG
1039: . -mat_mkl_pardiso_5  - User permutation
1040: . -mat_mkl_pardiso_6  - Write solution on x
1041: . -mat_mkl_pardiso_8  - Iterative refinement step
1042: . -mat_mkl_pardiso_10 - Pivoting perturbation
1043: . -mat_mkl_pardiso_11 - Scaling vectors
1044: . -mat_mkl_pardiso_12 - Solve with transposed or conjugate transposed matrix A
1045: . -mat_mkl_pardiso_13 - Improved accuracy using (non-) symmetric weighted matching
1046: . -mat_mkl_pardiso_18 - Numbers of non-zero elements
1047: . -mat_mkl_pardiso_19 - Report number of floating point operations
1048: . -mat_mkl_pardiso_21 - Pivoting for symmetric indefinite matrices
1049: . -mat_mkl_pardiso_24 - Parallel factorization control
1050: . -mat_mkl_pardiso_25 - Parallel forward/backward solve control
1051: . -mat_mkl_pardiso_27 - Matrix checker
1052: . -mat_mkl_pardiso_31 - Partial solve and computing selected components of the solution vectors
1053: . -mat_mkl_pardiso_34 - Optimal number of threads for conditional numerical reproducibility (CNR) mode
1054: - -mat_mkl_pardiso_60 - Intel MKL_PARDISO mode

1056:   Level: beginner

1058:   For more information, please check the mkl_pardiso manual

1060: .seealso: PCFactorSetMatSolverType(), MatSolverType

1062: M*/
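
As a hedged sketch (not part of this file), selecting this solver programmatically through the usual KSP/PC interface, equivalent to the -pc_type lu -pc_factor_mat_solver_type mkl_pardiso usage mentioned above, could look like the following; A, b and x are assumed to be an assembled sequential matrix and compatible vectors, with error checking omitted:

    KSP ksp;
    PC  pc;

    KSPCreate(PETSC_COMM_SELF,&ksp);
    KSPSetOperators(ksp,A,A);
    KSPSetType(ksp,KSPPREONLY);                         /* the direct factorization is the whole solve */
    KSPGetPC(ksp,&pc);
    PCSetType(pc,PCLU);                                 /* or PCCHOLESKY for the symmetric case */
    PCFactorSetMatSolverType(pc,MATSOLVERMKL_PARDISO);
    KSPSetFromOptions(ksp);                             /* allow command-line customization of the KSP/PC */
    KSPSolve(ksp,b,x);
    KSPDestroy(&ksp);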
1063: static PetscErrorCode MatFactorGetSolverType_mkl_pardiso(Mat A, MatSolverType *type)
1064: {
1066:   *type = MATSOLVERMKL_PARDISO;
1067:   return(0);
1068: }

1070: PETSC_EXTERN PetscErrorCode MatGetFactor_aij_mkl_pardiso(Mat A,MatFactorType ftype,Mat *F)
1071: {
1072:   Mat             B;
1073:   PetscErrorCode  ierr;
1074:   Mat_MKL_PARDISO *mat_mkl_pardiso;
1075:   PetscBool       isSeqAIJ,isSeqBAIJ,isSeqSBAIJ;

1078:   PetscObjectTypeCompare((PetscObject)A,MATSEQAIJ,&isSeqAIJ);
1079:   PetscObjectTypeCompare((PetscObject)A,MATSEQBAIJ,&isSeqBAIJ);
1080:   PetscObjectTypeCompare((PetscObject)A,MATSEQSBAIJ,&isSeqSBAIJ);
1081:   MatCreate(PetscObjectComm((PetscObject)A),&B);
1082:   MatSetSizes(B,A->rmap->n,A->cmap->n,A->rmap->N,A->cmap->N);
1083:   PetscStrallocpy("mkl_pardiso",&((PetscObject)B)->type_name);
1084:   MatSetUp(B);

1086:   PetscNewLog(B,&mat_mkl_pardiso);
1087:   B->data = mat_mkl_pardiso;

1089:   MatFactorMKL_PARDISOInitialize_Private(A, ftype, mat_mkl_pardiso);
1090:   if (ftype == MAT_FACTOR_LU) {
1091:     B->ops->lufactorsymbolic = MatLUFactorSymbolic_AIJMKL_PARDISO;
1092:     B->factortype            = MAT_FACTOR_LU;
1093:     mat_mkl_pardiso->needsym = PETSC_FALSE;
1094:     if (isSeqAIJ) mat_mkl_pardiso->Convert = MatMKLPardiso_Convert_seqaij;
1095:     else if (isSeqBAIJ) mat_mkl_pardiso->Convert = MatMKLPardiso_Convert_seqbaij;
1096:     else if (isSeqSBAIJ) SETERRQ(PetscObjectComm((PetscObject)A),PETSC_ERR_SUP,"No support for PARDISO LU factor with SEQSBAIJ format! Use MAT_FACTOR_CHOLESKY instead");
1097:     else SETERRQ1(PetscObjectComm((PetscObject)A),PETSC_ERR_SUP,"No support for PARDISO LU with %s format",((PetscObject)A)->type_name);
1098: #if defined(PETSC_USE_COMPLEX)
1099:     mat_mkl_pardiso->mtype = 13;
1100: #else
1101:     mat_mkl_pardiso->mtype = 11;
1102: #endif
1103:   } else {
1104: #if defined(PETSC_USE_COMPLEX)
1105:     SETERRQ(PetscObjectComm((PetscObject)A),PETSC_ERR_SUP,"No support for PARDISO CHOLESKY with complex scalars! Use MAT_FACTOR_LU instead");
1106: #endif
1107:     B->ops->choleskyfactorsymbolic = MatCholeskyFactorSymbolic_AIJMKL_PARDISO;
1108:     B->factortype                  = MAT_FACTOR_CHOLESKY;
1109:     if (isSeqAIJ) mat_mkl_pardiso->Convert = MatMKLPardiso_Convert_seqaij;
1110:     else if (isSeqBAIJ) mat_mkl_pardiso->Convert = MatMKLPardiso_Convert_seqbaij;
1111:     else if (isSeqSBAIJ) mat_mkl_pardiso->Convert = MatMKLPardiso_Convert_seqsbaij;
1112:     else SETERRQ1(PetscObjectComm((PetscObject)A),PETSC_ERR_SUP,"No support for PARDISO CHOLESKY with %s format",((PetscObject)A)->type_name);

1114:     mat_mkl_pardiso->needsym = PETSC_TRUE;
1115:     if (A->spd_set && A->spd) mat_mkl_pardiso->mtype = 2;
1116:     else                      mat_mkl_pardiso->mtype = -2;
1117:   }
1118:   B->ops->destroy          = MatDestroy_MKL_PARDISO;
1119:   B->ops->view             = MatView_MKL_PARDISO;
1120:   B->factortype            = ftype;
1121:   B->ops->getinfo          = MatGetInfo_MKL_PARDISO;
1122:   B->assembled             = PETSC_TRUE;

1124:   PetscFree(B->solvertype);
1125:   PetscStrallocpy(MATSOLVERMKL_PARDISO,&B->solvertype);

1127:   PetscObjectComposeFunction((PetscObject)B,"MatFactorGetSolverType_C",MatFactorGetSolverType_mkl_pardiso);
1128:   PetscObjectComposeFunction((PetscObject)B,"MatFactorSetSchurIS_C",MatFactorSetSchurIS_MKL_PARDISO);
1129:   PetscObjectComposeFunction((PetscObject)B,"MatMkl_PardisoSetCntl_C",MatMkl_PardisoSetCntl_MKL_PARDISO);

1131:   *F = B;
1132:   return(0);
1133: }

1135: PETSC_EXTERN PetscErrorCode MatSolverTypeRegister_MKL_Pardiso(void)
1136: {

1140:   MatSolverTypeRegister(MATSOLVERMKL_PARDISO,MATSEQAIJ,MAT_FACTOR_LU,MatGetFactor_aij_mkl_pardiso);
1141:   MatSolverTypeRegister(MATSOLVERMKL_PARDISO,MATSEQAIJ,MAT_FACTOR_CHOLESKY,MatGetFactor_aij_mkl_pardiso);
1142:   MatSolverTypeRegister(MATSOLVERMKL_PARDISO,MATSEQBAIJ,MAT_FACTOR_LU,MatGetFactor_aij_mkl_pardiso);
1143:   MatSolverTypeRegister(MATSOLVERMKL_PARDISO,MATSEQSBAIJ,MAT_FACTOR_CHOLESKY,MatGetFactor_aij_mkl_pardiso);
1144:   return(0);
1145: }