Actual source code: pastix.c

petsc-3.3-p7 2013-05-11
/*
 Provides an interface to the PaStiX sparse solver
 */
#include <../src/mat/impls/aij/seq/aij.h>
#include <../src/mat/impls/aij/mpi/mpiaij.h>
#include <../src/mat/impls/sbaij/seq/sbaij.h>
#include <../src/mat/impls/sbaij/mpi/mpisbaij.h>

#if defined(PETSC_HAVE_STDLIB_H)
#include <stdlib.h>
#endif
#if defined(PETSC_HAVE_STRING_H)
#include <string.h>
#endif

#if defined(PETSC_USE_COMPLEX) && defined(__cplusplus)
#define _H_COMPLEX
#endif

EXTERN_C_BEGIN
#include <pastix.h>
EXTERN_C_END

#ifdef PETSC_USE_COMPLEX
#ifdef PETSC_USE_REAL_SINGLE
#define PASTIX_CALL c_pastix
#define PASTIX_CHECKMATRIX c_pastix_checkMatrix
#define PastixScalar COMPLEX
#else
#define PASTIX_CALL z_pastix
#define PASTIX_CHECKMATRIX z_pastix_checkMatrix
#define PastixScalar DCOMPLEX
#endif

#else /* PETSC_USE_COMPLEX */

#ifdef PETSC_USE_REAL_SINGLE
#define PASTIX_CALL s_pastix
#define PASTIX_CHECKMATRIX s_pastix_checkMatrix
#define PastixScalar float
#else
#define PASTIX_CALL d_pastix
#define PASTIX_CHECKMATRIX d_pastix_checkMatrix
#define PastixScalar double
#endif

#endif /* PETSC_USE_COMPLEX */

typedef struct Mat_Pastix_ {
  pastix_data_t *pastix_data;    /* PaStiX data storage structure                        */
  MatStructure   matstruc;
  PetscInt       n;              /* Number of columns in the matrix                      */
  PetscInt       *colptr;        /* Index of first element of each column in row and val */
  PetscInt       *row;           /* Row of each element of the matrix                    */
  PetscScalar    *val;           /* Value of each element of the matrix                  */
  PetscInt       *perm;          /* Permutation table                                    */
  PetscInt       *invp;          /* Inverse permutation table                            */
  PetscScalar    *rhs;           /* Right-hand-side member                               */
  PetscInt       rhsnbr;         /* Number of right-hand sides (must be 1)               */
  PetscInt       iparm[64];      /* Integer parameters                                   */
  double         dparm[64];      /* Floating point parameters                            */
  MPI_Comm       pastix_comm;    /* PaStiX MPI communicator                              */
  PetscMPIInt    commRank;       /* MPI rank                                             */
  PetscMPIInt    commSize;       /* MPI communicator size                                */
  PetscBool      CleanUpPastix;  /* Boolean indicating if we call the PaStiX clean step  */
  VecScatter     scat_rhs;
  VecScatter     scat_sol;
  Vec            b_seq;
  PetscBool      isAIJ;
  PetscErrorCode (*Destroy)(Mat);
} Mat_Pastix;

extern PetscErrorCode MatDuplicate_Pastix(Mat,MatDuplicateOption,Mat*);

/*
   Convert a PETSc SeqAIJ matrix to CSC format: colptr[n], row[nz], val[nz]

  input:
    A       - matrix in seqaij or mpisbaij (bs=1) format
    valOnly - FALSE: arrays are allocated and filled with both the CSC structure and the values
              TRUE:  only the values are filled (the structure is assumed unchanged)
  output:
    n       - Size of the matrix
    colptr  - Index of first element of each column in row and val
    row     - Row of each element of the matrix
    values  - Value of each element of the matrix
 */
PetscErrorCode MatConvertToCSC(Mat A,PetscBool valOnly,PetscInt *n,PetscInt **colptr,PetscInt **row,PetscScalar **values)
{
  Mat_SeqAIJ     *aa      = (Mat_SeqAIJ*)A->data;
  PetscInt       *rowptr  = aa->i;
  PetscInt       *col     = aa->j;
  PetscScalar    *rvalues = aa->a;
  PetscInt        m       = A->rmap->N;
  PetscInt        nnz;
  PetscInt        i,j,k;
  PetscInt        base = 1;
  PetscInt        idx;
  PetscErrorCode  ierr;
  PetscInt        colidx;
  PetscInt       *colcount;
  PetscBool       isSBAIJ;
  PetscBool       isSeqSBAIJ;
  PetscBool       isMpiSBAIJ;
  PetscBool       isSym;
  PetscBool       flg;
  PetscInt        icntl;
  PetscInt        verb;
  PetscInt        check;

  MatIsSymmetric(A,0.0,&isSym);
  PetscObjectTypeCompare((PetscObject)A,MATSBAIJ,&isSBAIJ);
  PetscObjectTypeCompare((PetscObject)A,MATSEQSBAIJ,&isSeqSBAIJ);
  PetscObjectTypeCompare((PetscObject)A,MATMPISBAIJ,&isMpiSBAIJ);

  *n = A->cmap->N;

  /* PaStiX only needs the triangular part if the matrix is symmetric */
  if (isSym && !(isSBAIJ || isSeqSBAIJ || isMpiSBAIJ)) {
    nnz = (aa->nz - *n)/2 + *n;
  } else {
    nnz = aa->nz;
  }

  if (!valOnly) {
    PetscMalloc(((*n)+1)*sizeof(PetscInt)   ,colptr);
    PetscMalloc(nnz     *sizeof(PetscInt)   ,row);
    PetscMalloc(nnz     *sizeof(PetscScalar),values);

    if (isSBAIJ || isSeqSBAIJ || isMpiSBAIJ) {
      PetscMemcpy(*colptr,rowptr,((*n)+1)*sizeof(PetscInt));
      for (i = 0; i < *n+1; i++)
        (*colptr)[i] += base;
      PetscMemcpy(*row,col,nnz*sizeof(PetscInt));
      for (i = 0; i < nnz; i++)
        (*row)[i] += base;
      PetscMemcpy(*values,rvalues,nnz*sizeof(PetscScalar));
    } else {
      PetscMalloc((*n)*sizeof(PetscInt),&colcount);

      for (i = 0; i < m; i++) colcount[i] = 0;
      /* Fill in colptr */
      for (i = 0; i < m; i++) {
        for (j = rowptr[i]; j < rowptr[i+1]; j++) {
          if (!isSym || col[j] <= i) colcount[col[j]]++;
        }
      }

      (*colptr)[0] = base;
      for (j = 0; j < *n; j++) {
        (*colptr)[j+1] = (*colptr)[j] + colcount[j];
        /* in the next loop we fill starting from (*colptr)[colidx] - base */
        colcount[j] = -base;
      }

      /* Fill in rows and values */
      for (i = 0; i < m; i++) {
        for (j = rowptr[i]; j < rowptr[i+1]; j++) {
          if (!isSym || col[j] <= i) {
            colidx = col[j];
            idx    = (*colptr)[colidx] + colcount[colidx];
            (*row)[idx]    = i + base;
            (*values)[idx] = rvalues[j];
            colcount[colidx]++;
          }
        }
      }
      PetscFree(colcount);
    }
  } else {
    /* Fill in the values only */
    for (i = 0; i < m; i++) {
      for (j = rowptr[i]; j < rowptr[i+1]; j++) {
        colidx = col[j];
        if ((isSBAIJ || isSeqSBAIJ || isMpiSBAIJ) || !isSym || col[j] <= i) {
          /* look for the value to fill */
          for (k = (*colptr)[colidx] - base; k < (*colptr)[colidx+1] - base; k++) {
            if (((*row)[k]-base) == i) {
              (*values)[k] = rvalues[j];
              break;
            }
          }
          /* the data structure of the sparse matrix has changed */
          if (k == (*colptr)[colidx+1] - base) SETERRQ1(PETSC_COMM_SELF,PETSC_ERR_PLIB,"overflow on k %D",k);
        }
      }
    }
  }

  icntl = -1;
  check = 0;
  PetscOptionsInt("-mat_pastix_check","Check the matrix (0: no, 1: yes)","None",check,&icntl,&flg);
  if ((flg && icntl >= 0) || PetscLogPrintInfo) {
    check = icntl;
  }
  if (check == 1) {
    PetscScalar *tmpvalues;
    PetscInt    *tmprows,*tmpcolptr;
    tmpvalues = (PetscScalar*)malloc(nnz*sizeof(PetscScalar));    if (!tmpvalues) SETERRQ(PETSC_COMM_SELF,PETSC_ERR_MEM,"Unable to allocate memory");
    tmprows   = (PetscInt*)   malloc(nnz*sizeof(PetscInt));       if (!tmprows)   SETERRQ(PETSC_COMM_SELF,PETSC_ERR_MEM,"Unable to allocate memory");
    tmpcolptr = (PetscInt*)   malloc((*n+1)*sizeof(PetscInt));    if (!tmpcolptr) SETERRQ(PETSC_COMM_SELF,PETSC_ERR_MEM,"Unable to allocate memory");

    PetscMemcpy(tmpcolptr,*colptr,(*n+1)*sizeof(PetscInt));
    PetscMemcpy(tmprows,*row,nnz*sizeof(PetscInt));
    PetscMemcpy(tmpvalues,*values,nnz*sizeof(PetscScalar));
    PetscFree(*row);
    PetscFree(*values);

    icntl = -1;
    verb  = API_VERBOSE_NOT;
    PetscOptionsInt("-mat_pastix_verbose","iparm[IPARM_VERBOSE] : level of printing (0 to 2)","None",verb,&icntl,&flg);
    if ((flg && icntl >= 0) || PetscLogPrintInfo) {
      verb = icntl;
    }
    /* Let PaStiX check the CSC matrix; it may correct the structure in tmpcolptr/tmprows */
    PASTIX_CHECKMATRIX(MPI_COMM_WORLD,verb,((isSym != 0) ? API_SYM_YES : API_SYM_NO),API_YES,*n,&tmpcolptr,&tmprows,(PastixScalar**)&tmpvalues,NULL,1);

    PetscMemcpy(*colptr,tmpcolptr,(*n+1)*sizeof(PetscInt));
    PetscMalloc(((*colptr)[*n]-1)*sizeof(PetscInt),row);
    PetscMemcpy(*row,tmprows,((*colptr)[*n]-1)*sizeof(PetscInt));
    PetscMalloc(((*colptr)[*n]-1)*sizeof(PetscScalar),values);
    PetscMemcpy(*values,tmpvalues,((*colptr)[*n]-1)*sizeof(PetscScalar));
    free(tmpvalues);
    free(tmprows);
    free(tmpcolptr);
  }
  return(0);
}
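
/*
   Editor's illustration (not part of the original pastix.c): a minimal driver
   sketching how MatConvertToCSC() above could be exercised on a small
   unsymmetric SeqAIJ matrix.  The 3x3 matrix and the expected arrays are
   illustrative assumptions; the block is guarded out of compilation.
*/
#if 0
#include <petscmat.h>
extern PetscErrorCode MatConvertToCSC(Mat,PetscBool,PetscInt*,PetscInt**,PetscInt**,PetscScalar**);

int main(int argc,char **argv)
{
  Mat          A;
  PetscInt     n,i,*colptr,*row;
  PetscScalar *val;

  PetscInitialize(&argc,&argv,PETSC_NULL,PETSC_NULL);
  /* A = [2 0 1; 3 4 0; 0 0 5], held row-wise (CSR) by SeqAIJ */
  MatCreateSeqAIJ(PETSC_COMM_SELF,3,3,3,PETSC_NULL,&A);
  MatSetValue(A,0,0,2.0,INSERT_VALUES); MatSetValue(A,0,2,1.0,INSERT_VALUES);
  MatSetValue(A,1,0,3.0,INSERT_VALUES); MatSetValue(A,1,1,4.0,INSERT_VALUES);
  MatSetValue(A,2,2,5.0,INSERT_VALUES);
  MatAssemblyBegin(A,MAT_FINAL_ASSEMBLY);
  MatAssemblyEnd(A,MAT_FINAL_ASSEMBLY);

  /* Expected 1-based CSC output for this particular matrix:
     colptr = {1,3,4,6}, row = {1,2,2,1,3}, val = {2,3,4,1,5} */
  MatConvertToCSC(A,PETSC_FALSE,&n,&colptr,&row,&val);
  for (i = 0; i < n+1; i++) PetscPrintf(PETSC_COMM_SELF,"colptr[%D] = %D\n",i,colptr[i]);

  PetscFree(colptr);
  PetscFree(row);
  PetscFree(val);
  MatDestroy(&A);
  PetscFinalize();
  return 0;
}
#endif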



/*
  Call the clean step of PaStiX if lu->CleanUpPastix is true.
  Free the CSC matrix.
 */
PetscErrorCode MatDestroy_Pastix(Mat A)
{
  Mat_Pastix      *lu=(Mat_Pastix*)A->spptr;
  PetscErrorCode   ierr;

  if (lu && lu->CleanUpPastix) {
    /* Terminate the instance and deallocate memory */
    if (lu->commSize > 1) {
      VecScatterDestroy(&lu->scat_rhs);
      VecDestroy(&lu->b_seq);
      VecScatterDestroy(&lu->scat_sol);
    }

    lu->iparm[IPARM_START_TASK] = API_TASK_CLEAN;
    lu->iparm[IPARM_END_TASK]   = API_TASK_CLEAN;

    PASTIX_CALL(&(lu->pastix_data),
                lu->pastix_comm,
                lu->n,
                lu->colptr,
                lu->row,
                (PastixScalar*)lu->val,
                lu->perm,
                lu->invp,
                (PastixScalar*)lu->rhs,
                lu->rhsnbr,
                lu->iparm,
                lu->dparm);

    PetscFree(lu->colptr);
    PetscFree(lu->row);
    PetscFree(lu->val);
    PetscFree(lu->perm);
    PetscFree(lu->invp);
    MPI_Comm_free(&(lu->pastix_comm));
  }
  if (lu && lu->Destroy) {
    (lu->Destroy)(A);
  }
  PetscFree(A->spptr);
  return(0);
}

/*
  Gather the right-hand side,
  call the PaStiX solve step,
  and scatter the solution.
 */
PetscErrorCode MatSolve_PaStiX(Mat A,Vec b,Vec x)
{
  Mat_Pastix     *lu=(Mat_Pastix*)A->spptr;
  PetscScalar    *array;
  Vec             x_seq;
  PetscErrorCode  ierr;

  lu->rhsnbr = 1;
  x_seq = lu->b_seq;
  if (lu->commSize > 1) {
    /* PaStiX only supports a centralized rhs; scatter b into a sequential rhs vector */
    VecScatterBegin(lu->scat_rhs,b,x_seq,INSERT_VALUES,SCATTER_FORWARD);
    VecScatterEnd(lu->scat_rhs,b,x_seq,INSERT_VALUES,SCATTER_FORWARD);
    VecGetArray(x_seq,&array);
  } else {  /* size == 1 */
    VecCopy(b,x);
    VecGetArray(x,&array);
  }
  lu->rhs = array;
  if (lu->commSize == 1) {
    VecRestoreArray(x,&array);
  } else {
    VecRestoreArray(x_seq,&array);
  }

  /* solve phase */
  /*-------------*/
  lu->iparm[IPARM_START_TASK] = API_TASK_SOLVE;
  lu->iparm[IPARM_END_TASK]   = API_TASK_REFINE;
  lu->iparm[IPARM_RHS_MAKING] = API_RHS_B;

  PASTIX_CALL(&(lu->pastix_data),
              lu->pastix_comm,
              lu->n,
              lu->colptr,
              lu->row,
              (PastixScalar*)lu->val,
              lu->perm,
              lu->invp,
              (PastixScalar*)lu->rhs,
              lu->rhsnbr,
              lu->iparm,
              lu->dparm);

  if (lu->iparm[IPARM_ERROR_NUMBER] < 0) SETERRQ1(PETSC_COMM_SELF,PETSC_ERR_LIB,"Error reported by PaStiX in solve phase: lu->iparm[IPARM_ERROR_NUMBER] = %d\n",lu->iparm[IPARM_ERROR_NUMBER]);

  if (lu->commSize == 1) {
    VecRestoreArray(x,&(lu->rhs));
  } else {
    VecRestoreArray(x_seq,&(lu->rhs));
  }

  if (lu->commSize > 1) { /* convert the PaStiX centralized solution to a PETSc MPI x */
    VecScatterBegin(lu->scat_sol,x_seq,x,INSERT_VALUES,SCATTER_FORWARD);
    VecScatterEnd(lu->scat_sol,x_seq,x,INSERT_VALUES,SCATTER_FORWARD);
  }
  return(0);
}
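
/*
   Editor's sketch (not part of the original file): driving the factor routines
   defined in this file by hand through PETSc's generic factorization API.  The
   name ExamplePastixLU is hypothetical, and A, b, x are assumed to be an
   assembled AIJ matrix and matching vectors.
*/
#if 0
static PetscErrorCode ExamplePastixLU(Mat A,Vec b,Vec x)
{
  Mat           F;
  MatFactorInfo info;
  IS            rperm,cperm;

  MatGetFactor(A,MATSOLVERPASTIX,MAT_FACTOR_LU,&F);
  MatFactorInfoInitialize(&info);
  MatGetOrdering(A,MATORDERINGNATURAL,&rperm,&cperm);
  MatLUFactorSymbolic(F,A,rperm,cperm,&info); /* dispatches to MatLUFactorSymbolic_AIJPASTIX */
  MatLUFactorNumeric(F,A,&info);              /* dispatches to MatFactorNumeric_PaStiX */
  MatSolve(F,b,x);                            /* dispatches to MatSolve_PaStiX */
  ISDestroy(&rperm);
  ISDestroy(&cperm);
  MatDestroy(&F);
  return(0);
}
#endif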

/*
  Numeric factorization using the PaStiX solver.
 */
PetscErrorCode MatFactorNumeric_PaStiX(Mat F,Mat A,const MatFactorInfo *info)
{
  Mat_Pastix    *lu =(Mat_Pastix*)(F)->spptr;
  Mat           *tseq;
  PetscErrorCode ierr;
  PetscInt       icntl;
  PetscInt       M=A->rmap->N;
  PetscBool      valOnly,flg,isSym;
  Mat            F_diag;
  IS             is_iden;
  Vec            b;
  IS             isrow;
  PetscBool      isSeqAIJ,isSeqSBAIJ,isMPIAIJ;


  PetscObjectTypeCompare((PetscObject)A,MATSEQAIJ,&isSeqAIJ);
  PetscObjectTypeCompare((PetscObject)A,MATMPIAIJ,&isMPIAIJ);
  PetscObjectTypeCompare((PetscObject)A,MATSEQSBAIJ,&isSeqSBAIJ);
  if (lu->matstruc == DIFFERENT_NONZERO_PATTERN) {
    (F)->ops->solve = MatSolve_PaStiX;

    /* Initialize a PaStiX instance */
    MPI_Comm_dup(((PetscObject)A)->comm,&(lu->pastix_comm));
    MPI_Comm_rank(lu->pastix_comm,&lu->commRank);
    MPI_Comm_size(lu->pastix_comm,&lu->commSize);

    /* Set PaStiX options */
    lu->iparm[IPARM_MODIFY_PARAMETER] = API_NO;
    lu->iparm[IPARM_START_TASK]       = API_TASK_INIT;
    lu->iparm[IPARM_END_TASK]         = API_TASK_INIT;
    lu->rhsnbr = 1;

    /* Call to set the default PaStiX options */
    PASTIX_CALL(&(lu->pastix_data),
                lu->pastix_comm,
                lu->n,
                lu->colptr,
                lu->row,
                (PastixScalar*)lu->val,
                lu->perm,
                lu->invp,
                (PastixScalar*)lu->rhs,
                lu->rhsnbr,
                lu->iparm,
                lu->dparm);

    PetscOptionsBegin(((PetscObject)A)->comm,((PetscObject)A)->prefix,"PaStiX Options","Mat");

    icntl = -1;
    lu->iparm[IPARM_VERBOSE] = API_VERBOSE_NOT;
    PetscOptionsInt("-mat_pastix_verbose","iparm[IPARM_VERBOSE] : level of printing (0 to 2)","None",lu->iparm[IPARM_VERBOSE],&icntl,&flg);
    if ((flg && icntl >= 0) || PetscLogPrintInfo) {
      lu->iparm[IPARM_VERBOSE] = icntl;
    }
    icntl = -1;
    PetscOptionsInt("-mat_pastix_threadnbr","iparm[IPARM_THREAD_NBR] : number of threads per MPI process","None",lu->iparm[IPARM_THREAD_NBR],&icntl,&flg);
    if (flg && icntl > 0) {
      lu->iparm[IPARM_THREAD_NBR] = icntl;
    }
    PetscOptionsEnd();
    valOnly = PETSC_FALSE;
  } else {
    if (isSeqAIJ || isMPIAIJ) {
      PetscFree(lu->colptr);
      PetscFree(lu->row);
      PetscFree(lu->val);
      valOnly = PETSC_FALSE;
    } else valOnly = PETSC_TRUE;
  }

  lu->iparm[IPARM_MATRIX_VERIFICATION] = API_YES;

  /* convert the MPI A to a sequential matrix */
  ISCreateStride(PETSC_COMM_SELF,M,0,1,&isrow);
  MatGetSubMatrices(A,1,&isrow,&isrow,MAT_INITIAL_MATRIX,&tseq);
  ISDestroy(&isrow);

  MatConvertToCSC(*tseq,valOnly,&lu->n,&lu->colptr,&lu->row,&lu->val);
  MatIsSymmetric(*tseq,0.0,&isSym);
  MatDestroyMatrices(1,&tseq);

  if (!lu->perm) {
    PetscMalloc((lu->n)*sizeof(PetscInt),&(lu->perm));
    PetscMalloc((lu->n)*sizeof(PetscInt),&(lu->invp));
  }

  if (isSym) {
    /* On symmetric matrices, use an LDLt factorization */
    lu->iparm[IPARM_SYM]           = API_SYM_YES;
    lu->iparm[IPARM_FACTORIZATION] = API_FACT_LDLT;
  } else {
    /* On unsymmetric matrices, use an LU factorization */
    lu->iparm[IPARM_SYM]           = API_SYM_NO;
    lu->iparm[IPARM_FACTORIZATION] = API_FACT_LU;
  }

  /*----------------*/
  if (lu->matstruc == DIFFERENT_NONZERO_PATTERN) {
    if (!(isSeqAIJ || isSeqSBAIJ)) {
      /* PaStiX only supports a centralized rhs; create the scatter scat_rhs for repeated use in MatSolve() */
      VecCreateSeq(PETSC_COMM_SELF,A->cmap->N,&lu->b_seq);
      ISCreateStride(PETSC_COMM_SELF,A->cmap->N,0,1,&is_iden);
      VecCreate(((PetscObject)A)->comm,&b);
      VecSetSizes(b,A->rmap->n,PETSC_DECIDE);
      VecSetFromOptions(b);

      VecScatterCreate(b,is_iden,lu->b_seq,is_iden,&lu->scat_rhs);
      VecScatterCreate(lu->b_seq,is_iden,b,is_iden,&lu->scat_sol);
      ISDestroy(&is_iden);
      VecDestroy(&b);
    }
    lu->iparm[IPARM_START_TASK] = API_TASK_ORDERING;
    lu->iparm[IPARM_END_TASK]   = API_TASK_NUMFACT;

    PASTIX_CALL(&(lu->pastix_data),
                lu->pastix_comm,
                lu->n,
                lu->colptr,
                lu->row,
                (PastixScalar*)lu->val,
                lu->perm,
                lu->invp,
                (PastixScalar*)lu->rhs,
                lu->rhsnbr,
                lu->iparm,
                lu->dparm);
    if (lu->iparm[IPARM_ERROR_NUMBER] < 0) SETERRQ1(PETSC_COMM_SELF,PETSC_ERR_LIB,"Error reported by PaStiX in analysis phase: iparm(IPARM_ERROR_NUMBER)=%d\n",lu->iparm[IPARM_ERROR_NUMBER]);
  } else {
    lu->iparm[IPARM_START_TASK] = API_TASK_NUMFACT;
    lu->iparm[IPARM_END_TASK]   = API_TASK_NUMFACT;
    PASTIX_CALL(&(lu->pastix_data),
                lu->pastix_comm,
                lu->n,
                lu->colptr,
                lu->row,
                (PastixScalar*)lu->val,
                lu->perm,
                lu->invp,
                (PastixScalar*)lu->rhs,
                lu->rhsnbr,
                lu->iparm,
                lu->dparm);

    if (lu->iparm[IPARM_ERROR_NUMBER] < 0) SETERRQ1(PETSC_COMM_SELF,PETSC_ERR_LIB,"Error reported by PaStiX in numerical factorization phase: iparm(IPARM_ERROR_NUMBER)=%d\n",lu->iparm[IPARM_ERROR_NUMBER]);
  }

  if (lu->commSize > 1) {
    if ((F)->factortype == MAT_FACTOR_LU) {
      F_diag = ((Mat_MPIAIJ*)(F)->data)->A;
    } else {
      F_diag = ((Mat_MPISBAIJ*)(F)->data)->A;
    }
    F_diag->assembled = PETSC_TRUE;
  }
  (F)->assembled    = PETSC_TRUE;
  lu->matstruc      = SAME_NONZERO_PATTERN;
  lu->CleanUpPastix = PETSC_TRUE;
  return(0);
}
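
/*
   Editor's note: every PASTIX_CALL in this file follows the same convention --
   set iparm[IPARM_START_TASK]..iparm[IPARM_END_TASK] to a window of the PaStiX
   pipeline (INIT, ORDERING, SYMBFACT, ANALYSE, NUMFACT, SOLVE, REFINE, CLEAN)
   and re-enter the driver.  A hypothetical helper (PastixRunTasks is neither a
   PaStiX nor a PETSc routine) could factor that repetition out:
*/
#if 0
static PetscErrorCode PastixRunTasks(Mat_Pastix *lu,PetscInt start,PetscInt end)
{
  lu->iparm[IPARM_START_TASK] = start;
  lu->iparm[IPARM_END_TASK]   = end;
  PASTIX_CALL(&(lu->pastix_data),lu->pastix_comm,lu->n,lu->colptr,lu->row,
              (PastixScalar*)lu->val,lu->perm,lu->invp,(PastixScalar*)lu->rhs,
              lu->rhsnbr,lu->iparm,lu->dparm);
  if (lu->iparm[IPARM_ERROR_NUMBER] < 0) SETERRQ1(PETSC_COMM_SELF,PETSC_ERR_LIB,"Error reported by PaStiX: iparm(IPARM_ERROR_NUMBER)=%d",lu->iparm[IPARM_ERROR_NUMBER]);
  return(0);
}

/* e.g. the two branches of MatFactorNumeric_PaStiX above would read:
   PastixRunTasks(lu,API_TASK_ORDERING,API_TASK_NUMFACT);
   PastixRunTasks(lu,API_TASK_NUMFACT,API_TASK_NUMFACT);   */
#endif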

/* Note: the PETSc r and c permutations are ignored */
PetscErrorCode MatLUFactorSymbolic_AIJPASTIX(Mat F,Mat A,IS r,IS c,const MatFactorInfo *info)
{
  Mat_Pastix      *lu = (Mat_Pastix*)F->spptr;

  /* These are reset in MatFactorNumeric_PaStiX() based on MatIsSymmetric() */
  lu->iparm[IPARM_FACTORIZATION] = API_FACT_LU;
  lu->iparm[IPARM_SYM]           = API_SYM_NO;
  lu->matstruc                   = DIFFERENT_NONZERO_PATTERN;
  F->ops->lufactornumeric        = MatFactorNumeric_PaStiX;
  return(0);
}


/* Note: the PETSc r permutation is ignored */
PetscErrorCode MatCholeskyFactorSymbolic_SBAIJPASTIX(Mat F,Mat A,IS r,const MatFactorInfo *info)
{
  Mat_Pastix      *lu = (Mat_Pastix*)(F)->spptr;

  /* These are reset in MatFactorNumeric_PaStiX() based on MatIsSymmetric() */
  lu->iparm[IPARM_FACTORIZATION]  = API_FACT_LLT;
  lu->iparm[IPARM_SYM]            = API_SYM_YES;
  lu->matstruc                    = DIFFERENT_NONZERO_PATTERN;
  (F)->ops->choleskyfactornumeric = MatFactorNumeric_PaStiX;
  return(0);
}
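
/*
   Editor's sketch (not part of the original file): the Cholesky path, mirroring
   the LU sketch earlier.  The name ExamplePastixCholesky is hypothetical; A is
   assumed to be a symmetric SBAIJ (bs=1) matrix with matching vectors b and x.
*/
#if 0
static PetscErrorCode ExamplePastixCholesky(Mat A,Vec b,Vec x)
{
  Mat           F;
  MatFactorInfo info;
  IS            rperm,cperm;

  MatGetFactor(A,MATSOLVERPASTIX,MAT_FACTOR_CHOLESKY,&F);
  MatFactorInfoInitialize(&info);
  MatGetOrdering(A,MATORDERINGNATURAL,&rperm,&cperm);
  MatCholeskyFactorSymbolic(F,A,rperm,&info); /* the permutation is ignored, see the note above */
  MatCholeskyFactorNumeric(F,A,&info);
  MatSolve(F,b,x);
  ISDestroy(&rperm);
  ISDestroy(&cperm);
  MatDestroy(&F);
  return(0);
}
#endif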

PetscErrorCode MatView_PaStiX(Mat A,PetscViewer viewer)
{
  PetscErrorCode    ierr;
  PetscBool         iascii;
  PetscViewerFormat format;

  PetscObjectTypeCompare((PetscObject)viewer,PETSCVIEWERASCII,&iascii);
  if (iascii) {
    PetscViewerGetFormat(viewer,&format);
    if (format == PETSC_VIEWER_ASCII_INFO) {
      Mat_Pastix *lu=(Mat_Pastix*)A->spptr;

      PetscViewerASCIIPrintf(viewer,"PaStiX run parameters:\n");
      PetscViewerASCIIPrintf(viewer,"  Matrix type :                      %s \n",((lu->iparm[IPARM_SYM] == API_SYM_YES) ? "Symmetric" : "Unsymmetric"));
      PetscViewerASCIIPrintf(viewer,"  Level of printing (0,1,2):         %d \n",lu->iparm[IPARM_VERBOSE]);
      PetscViewerASCIIPrintf(viewer,"  Number of refinement iterations :  %d \n",lu->iparm[IPARM_NBITER]);
      PetscViewerASCIIPrintf(viewer,"  Relative error :                   %g \n",lu->dparm[DPARM_RELATIVE_ERROR]);
    }
  }
  return(0);
}


/*MC
     MATSOLVERPASTIX  - A solver package providing direct solvers (LU and Cholesky) for distributed
  and sequential matrices via the external package PaStiX.

  Use ./configure --download-pastix to have PETSc installed with PaStiX

  Options Database Keys:
+ -mat_pastix_verbose   <0,1,2>   - level of printing
- -mat_pastix_threadnbr <integer> - number of threads per MPI process

  Level: beginner

.seealso: PCFactorSetMatSolverPackage(), MatSolverPackage

M*/
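
/*
   Editor's sketch (not part of the manual page): selecting PaStiX from KSP.
   The name ExamplePastixKSP is hypothetical; A, b, x are assumed to exist and
   petscksp.h to be included.  The same selection is available on the command
   line with -pc_factor_mat_solver_package pastix.
*/
#if 0
static PetscErrorCode ExamplePastixKSP(Mat A,Vec b,Vec x)
{
  KSP ksp;
  PC  pc;

  KSPCreate(PETSC_COMM_WORLD,&ksp);
  KSPSetOperators(ksp,A,A,DIFFERENT_NONZERO_PATTERN); /* petsc-3.3 signature */
  KSPGetPC(ksp,&pc);
  PCSetType(pc,PCLU);                                 /* or PCCHOLESKY with an SBAIJ matrix */
  PCFactorSetMatSolverPackage(pc,MATSOLVERPASTIX);
  KSPSetFromOptions(ksp);
  KSPSolve(ksp,b,x);
  KSPDestroy(&ksp);
  return(0);
}
#endif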


PetscErrorCode MatGetInfo_PaStiX(Mat A,MatInfoType flag,MatInfo *info)
{
  Mat_Pastix *lu =(Mat_Pastix*)A->spptr;

  info->block_size        = 1.0;
  info->nz_allocated      = lu->iparm[IPARM_NNZEROS];
  info->nz_used           = lu->iparm[IPARM_NNZEROS];
  info->nz_unneeded       = 0.0;
  info->assemblies        = 0.0;
  info->mallocs           = 0.0;
  info->memory            = 0.0;
  info->fill_ratio_given  = 0;
  info->fill_ratio_needed = 0;
  info->factor_mallocs    = 0;
  return(0);
}
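
/*
   Editor's sketch (not part of the original file): how a caller reads the fill
   reported above.  F is assumed to be a matrix factored through this package.
*/
#if 0
  MatInfo info;
  MatGetInfo(F,MAT_LOCAL,&info);
  PetscPrintf(PETSC_COMM_WORLD,"PaStiX factor nonzeros: %g\n",info.nz_used);
#endif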

EXTERN_C_BEGIN
PetscErrorCode MatFactorGetSolverPackage_pastix(Mat A,const MatSolverPackage *type)
{
  *type = MATSOLVERPASTIX;
  return(0);
}
EXTERN_C_END

EXTERN_C_BEGIN
/*
    The seq and mpi versions of this function are the same
*/
PetscErrorCode MatGetFactor_seqaij_pastix(Mat A,MatFactorType ftype,Mat *F)
{
  Mat            B;
  Mat_Pastix    *pastix;

  if (ftype != MAT_FACTOR_LU) SETERRQ(PETSC_COMM_SELF,PETSC_ERR_SUP,"Cannot use PETSc AIJ matrices with PaStiX Cholesky, use SBAIJ matrix");
  /* Create the factorization matrix */
  MatCreate(((PetscObject)A)->comm,&B);
  MatSetSizes(B,A->rmap->n,A->cmap->n,A->rmap->N,A->cmap->N);
  MatSetType(B,((PetscObject)A)->type_name);
  MatSeqAIJSetPreallocation(B,0,PETSC_NULL);

  B->ops->lufactorsymbolic = MatLUFactorSymbolic_AIJPASTIX;
  B->ops->view             = MatView_PaStiX;
  B->ops->getinfo          = MatGetInfo_PaStiX;
  PetscObjectComposeFunctionDynamic((PetscObject)B,"MatFactorGetSolverPackage_C","MatFactorGetSolverPackage_pastix",MatFactorGetSolverPackage_pastix);
  B->factortype            = MAT_FACTOR_LU;

  PetscNewLog(B,Mat_Pastix,&pastix);
  pastix->CleanUpPastix             = PETSC_FALSE;
  pastix->isAIJ                     = PETSC_TRUE;
  pastix->scat_rhs                  = PETSC_NULL;
  pastix->scat_sol                  = PETSC_NULL;
  pastix->Destroy                   = B->ops->destroy;
  B->ops->destroy                   = MatDestroy_Pastix;
  B->spptr                          = (void*)pastix;

  *F = B;
  return(0);
}
EXTERN_C_END


EXTERN_C_BEGIN
PetscErrorCode MatGetFactor_mpiaij_pastix(Mat A,MatFactorType ftype,Mat *F)
{
  Mat            B;
  Mat_Pastix    *pastix;

  if (ftype != MAT_FACTOR_LU) SETERRQ(PETSC_COMM_SELF,PETSC_ERR_SUP,"Cannot use PETSc AIJ matrices with PaStiX Cholesky, use SBAIJ matrix");
  /* Create the factorization matrix */
  MatCreate(((PetscObject)A)->comm,&B);
  MatSetSizes(B,A->rmap->n,A->cmap->n,A->rmap->N,A->cmap->N);
  MatSetType(B,((PetscObject)A)->type_name);
  MatSeqAIJSetPreallocation(B,0,PETSC_NULL);
  MatMPIAIJSetPreallocation(B,0,PETSC_NULL,0,PETSC_NULL);

  B->ops->lufactorsymbolic = MatLUFactorSymbolic_AIJPASTIX;
  B->ops->view             = MatView_PaStiX;
  B->ops->getinfo          = MatGetInfo_PaStiX;
  PetscObjectComposeFunctionDynamic((PetscObject)B,"MatFactorGetSolverPackage_C","MatFactorGetSolverPackage_pastix",MatFactorGetSolverPackage_pastix);
  B->factortype            = MAT_FACTOR_LU;

  PetscNewLog(B,Mat_Pastix,&pastix);
  pastix->CleanUpPastix             = PETSC_FALSE;
  pastix->isAIJ                     = PETSC_TRUE;
  pastix->scat_rhs                  = PETSC_NULL;
  pastix->scat_sol                  = PETSC_NULL;
  pastix->Destroy                   = B->ops->destroy;
  B->ops->destroy                   = MatDestroy_Pastix;
  B->spptr                          = (void*)pastix;

  *F = B;
  return(0);
}
EXTERN_C_END

EXTERN_C_BEGIN
PetscErrorCode MatGetFactor_seqsbaij_pastix(Mat A,MatFactorType ftype,Mat *F)
{
  Mat            B;
  Mat_Pastix    *pastix;

  if (ftype != MAT_FACTOR_CHOLESKY) SETERRQ(PETSC_COMM_SELF,PETSC_ERR_SUP,"Cannot use PETSc SBAIJ matrices with PaStiX LU, use AIJ matrix");
  /* Create the factorization matrix */
  MatCreate(((PetscObject)A)->comm,&B);
  MatSetSizes(B,A->rmap->n,A->cmap->n,A->rmap->N,A->cmap->N);
  MatSetType(B,((PetscObject)A)->type_name);
  MatSeqSBAIJSetPreallocation(B,1,0,PETSC_NULL);
  MatMPISBAIJSetPreallocation(B,1,0,PETSC_NULL,0,PETSC_NULL);

  B->ops->choleskyfactorsymbolic = MatCholeskyFactorSymbolic_SBAIJPASTIX;
  B->ops->view                   = MatView_PaStiX;
  B->ops->getinfo                = MatGetInfo_PaStiX;
  PetscObjectComposeFunctionDynamic((PetscObject)B,"MatFactorGetSolverPackage_C","MatFactorGetSolverPackage_pastix",MatFactorGetSolverPackage_pastix);
  B->factortype                  = MAT_FACTOR_CHOLESKY;

  PetscNewLog(B,Mat_Pastix,&pastix);
  pastix->CleanUpPastix             = PETSC_FALSE;
  pastix->isAIJ                     = PETSC_FALSE;
  pastix->scat_rhs                  = PETSC_NULL;
  pastix->scat_sol                  = PETSC_NULL;
  pastix->Destroy                   = B->ops->destroy;
  B->ops->destroy                   = MatDestroy_Pastix;
  B->spptr                          = (void*)pastix;

  *F = B;
  return(0);
}
EXTERN_C_END

EXTERN_C_BEGIN
PetscErrorCode MatGetFactor_mpisbaij_pastix(Mat A,MatFactorType ftype,Mat *F)
{
  Mat            B;
  Mat_Pastix    *pastix;

  if (ftype != MAT_FACTOR_CHOLESKY) SETERRQ(PETSC_COMM_SELF,PETSC_ERR_SUP,"Cannot use PETSc SBAIJ matrices with PaStiX LU, use AIJ matrix");

  /* Create the factorization matrix */
  MatCreate(((PetscObject)A)->comm,&B);
  MatSetSizes(B,A->rmap->n,A->cmap->n,A->rmap->N,A->cmap->N);
  MatSetType(B,((PetscObject)A)->type_name);
  MatSeqSBAIJSetPreallocation(B,1,0,PETSC_NULL);
  MatMPISBAIJSetPreallocation(B,1,0,PETSC_NULL,0,PETSC_NULL);

  B->ops->choleskyfactorsymbolic = MatCholeskyFactorSymbolic_SBAIJPASTIX;
  B->ops->view                   = MatView_PaStiX;
  B->ops->getinfo                = MatGetInfo_PaStiX;
  PetscObjectComposeFunctionDynamic((PetscObject)B,"MatFactorGetSolverPackage_C","MatFactorGetSolverPackage_pastix",MatFactorGetSolverPackage_pastix);
  B->factortype                  = MAT_FACTOR_CHOLESKY;

  PetscNewLog(B,Mat_Pastix,&pastix);
  pastix->CleanUpPastix             = PETSC_FALSE;
  pastix->isAIJ                     = PETSC_FALSE;
  pastix->scat_rhs                  = PETSC_NULL;
  pastix->scat_sol                  = PETSC_NULL;
  pastix->Destroy                   = B->ops->destroy;
  B->ops->destroy                   = MatDestroy_Pastix;
  B->spptr                          = (void*)pastix;

  *F = B;
  return(0);
}
EXTERN_C_END