Actual source code: pastix.c
petsc-3.5.0 2014-06-30
1: /*
2: Provides an interface to the PaStiX sparse solver
3: */
4: #include <../src/mat/impls/aij/seq/aij.h>
5: #include <../src/mat/impls/aij/mpi/mpiaij.h>
6: #include <../src/mat/impls/sbaij/seq/sbaij.h>
7: #include <../src/mat/impls/sbaij/mpi/mpisbaij.h>
9: #if defined(PETSC_USE_COMPLEX) && defined(__cplusplus)
10: #define _H_COMPLEX
11: #endif
13: EXTERN_C_BEGIN
14: #include <pastix.h>
15: EXTERN_C_END
17: #if defined(PETSC_USE_COMPLEX)
18: #if defined(PETSC_USE_REAL_SINGLE)
19: #define PASTIX_CALL c_pastix
20: #define PASTIX_CHECKMATRIX c_pastix_checkMatrix
21: #define PastixScalar COMPLEX
22: #else
23: #define PASTIX_CALL z_pastix
24: #define PASTIX_CHECKMATRIX z_pastix_checkMatrix
25: #define PastixScalar DCOMPLEX
26: #endif
28: #else /* PETSC_USE_COMPLEX */
30: #if defined(PETSC_USE_REAL_SINGLE)
31: #define PASTIX_CALL s_pastix
32: #define PASTIX_CHECKMATRIX s_pastix_checkMatrix
33: #define PastixScalar float
34: #else
35: #define PASTIX_CALL d_pastix
36: #define PASTIX_CHECKMATRIX d_pastix_checkMatrix
37: #define PastixScalar double
38: #endif
40: #endif /* PETSC_USE_COMPLEX */
/* Per-factor context attached to the PETSc factor matrix (Mat->spptr).
   Holds the PaStiX instance handle, the matrix in 1-based CSC form, and
   the scatters needed to centralize the right-hand side on rank 0. */
typedef struct Mat_Pastix_ {
  pastix_data_t *pastix_data;    /* PaStiX internal data storage structure (opaque handle) */
  MatStructure   matstruc;       /* SAME/DIFFERENT_NONZERO_PATTERN: drives re-analysis vs refactor */
  PetscInt       n;              /* Number of columns in the matrix */
  PetscInt      *colptr;         /* Index of first element of each column in row and val (1-based) */
  PetscInt      *row;            /* Row of each element of the matrix (1-based) */
  PetscScalar   *val;            /* Value of each element of the matrix */
  PetscInt      *perm;           /* Permutation tabular */
  PetscInt      *invp;           /* Reverse permutation tabular */
  PetscScalar   *rhs;            /* Right-hand-side member */
  PetscInt       rhsnbr;         /* Right-hand-side number (must be 1) */
  PetscInt       iparm[64];      /* Integer parameters passed to/returned by PaStiX */
  double         dparm[64];      /* Floating point parameters passed to/returned by PaStiX */
  MPI_Comm       pastix_comm;    /* PaStiX MPI communicator (dup of A's communicator) */
  PetscMPIInt    commRank;       /* MPI rank */
  PetscMPIInt    commSize;       /* MPI communicator size */
  PetscBool      CleanUpPastix;  /* Boolean indicating if we call PaStiX clean step */
  VecScatter     scat_rhs;       /* scatter: distributed rhs -> sequential b_seq (commSize > 1 only) */
  VecScatter     scat_sol;       /* scatter: sequential solution -> distributed x (commSize > 1 only) */
  Vec            b_seq;          /* centralized sequential rhs/solution work vector */
  PetscBool      isAIJ;          /* NOTE(review): set PETSC_TRUE by all four MatGetFactor_* below, even for SBAIJ — confirm intent */
  PetscErrorCode (*Destroy)(Mat);/* saved original destroy routine, chained from MatDestroy_Pastix() */
} Mat_Pastix;
66: extern PetscErrorCode MatDuplicate_Pastix(Mat,MatDuplicateOption,Mat*);
70: /*
71: convert Petsc seqaij matrix to CSC: colptr[n], row[nz], val[nz]
73: input:
74: A - matrix in seqaij or mpisbaij (bs=1) format
  valOnly - FALSE: space is allocated and both the CSC structure and values are filled
            TRUE:  only the values are filled (structure must already exist)
77: output:
78: n - Size of the matrix
79: colptr - Index of first element of each column in row and val
80: row - Row of each element of the matrix
81: values - Value of each element of the matrix
82: */
PetscErrorCode MatConvertToCSC(Mat A,PetscBool valOnly,PetscInt *n,PetscInt **colptr,PetscInt **row,PetscScalar **values)
{
  /* Convert the (sequential) PETSc matrix A into the 1-based CSC arrays PaStiX
     expects.  For symmetric AIJ matrices only the lower triangular part
     (col <= row) is stored; SBAIJ matrices are already triangular and are
     copied verbatim (shifted to 1-based indices). */
  Mat_SeqAIJ  *aa      = (Mat_SeqAIJ*)A->data;   /* raw CSR arrays of the sequential matrix */
  PetscInt    *rowptr  = aa->i;
  PetscInt    *col     = aa->j;
  PetscScalar *rvalues = aa->a;
  PetscInt     m       = A->rmap->N;             /* number of rows; assumes square matrix (== *n) below */
  PetscInt     nnz;
  PetscInt     i,j, k;
  PetscInt     base = 1;                         /* PaStiX uses 1-based (Fortran) indexing */
  PetscInt     idx;
  PetscInt     colidx;
  PetscInt    *colcount;
  PetscBool    isSBAIJ;
  PetscBool    isSeqSBAIJ;
  PetscBool    isMpiSBAIJ;
  PetscBool    isSym;
  PetscBool    flg;
  PetscInt     icntl;
  PetscInt     verb;
  PetscInt     check;

  MatIsSymmetric(A,0.0,&isSym);
  PetscObjectTypeCompare((PetscObject)A,MATSBAIJ,&isSBAIJ);
  PetscObjectTypeCompare((PetscObject)A,MATSEQSBAIJ,&isSeqSBAIJ);
  PetscObjectTypeCompare((PetscObject)A,MATMPISBAIJ,&isMpiSBAIJ);

  *n = A->cmap->N;

  /* PaStiX only needs triangular matrix if matrix is symmetric:
     keep the diagonal plus half of the off-diagonal entries */
  if (isSym && !(isSBAIJ || isSeqSBAIJ || isMpiSBAIJ)) nnz = (aa->nz - *n)/2 + *n;
  else nnz = aa->nz;

  if (!valOnly) {
    /* First call: allocate the CSC structure and fill both structure and values */
    PetscMalloc(((*n)+1) *sizeof(PetscInt) ,colptr);
    PetscMalloc(nnz *sizeof(PetscInt) ,row);
    PetscMalloc1(nnz ,values);

    if (isSBAIJ || isSeqSBAIJ || isMpiSBAIJ) {
      /* SBAIJ (bs=1) is already upper-triangular CSR == lower-triangular CSC
         of the symmetric matrix; copy and shift to 1-based indices */
      PetscMemcpy (*colptr, rowptr, ((*n)+1)*sizeof(PetscInt));
      for (i = 0; i < *n+1; i++) (*colptr)[i] += base;
      PetscMemcpy (*row, col, (nnz)*sizeof(PetscInt));
      for (i = 0; i < nnz; i++) (*row)[i] += base;
      PetscMemcpy (*values, rvalues, (nnz)*sizeof(PetscScalar));
    } else {
      /* AIJ: classic two-pass CSR -> CSC transposition */
      PetscMalloc1((*n),&colcount);

      for (i = 0; i < m; i++) colcount[i] = 0;  /* NOTE(review): loop bound m, allocation *n — relies on square matrix */
      /* Fill-in colptr: count the retained entries of each column */
      for (i = 0; i < m; i++) {
        for (j = rowptr[i]; j < rowptr[i+1]; j++) {
          if (!isSym || col[j] <= i) colcount[col[j]]++;
        }
      }

      (*colptr)[0] = base;
      for (j = 0; j < *n; j++) {
        (*colptr)[j+1] = (*colptr)[j] + colcount[j];
        /* in next loop we fill starting from (*colptr)[colidx] - base */
        colcount[j] = -base;
      }

      /* Fill-in rows and values: colcount[c] is the running 0-based offset
         (relative to (*colptr)[c]) of the next free slot in column c */
      for (i = 0; i < m; i++) {
        for (j = rowptr[i]; j < rowptr[i+1]; j++) {
          if (!isSym || col[j] <= i) {
            colidx         = col[j];
            idx            = (*colptr)[colidx] + colcount[colidx];
            (*row)[idx]    = i + base;
            (*values)[idx] = rvalues[j];
            colcount[colidx]++;
          }
        }
      }
      PetscFree(colcount);
    }
  } else {
    /* Fill-in only values: the CSC structure is reused from a previous call,
       so locate each CSR entry inside its (sorted-by-construction) column */
    for (i = 0; i < m; i++) {
      for (j = rowptr[i]; j < rowptr[i+1]; j++) {
        colidx = col[j];
        if ((isSBAIJ || isSeqSBAIJ || isMpiSBAIJ) ||!isSym || col[j] <= i) {
          /* look for the value to fill */
          for (k = (*colptr)[colidx] - base; k < (*colptr)[colidx + 1] - base; k++) {
            if (((*row)[k]-base) == i) {
              (*values)[k] = rvalues[j];
              break;
            }
          }
          /* data structure of sparse matrix has changed */
          if (k == (*colptr)[colidx + 1] - base) SETERRQ1(PETSC_COMM_SELF,PETSC_ERR_PLIB,"overflow on k %D",k);
        }
      }
    }
  }

  /* Optional PaStiX sanity check of the generated CSC (-mat_pastix_check 1).
     PASTIX_CHECKMATRIX may compact/reorder the arrays in place, hence the
     temporary raw-malloc copies (PaStiX may realloc them internally). */
  icntl =-1;
  check = 0;
  PetscOptionsGetInt(((PetscObject) A)->prefix, "-mat_pastix_check", &icntl, &flg);
  if ((flg && icntl >= 0) || PetscLogPrintInfo) check = icntl;

  if (check == 1) {
    PetscScalar *tmpvalues;
    PetscInt    *tmprows,*tmpcolptr;
    tmpvalues = (PetscScalar*)malloc(nnz*sizeof(PetscScalar)); if (!tmpvalues) SETERRQ(PETSC_COMM_SELF,PETSC_ERR_MEM,"Unable to allocate memory");
    tmprows   = (PetscInt*)   malloc(nnz*sizeof(PetscInt));    if (!tmprows)   SETERRQ(PETSC_COMM_SELF,PETSC_ERR_MEM,"Unable to allocate memory");
    tmpcolptr = (PetscInt*)   malloc((*n+1)*sizeof(PetscInt)); if (!tmpcolptr) SETERRQ(PETSC_COMM_SELF,PETSC_ERR_MEM,"Unable to allocate memory");

    PetscMemcpy(tmpcolptr,*colptr,(*n+1)*sizeof(PetscInt));
    PetscMemcpy(tmprows,*row,nnz*sizeof(PetscInt));
    PetscMemcpy(tmpvalues,*values,nnz*sizeof(PetscScalar));
    PetscFree(*row);
    PetscFree(*values);

    icntl=-1;
    verb = API_VERBOSE_NOT;
    /* "iparm[IPARM_VERBOSE] : level of printing (0 to 2)" */
    PetscOptionsGetInt(((PetscObject) A)->prefix, "-mat_pastix_verbose", &icntl, &flg);
    if ((flg && icntl >= 0) || PetscLogPrintInfo) verb = icntl;
    PASTIX_CHECKMATRIX(MPI_COMM_WORLD,verb,((isSym != 0) ? API_SYM_YES : API_SYM_NO),API_YES,*n,&tmpcolptr,&tmprows,(PastixScalar**)&tmpvalues,NULL,1);

    /* The check may have removed entries: re-allocate row/values to the
       (possibly smaller) size recorded in the corrected colptr */
    PetscMemcpy(*colptr,tmpcolptr,(*n+1)*sizeof(PetscInt));
    PetscMalloc1(((*colptr)[*n]-1),row);
    PetscMemcpy(*row,tmprows,((*colptr)[*n]-1)*sizeof(PetscInt));
    PetscMalloc1(((*colptr)[*n]-1),values);
    PetscMemcpy(*values,tmpvalues,((*colptr)[*n]-1)*sizeof(PetscScalar));
    free(tmpvalues);
    free(tmprows);
    free(tmpcolptr);
  }
  return(0);
}
224: /*
225: Call clean step of PaStiX if lu->CleanUpPastix == true.
226: Free the CSC matrix.
227: */
228: PetscErrorCode MatDestroy_Pastix(Mat A)
229: {
230: Mat_Pastix *lu=(Mat_Pastix*)A->spptr;
232: PetscMPIInt size=lu->commSize;
235: if (lu && lu->CleanUpPastix) {
236: /* Terminate instance, deallocate memories */
237: if (size > 1) {
238: VecScatterDestroy(&lu->scat_rhs);
239: VecDestroy(&lu->b_seq);
240: VecScatterDestroy(&lu->scat_sol);
241: }
243: lu->iparm[IPARM_START_TASK]=API_TASK_CLEAN;
244: lu->iparm[IPARM_END_TASK] =API_TASK_CLEAN;
246: PASTIX_CALL(&(lu->pastix_data),
247: lu->pastix_comm,
248: lu->n,
249: lu->colptr,
250: lu->row,
251: (PastixScalar*)lu->val,
252: lu->perm,
253: lu->invp,
254: (PastixScalar*)lu->rhs,
255: lu->rhsnbr,
256: lu->iparm,
257: lu->dparm);
259: PetscFree(lu->colptr);
260: PetscFree(lu->row);
261: PetscFree(lu->val);
262: PetscFree(lu->perm);
263: PetscFree(lu->invp);
264: MPI_Comm_free(&(lu->pastix_comm));
265: }
266: if (lu && lu->Destroy) {
267: (lu->Destroy)(A);
268: }
269: PetscFree(A->spptr);
270: return(0);
271: }
/*
  Gather right-hand-side.
  Call for Solve step.
  Scatter solution.
*/
PetscErrorCode MatSolve_PaStiX(Mat A,Vec b,Vec x)
{
  Mat_Pastix  *lu=(Mat_Pastix*)A->spptr;
  PetscScalar *array;
  Vec          x_seq;

  lu->rhsnbr = 1;
  x_seq      = lu->b_seq;
  if (lu->commSize > 1) {
    /* PaStiX only supports centralized rhs. Scatter b into a sequential rhs vector */
    VecScatterBegin(lu->scat_rhs,b,x_seq,INSERT_VALUES,SCATTER_FORWARD);
    VecScatterEnd(lu->scat_rhs,b,x_seq,INSERT_VALUES,SCATTER_FORWARD);
    VecGetArray(x_seq,&array);
  } else { /* size == 1 : solve in place, x starts as a copy of b */
    VecCopy(b,x);
    VecGetArray(x,&array);
  }
  lu->rhs = array;
  /* NOTE(review): the array is restored here but lu->rhs keeps pointing at it
     and is passed to PaStiX below, then "restored" a second time afterwards.
     This relies on VecRestoreArray not invalidating the pointer — fragile;
     confirm against the PETSc Vec array-access contract before touching. */
  if (lu->commSize == 1) {
    VecRestoreArray(x,&array);
  } else {
    VecRestoreArray(x_seq,&array);
  }

  /* solve phase */
  /*-------------*/
  lu->iparm[IPARM_START_TASK] = API_TASK_SOLVE;
  lu->iparm[IPARM_END_TASK]   = API_TASK_REFINE;   /* solve + iterative refinement */
  lu->iparm[IPARM_RHS_MAKING] = API_RHS_B;

  PASTIX_CALL(&(lu->pastix_data),
              lu->pastix_comm,
              lu->n,
              lu->colptr,
              lu->row,
              (PastixScalar*)lu->val,
              lu->perm,
              lu->invp,
              (PastixScalar*)lu->rhs,
              lu->rhsnbr,
              lu->iparm,
              lu->dparm);

  if (lu->iparm[IPARM_ERROR_NUMBER] < 0) SETERRQ1(PETSC_COMM_SELF,PETSC_ERR_LIB,"Error reported by PaStiX in solve phase: lu->iparm[IPARM_ERROR_NUMBER] = %d\n",lu->iparm[IPARM_ERROR_NUMBER]);

  /* PaStiX overwrote the rhs array with the solution in place */
  if (lu->commSize == 1) {
    VecRestoreArray(x,&(lu->rhs));
  } else {
    VecRestoreArray(x_seq,&(lu->rhs));
  }

  if (lu->commSize > 1) { /* convert PaStiX centralized solution to petsc mpi x */
    VecScatterBegin(lu->scat_sol,x_seq,x,INSERT_VALUES,SCATTER_FORWARD);
    VecScatterEnd(lu->scat_sol,x_seq,x,INSERT_VALUES,SCATTER_FORWARD);
  }
  return(0);
}
340: /*
341: Numeric factorisation using PaStiX solver.
343: */
346: PetscErrorCode MatFactorNumeric_PaStiX(Mat F,Mat A,const MatFactorInfo *info)
347: {
348: Mat_Pastix *lu =(Mat_Pastix*)(F)->spptr;
349: Mat *tseq;
350: PetscErrorCode 0;
351: PetscInt icntl;
352: PetscInt M=A->rmap->N;
353: PetscBool valOnly,flg, isSym;
354: Mat F_diag;
355: IS is_iden;
356: Vec b;
357: IS isrow;
358: PetscBool isSeqAIJ,isSeqSBAIJ,isMPIAIJ;
361: PetscObjectTypeCompare((PetscObject)A,MATSEQAIJ,&isSeqAIJ);
362: PetscObjectTypeCompare((PetscObject)A,MATMPIAIJ,&isMPIAIJ);
363: PetscObjectTypeCompare((PetscObject)A,MATSEQSBAIJ,&isSeqSBAIJ);
364: if (lu->matstruc == DIFFERENT_NONZERO_PATTERN) {
365: (F)->ops->solve = MatSolve_PaStiX;
367: /* Initialize a PASTIX instance */
368: MPI_Comm_dup(PetscObjectComm((PetscObject)A),&(lu->pastix_comm));
369: MPI_Comm_rank(lu->pastix_comm, &lu->commRank);
370: MPI_Comm_size(lu->pastix_comm, &lu->commSize);
372: /* Set pastix options */
373: lu->iparm[IPARM_MODIFY_PARAMETER] = API_NO;
374: lu->iparm[IPARM_START_TASK] = API_TASK_INIT;
375: lu->iparm[IPARM_END_TASK] = API_TASK_INIT;
377: lu->rhsnbr = 1;
379: /* Call to set default pastix options */
380: PASTIX_CALL(&(lu->pastix_data),
381: lu->pastix_comm,
382: lu->n,
383: lu->colptr,
384: lu->row,
385: (PastixScalar*)lu->val,
386: lu->perm,
387: lu->invp,
388: (PastixScalar*)lu->rhs,
389: lu->rhsnbr,
390: lu->iparm,
391: lu->dparm);
393: PetscOptionsBegin(PetscObjectComm((PetscObject)A),((PetscObject)A)->prefix,"PaStiX Options","Mat");
395: icntl = -1;
397: lu->iparm[IPARM_VERBOSE] = API_VERBOSE_NOT;
399: PetscOptionsInt("-mat_pastix_verbose","iparm[IPARM_VERBOSE] : level of printing (0 to 2)","None",lu->iparm[IPARM_VERBOSE],&icntl,&flg);
400: if ((flg && icntl >= 0) || PetscLogPrintInfo) {
401: lu->iparm[IPARM_VERBOSE] = icntl;
402: }
403: icntl=-1;
404: PetscOptionsInt("-mat_pastix_threadnbr","iparm[IPARM_THREAD_NBR] : Number of thread by MPI node","None",lu->iparm[IPARM_THREAD_NBR],&icntl,&flg);
405: if ((flg && icntl > 0)) {
406: lu->iparm[IPARM_THREAD_NBR] = icntl;
407: }
408: PetscOptionsEnd();
409: valOnly = PETSC_FALSE;
410: } else {
411: if (isSeqAIJ || isMPIAIJ) {
412: PetscFree(lu->colptr);
413: PetscFree(lu->row);
414: PetscFree(lu->val);
415: valOnly = PETSC_FALSE;
416: } else valOnly = PETSC_TRUE;
417: }
419: lu->iparm[IPARM_MATRIX_VERIFICATION] = API_YES;
421: /* convert mpi A to seq mat A */
422: ISCreateStride(PETSC_COMM_SELF,M,0,1,&isrow);
423: MatGetSubMatrices(A,1,&isrow,&isrow,MAT_INITIAL_MATRIX,&tseq);
424: ISDestroy(&isrow);
426: MatConvertToCSC(*tseq,valOnly, &lu->n, &lu->colptr, &lu->row, &lu->val);
427: MatIsSymmetric(*tseq,0.0,&isSym);
428: MatDestroyMatrices(1,&tseq);
430: if (!lu->perm) {
431: PetscMalloc1((lu->n),&(lu->perm));
432: PetscMalloc1((lu->n),&(lu->invp));
433: }
435: if (isSym) {
436: /* On symmetric matrix, LLT */
437: lu->iparm[IPARM_SYM] = API_SYM_YES;
438: lu->iparm[IPARM_FACTORIZATION] = API_FACT_LDLT;
439: } else {
440: /* On unsymmetric matrix, LU */
441: lu->iparm[IPARM_SYM] = API_SYM_NO;
442: lu->iparm[IPARM_FACTORIZATION] = API_FACT_LU;
443: }
445: /*----------------*/
446: if (lu->matstruc == DIFFERENT_NONZERO_PATTERN) {
447: if (!(isSeqAIJ || isSeqSBAIJ)) {
448: /* PaStiX only supports centralized rhs. Create scatter scat_rhs for repeated use in MatSolve() */
449: VecCreateSeq(PETSC_COMM_SELF,A->cmap->N,&lu->b_seq);
450: ISCreateStride(PETSC_COMM_SELF,A->cmap->N,0,1,&is_iden);
451: MatGetVecs(A,NULL,&b);
452: VecScatterCreate(b,is_iden,lu->b_seq,is_iden,&lu->scat_rhs);
453: VecScatterCreate(lu->b_seq,is_iden,b,is_iden,&lu->scat_sol);
454: ISDestroy(&is_iden);
455: VecDestroy(&b);
456: }
457: lu->iparm[IPARM_START_TASK] = API_TASK_ORDERING;
458: lu->iparm[IPARM_END_TASK] = API_TASK_NUMFACT;
460: PASTIX_CALL(&(lu->pastix_data),
461: lu->pastix_comm,
462: lu->n,
463: lu->colptr,
464: lu->row,
465: (PastixScalar*)lu->val,
466: lu->perm,
467: lu->invp,
468: (PastixScalar*)lu->rhs,
469: lu->rhsnbr,
470: lu->iparm,
471: lu->dparm);
472: if (lu->iparm[IPARM_ERROR_NUMBER] < 0) SETERRQ1(PETSC_COMM_SELF,PETSC_ERR_LIB,"Error reported by PaStiX in analysis phase: iparm(IPARM_ERROR_NUMBER)=%d\n",lu->iparm[IPARM_ERROR_NUMBER]);
473: } else {
474: lu->iparm[IPARM_START_TASK] = API_TASK_NUMFACT;
475: lu->iparm[IPARM_END_TASK] = API_TASK_NUMFACT;
476: PASTIX_CALL(&(lu->pastix_data),
477: lu->pastix_comm,
478: lu->n,
479: lu->colptr,
480: lu->row,
481: (PastixScalar*)lu->val,
482: lu->perm,
483: lu->invp,
484: (PastixScalar*)lu->rhs,
485: lu->rhsnbr,
486: lu->iparm,
487: lu->dparm);
489: if (lu->iparm[IPARM_ERROR_NUMBER] < 0) SETERRQ1(PETSC_COMM_SELF,PETSC_ERR_LIB,"Error reported by PaStiX in analysis phase: iparm(IPARM_ERROR_NUMBER)=%d\n",lu->iparm[IPARM_ERROR_NUMBER]);
490: }
492: if (lu->commSize > 1) {
493: if ((F)->factortype == MAT_FACTOR_LU) {
494: F_diag = ((Mat_MPIAIJ*)(F)->data)->A;
495: } else {
496: F_diag = ((Mat_MPISBAIJ*)(F)->data)->A;
497: }
498: F_diag->assembled = PETSC_TRUE;
499: }
500: (F)->assembled = PETSC_TRUE;
501: lu->matstruc = SAME_NONZERO_PATTERN;
502: lu->CleanUpPastix = PETSC_TRUE;
503: return(0);
504: }
506: /* Note the Petsc r and c permutations are ignored */
509: PetscErrorCode MatLUFactorSymbolic_AIJPASTIX(Mat F,Mat A,IS r,IS c,const MatFactorInfo *info)
510: {
511: Mat_Pastix *lu = (Mat_Pastix*)F->spptr;
514: lu->iparm[IPARM_FACTORIZATION] = API_FACT_LU;
515: lu->iparm[IPARM_SYM] = API_SYM_YES;
516: lu->matstruc = DIFFERENT_NONZERO_PATTERN;
517: F->ops->lufactornumeric = MatFactorNumeric_PaStiX;
518: return(0);
519: }
522: /* Note the Petsc r permutation is ignored */
525: PetscErrorCode MatCholeskyFactorSymbolic_SBAIJPASTIX(Mat F,Mat A,IS r,const MatFactorInfo *info)
526: {
527: Mat_Pastix *lu = (Mat_Pastix*)(F)->spptr;
530: lu->iparm[IPARM_FACTORIZATION] = API_FACT_LLT;
531: lu->iparm[IPARM_SYM] = API_SYM_NO;
532: lu->matstruc = DIFFERENT_NONZERO_PATTERN;
533: (F)->ops->choleskyfactornumeric = MatFactorNumeric_PaStiX;
534: return(0);
535: }
539: PetscErrorCode MatView_PaStiX(Mat A,PetscViewer viewer)
540: {
541: PetscErrorCode ierr;
542: PetscBool iascii;
543: PetscViewerFormat format;
546: PetscObjectTypeCompare((PetscObject)viewer,PETSCVIEWERASCII,&iascii);
547: if (iascii) {
548: PetscViewerGetFormat(viewer,&format);
549: if (format == PETSC_VIEWER_ASCII_INFO) {
550: Mat_Pastix *lu=(Mat_Pastix*)A->spptr;
552: PetscViewerASCIIPrintf(viewer,"PaStiX run parameters:\n");
553: PetscViewerASCIIPrintf(viewer," Matrix type : %s \n",((lu->iparm[IPARM_SYM] == API_SYM_YES) ? "Symmetric" : "Unsymmetric"));
554: PetscViewerASCIIPrintf(viewer," Level of printing (0,1,2): %d \n",lu->iparm[IPARM_VERBOSE]);
555: PetscViewerASCIIPrintf(viewer," Number of refinements iterations : %d \n",lu->iparm[IPARM_NBITER]);
556: PetscPrintf(PETSC_COMM_SELF," Error : %g \n",lu->dparm[DPARM_RELATIVE_ERROR]);
557: }
558: }
559: return(0);
560: }
563: /*MC
564: MATSOLVERPASTIX - A solver package providing direct solvers (LU) for distributed
565: and sequential matrices via the external package PaStiX.
567: Use ./configure --download-pastix to have PETSc installed with PaStiX
569: Options Database Keys:
570: + -mat_pastix_verbose <0,1,2> - print level
- -mat_pastix_threadnbr <integer> - Set the number of threads per MPI task.
573: Level: beginner
575: .seealso: PCFactorSetMatSolverPackage(), MatSolverPackage
577: M*/
582: PetscErrorCode MatGetInfo_PaStiX(Mat A,MatInfoType flag,MatInfo *info)
583: {
584: Mat_Pastix *lu =(Mat_Pastix*)A->spptr;
587: info->block_size = 1.0;
588: info->nz_allocated = lu->iparm[IPARM_NNZEROS];
589: info->nz_used = lu->iparm[IPARM_NNZEROS];
590: info->nz_unneeded = 0.0;
591: info->assemblies = 0.0;
592: info->mallocs = 0.0;
593: info->memory = 0.0;
594: info->fill_ratio_given = 0;
595: info->fill_ratio_needed = 0;
596: info->factor_mallocs = 0;
597: return(0);
598: }
/* Query callback composed on the factor matrix: report which solver
   package produced this factor (always MATSOLVERPASTIX here). */
PetscErrorCode MatFactorGetSolverPackage_pastix(Mat A,const MatSolverPackage *type)
{
  *type = MATSOLVERPASTIX;
  return(0);
}
609: /*
610: The seq and mpi versions of this function are the same
611: */
614: PETSC_EXTERN PetscErrorCode MatGetFactor_seqaij_pastix(Mat A,MatFactorType ftype,Mat *F)
615: {
616: Mat B;
618: Mat_Pastix *pastix;
621: if (ftype != MAT_FACTOR_LU) SETERRQ(PETSC_COMM_SELF,PETSC_ERR_SUP,"Cannot use PETSc AIJ matrices with PaStiX Cholesky, use SBAIJ matrix");
622: /* Create the factorization matrix */
623: MatCreate(PetscObjectComm((PetscObject)A),&B);
624: MatSetSizes(B,A->rmap->n,A->cmap->n,A->rmap->N,A->cmap->N);
625: MatSetType(B,((PetscObject)A)->type_name);
626: MatSeqAIJSetPreallocation(B,0,NULL);
628: B->ops->lufactorsymbolic = MatLUFactorSymbolic_AIJPASTIX;
629: B->ops->view = MatView_PaStiX;
630: B->ops->getinfo = MatGetInfo_PaStiX;
632: PetscObjectComposeFunction((PetscObject)B,"MatFactorGetSolverPackage_C",MatFactorGetSolverPackage_pastix);
634: B->factortype = MAT_FACTOR_LU;
636: PetscNewLog(B,&pastix);
638: pastix->CleanUpPastix = PETSC_FALSE;
639: pastix->isAIJ = PETSC_TRUE;
640: pastix->scat_rhs = NULL;
641: pastix->scat_sol = NULL;
642: pastix->Destroy = B->ops->destroy;
643: B->ops->destroy = MatDestroy_Pastix;
644: B->spptr = (void*)pastix;
646: *F = B;
647: return(0);
648: }
652: PETSC_EXTERN PetscErrorCode MatGetFactor_mpiaij_pastix(Mat A,MatFactorType ftype,Mat *F)
653: {
654: Mat B;
656: Mat_Pastix *pastix;
659: if (ftype != MAT_FACTOR_LU) SETERRQ(PETSC_COMM_SELF,PETSC_ERR_SUP,"Cannot use PETSc AIJ matrices with PaStiX Cholesky, use SBAIJ matrix");
660: /* Create the factorization matrix */
661: MatCreate(PetscObjectComm((PetscObject)A),&B);
662: MatSetSizes(B,A->rmap->n,A->cmap->n,A->rmap->N,A->cmap->N);
663: MatSetType(B,((PetscObject)A)->type_name);
664: MatSeqAIJSetPreallocation(B,0,NULL);
665: MatMPIAIJSetPreallocation(B,0,NULL,0,NULL);
667: B->ops->lufactorsymbolic = MatLUFactorSymbolic_AIJPASTIX;
668: B->ops->view = MatView_PaStiX;
670: PetscObjectComposeFunction((PetscObject)B,"MatFactorGetSolverPackage_C",MatFactorGetSolverPackage_pastix);
672: B->factortype = MAT_FACTOR_LU;
674: PetscNewLog(B,&pastix);
676: pastix->CleanUpPastix = PETSC_FALSE;
677: pastix->isAIJ = PETSC_TRUE;
678: pastix->scat_rhs = NULL;
679: pastix->scat_sol = NULL;
680: pastix->Destroy = B->ops->destroy;
681: B->ops->destroy = MatDestroy_Pastix;
682: B->spptr = (void*)pastix;
684: *F = B;
685: return(0);
686: }
690: PETSC_EXTERN PetscErrorCode MatGetFactor_seqsbaij_pastix(Mat A,MatFactorType ftype,Mat *F)
691: {
692: Mat B;
694: Mat_Pastix *pastix;
697: if (ftype != MAT_FACTOR_CHOLESKY) SETERRQ(PETSC_COMM_SELF,PETSC_ERR_SUP,"Cannot use PETSc SBAIJ matrices with PaStiX LU, use AIJ matrix");
698: /* Create the factorization matrix */
699: MatCreate(PetscObjectComm((PetscObject)A),&B);
700: MatSetSizes(B,A->rmap->n,A->cmap->n,A->rmap->N,A->cmap->N);
701: MatSetType(B,((PetscObject)A)->type_name);
702: MatSeqSBAIJSetPreallocation(B,1,0,NULL);
703: MatMPISBAIJSetPreallocation(B,1,0,NULL,0,NULL);
705: B->ops->choleskyfactorsymbolic = MatCholeskyFactorSymbolic_SBAIJPASTIX;
706: B->ops->view = MatView_PaStiX;
708: PetscObjectComposeFunction((PetscObject)B,"MatFactorGetSolverPackage_C",MatFactorGetSolverPackage_pastix);
710: B->factortype = MAT_FACTOR_CHOLESKY;
712: PetscNewLog(B,&pastix);
714: pastix->CleanUpPastix = PETSC_FALSE;
715: pastix->isAIJ = PETSC_TRUE;
716: pastix->scat_rhs = NULL;
717: pastix->scat_sol = NULL;
718: pastix->Destroy = B->ops->destroy;
719: B->ops->destroy = MatDestroy_Pastix;
720: B->spptr = (void*)pastix;
722: *F = B;
723: return(0);
724: }
728: PETSC_EXTERN PetscErrorCode MatGetFactor_mpisbaij_pastix(Mat A,MatFactorType ftype,Mat *F)
729: {
730: Mat B;
732: Mat_Pastix *pastix;
735: if (ftype != MAT_FACTOR_CHOLESKY) SETERRQ(PETSC_COMM_SELF,PETSC_ERR_SUP,"Cannot use PETSc SBAIJ matrices with PaStiX LU, use AIJ matrix");
737: /* Create the factorization matrix */
738: MatCreate(PetscObjectComm((PetscObject)A),&B);
739: MatSetSizes(B,A->rmap->n,A->cmap->n,A->rmap->N,A->cmap->N);
740: MatSetType(B,((PetscObject)A)->type_name);
741: MatSeqSBAIJSetPreallocation(B,1,0,NULL);
742: MatMPISBAIJSetPreallocation(B,1,0,NULL,0,NULL);
744: B->ops->choleskyfactorsymbolic = MatCholeskyFactorSymbolic_SBAIJPASTIX;
745: B->ops->view = MatView_PaStiX;
747: PetscObjectComposeFunction((PetscObject)B,"MatFactorGetSolverPackage_C",MatFactorGetSolverPackage_pastix);
749: B->factortype = MAT_FACTOR_CHOLESKY;
751: PetscNewLog(B,&pastix);
753: pastix->CleanUpPastix = PETSC_FALSE;
754: pastix->isAIJ = PETSC_TRUE;
755: pastix->scat_rhs = NULL;
756: pastix->scat_sol = NULL;
757: pastix->Destroy = B->ops->destroy;
758: B->ops->destroy = MatDestroy_Pastix;
759: B->spptr = (void*)pastix;
761: *F = B;
762: return(0);
763: }