Actual source code: pastix.c
/*
  Provides an interface to the PaStiX sparse solver
*/
#include <../src/mat/impls/aij/seq/aij.h>
#include <../src/mat/impls/aij/mpi/mpiaij.h>
#include <../src/mat/impls/sbaij/seq/sbaij.h>
#include <../src/mat/impls/sbaij/mpi/mpisbaij.h>

#if defined(PETSC_USE_COMPLEX)
#define _H_COMPLEX
#endif

EXTERN_C_BEGIN
#include <pastix.h>
EXTERN_C_END

#if defined(PETSC_USE_COMPLEX)
#if defined(PETSC_USE_REAL_SINGLE)
#define PASTIX_CALL c_pastix
#else
#define PASTIX_CALL z_pastix
#endif

#else /* PETSC_USE_COMPLEX */

#if defined(PETSC_USE_REAL_SINGLE)
#define PASTIX_CALL s_pastix
#else
#define PASTIX_CALL d_pastix
#endif

#endif /* PETSC_USE_COMPLEX */
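
/*
  PASTIX_CALL expands to the precision-specific PaStiX entry point
  (s_pastix / d_pastix for single/double real, c_pastix / z_pastix for
  single/double complex) matching PETSc's configured scalar type.
*/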

typedef PetscScalar PastixScalar;

typedef struct Mat_Pastix_ {
  pastix_data_t *pastix_data;       /* PaStiX data storage structure */
  MatStructure   matstruc;
  PetscInt       n;                 /* Number of columns in the matrix */
  PetscInt      *colptr;            /* Index of the first element of each column in row and val */
  PetscInt      *row;               /* Row of each element of the matrix */
  PetscScalar   *val;               /* Value of each element of the matrix */
  PetscInt      *perm;              /* Permutation table */
  PetscInt      *invp;              /* Inverse permutation table */
  PetscScalar   *rhs;               /* Right-hand side */
  PetscInt       rhsnbr;            /* Number of right-hand sides (must be 1) */
  PetscInt       iparm[IPARM_SIZE]; /* Integer parameters */
  double         dparm[DPARM_SIZE]; /* Floating-point parameters */
  MPI_Comm       pastix_comm;       /* PaStiX MPI communicator */
  PetscMPIInt    commRank;          /* MPI rank */
  PetscMPIInt    commSize;          /* MPI communicator size */
  PetscBool      CleanUpPastix;     /* Boolean indicating if we call the PaStiX clean step */
  VecScatter     scat_rhs;
  VecScatter     scat_sol;
  Vec            b_seq;
} Mat_Pastix;

extern PetscErrorCode MatDuplicate_Pastix(Mat, MatDuplicateOption, Mat *);

/*
  Convert a PETSc seqaij matrix to CSC format: colptr[n], row[nz], val[nz]

  input:
    A       - matrix in seqaij or mpisbaij (bs=1) format
    valOnly - FALSE: spaces are allocated and both the structure and the values of the CSC are set
              TRUE:  only fill the values
  output:
    n      - size of the matrix
    colptr - index of the first element of each column in row and val
    row    - row of each element of the matrix
    values - value of each element of the matrix
*/
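
/*
  Worked illustration (not tied to any particular PaStiX version): with base = 1
  as used below, the 3x3 matrix

        | a 0 b |
    A = | 0 c 0 |
        | d 0 e |

  is stored in 1-based CSC form as

    colptr = {1, 3, 4, 6}      start of each column in row/val, plus an nz+1 sentinel
    row    = {1, 3, 2, 1, 3}   row index of each stored entry
    val    = {a, d, c, b, e}   values stored column by column
*/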

PetscErrorCode MatConvertToCSC(Mat A, PetscBool valOnly, PetscInt *n, PetscInt **colptr, PetscInt **row, PetscScalar **values)
{
  Mat_SeqAIJ  *aa      = (Mat_SeqAIJ *)A->data;
  PetscInt    *rowptr  = aa->i;
  PetscInt    *col     = aa->j;
  PetscScalar *rvalues = aa->a;
  PetscInt     m       = A->rmap->N;
  PetscInt     nnz;
  PetscInt     i, j, k;
  PetscInt     base = 1;
  PetscInt     idx;
  PetscInt     colidx;
  PetscInt    *colcount;
  PetscBool    isSBAIJ;
  PetscBool    isSeqSBAIJ;
  PetscBool    isMpiSBAIJ;
  PetscBool    isSym;

  PetscFunctionBegin;
  PetscCall(MatIsSymmetric(A, 0.0, &isSym));
  PetscCall(PetscObjectTypeCompare((PetscObject)A, MATSBAIJ, &isSBAIJ));
  PetscCall(PetscObjectTypeCompare((PetscObject)A, MATSEQSBAIJ, &isSeqSBAIJ));
  PetscCall(PetscObjectTypeCompare((PetscObject)A, MATMPISBAIJ, &isMpiSBAIJ));

  *n = A->cmap->N;

  /* PaStiX needs only one triangular part of the matrix when it is symmetric */
  if (isSym && !(isSBAIJ || isSeqSBAIJ || isMpiSBAIJ)) nnz = (aa->nz - *n) / 2 + *n;
  else nnz = aa->nz;

  if (!valOnly) {
    PetscCall(PetscMalloc1((*n) + 1, colptr));
    PetscCall(PetscMalloc1(nnz, row));
    PetscCall(PetscMalloc1(nnz, values));

    if (isSBAIJ || isSeqSBAIJ || isMpiSBAIJ) {
      PetscCall(PetscArraycpy(*colptr, rowptr, (*n) + 1));
      for (i = 0; i < *n + 1; i++) (*colptr)[i] += base;
      PetscCall(PetscArraycpy(*row, col, nnz));
      for (i = 0; i < nnz; i++) (*row)[i] += base;
      PetscCall(PetscArraycpy(*values, rvalues, nnz));
    } else {
      PetscCall(PetscMalloc1(*n, &colcount));

      for (i = 0; i < *n; i++) colcount[i] = 0; /* bound is *n: colcount has *n entries */
      /* Fill in colptr: count the kept entries in each column */
      for (i = 0; i < m; i++) {
        for (j = rowptr[i]; j < rowptr[i + 1]; j++) {
          if (!isSym || col[j] <= i) colcount[col[j]]++;
        }
      }

      (*colptr)[0] = base;
      for (j = 0; j < *n; j++) {
        (*colptr)[j + 1] = (*colptr)[j] + colcount[j];
        /* in the next loop we fill starting from (*colptr)[colidx] - base */
        colcount[j] = -base;
      }

      /* Fill in rows and values */
      for (i = 0; i < m; i++) {
        for (j = rowptr[i]; j < rowptr[i + 1]; j++) {
          if (!isSym || col[j] <= i) {
            colidx         = col[j];
            idx            = (*colptr)[colidx] + colcount[colidx];
            (*row)[idx]    = i + base;
            (*values)[idx] = rvalues[j];
            colcount[colidx]++;
          }
        }
      }
      PetscCall(PetscFree(colcount));
    }
  } else {
    /* Fill in only the values */
    for (i = 0; i < m; i++) {
      for (j = rowptr[i]; j < rowptr[i + 1]; j++) {
        colidx = col[j];
        if ((isSBAIJ || isSeqSBAIJ || isMpiSBAIJ) || !isSym || col[j] <= i) {
          /* look for the entry to fill */
          for (k = (*colptr)[colidx] - base; k < (*colptr)[colidx + 1] - base; k++) {
            if (((*row)[k] - base) == i) {
              (*values)[k] = rvalues[j];
              break;
            }
          }
          /* if the entry was not found, the nonzero structure of the sparse matrix has changed */
          PetscCheck(k != (*colptr)[colidx + 1] - base, PETSC_COMM_SELF, PETSC_ERR_PLIB, "overflow on k %" PetscInt_FMT, k);
        }
      }
    }
  }
  PetscFunctionReturn(PETSC_SUCCESS);
}

/*
  Call the clean step of PaStiX if lu->CleanUpPastix == true.
  Free the CSC matrix.
*/
PetscErrorCode MatDestroy_Pastix(Mat A)
{
  Mat_Pastix *lu = (Mat_Pastix *)A->data;

  PetscFunctionBegin;
  if (lu->CleanUpPastix) {
    /* Terminate the PaStiX instance and deallocate its memory */
    PetscCall(VecScatterDestroy(&lu->scat_rhs));
    PetscCall(VecDestroy(&lu->b_seq));
    PetscCall(VecScatterDestroy(&lu->scat_sol));

    lu->iparm[IPARM_START_TASK] = API_TASK_CLEAN;
    lu->iparm[IPARM_END_TASK]   = API_TASK_CLEAN;

    PASTIX_CALL(&(lu->pastix_data), lu->pastix_comm, lu->n, lu->colptr, lu->row, (PastixScalar *)lu->val, lu->perm, lu->invp, (PastixScalar *)lu->rhs, lu->rhsnbr, lu->iparm, lu->dparm);
    PetscCheck(lu->iparm[IPARM_ERROR_NUMBER] == 0, PETSC_COMM_SELF, PETSC_ERR_LIB, "Error reported by PaStiX in destroy: iparm(IPARM_ERROR_NUMBER)=%" PetscInt_FMT, lu->iparm[IPARM_ERROR_NUMBER]);
    PetscCall(PetscFree(lu->colptr));
    PetscCall(PetscFree(lu->row));
    PetscCall(PetscFree(lu->val));
    PetscCall(PetscFree(lu->perm));
    PetscCall(PetscFree(lu->invp));
    PetscCallMPI(MPI_Comm_free(&(lu->pastix_comm)));
  }
  PetscCall(PetscObjectComposeFunction((PetscObject)A, "MatFactorGetSolverType_C", NULL));
  PetscCall(PetscFree(A->data));
  PetscFunctionReturn(PETSC_SUCCESS);
}

/*
  Gather the right-hand side, call the PaStiX solve step, and scatter the solution.
*/
PetscErrorCode MatSolve_PaStiX(Mat A, Vec b, Vec x)
{
  Mat_Pastix  *lu = (Mat_Pastix *)A->data;
  PetscScalar *array;
  Vec          x_seq;

  PetscFunctionBegin;
  lu->rhsnbr = 1;
  x_seq      = lu->b_seq;
  if (lu->commSize > 1) {
    /* PaStiX only supports a centralized rhs. Scatter b into a sequential rhs vector */
    PetscCall(VecScatterBegin(lu->scat_rhs, b, x_seq, INSERT_VALUES, SCATTER_FORWARD));
    PetscCall(VecScatterEnd(lu->scat_rhs, b, x_seq, INSERT_VALUES, SCATTER_FORWARD));
    PetscCall(VecGetArray(x_seq, &array));
  } else { /* size == 1 */
    PetscCall(VecCopy(b, x));
    PetscCall(VecGetArray(x, &array));
  }
  lu->rhs = array;
  if (lu->commSize == 1) {
    PetscCall(VecRestoreArray(x, &array));
  } else {
    PetscCall(VecRestoreArray(x_seq, &array));
  }

  /* solve phase */
  lu->iparm[IPARM_START_TASK] = API_TASK_SOLVE;
  lu->iparm[IPARM_END_TASK]   = API_TASK_REFINE;
  lu->iparm[IPARM_RHS_MAKING] = API_RHS_B;

  PASTIX_CALL(&(lu->pastix_data), lu->pastix_comm, lu->n, lu->colptr, lu->row, (PastixScalar *)lu->val, lu->perm, lu->invp, (PastixScalar *)lu->rhs, lu->rhsnbr, lu->iparm, lu->dparm);
  PetscCheck(lu->iparm[IPARM_ERROR_NUMBER] == 0, PETSC_COMM_SELF, PETSC_ERR_LIB, "Error reported by PaStiX in solve phase: lu->iparm[IPARM_ERROR_NUMBER] = %" PetscInt_FMT, lu->iparm[IPARM_ERROR_NUMBER]);

  if (lu->commSize == 1) {
    PetscCall(VecRestoreArray(x, &(lu->rhs)));
  } else {
    PetscCall(VecRestoreArray(x_seq, &(lu->rhs)));
  }

  if (lu->commSize > 1) { /* scatter the PaStiX centralized solution into the PETSc MPI vector x */
    PetscCall(VecScatterBegin(lu->scat_sol, x_seq, x, INSERT_VALUES, SCATTER_FORWARD));
    PetscCall(VecScatterEnd(lu->scat_sol, x_seq, x, INSERT_VALUES, SCATTER_FORWARD));
  }
  PetscFunctionReturn(PETSC_SUCCESS);
}

/*
  Numeric factorization using the PaStiX solver.
*/
PetscErrorCode MatFactorNumeric_PaStiX(Mat F, Mat A, const MatFactorInfo *info)
{
  Mat_Pastix *lu = (Mat_Pastix *)(F)->data;
  Mat        *tseq;
  PetscInt    icntl;
  PetscInt    M = A->rmap->N;
  PetscBool   valOnly, flg, isSym;
  IS          is_iden;
  Vec         b;
  IS          isrow;
  PetscBool   isSeqAIJ, isSeqSBAIJ, isMPIAIJ;

  PetscFunctionBegin;
  PetscCall(PetscObjectTypeCompare((PetscObject)A, MATSEQAIJ, &isSeqAIJ));
  PetscCall(PetscObjectTypeCompare((PetscObject)A, MATMPIAIJ, &isMPIAIJ));
  PetscCall(PetscObjectTypeCompare((PetscObject)A, MATSEQSBAIJ, &isSeqSBAIJ));
  if (lu->matstruc == DIFFERENT_NONZERO_PATTERN) {
    (F)->ops->solve = MatSolve_PaStiX;

    /* Initialize a PaStiX instance */
    PetscCallMPI(MPI_Comm_dup(PetscObjectComm((PetscObject)A), &(lu->pastix_comm)));
    PetscCallMPI(MPI_Comm_rank(lu->pastix_comm, &lu->commRank));
    PetscCallMPI(MPI_Comm_size(lu->pastix_comm, &lu->commSize));

    /* Set PaStiX options */
    lu->iparm[IPARM_MODIFY_PARAMETER] = API_NO;
    lu->iparm[IPARM_START_TASK]       = API_TASK_INIT;
    lu->iparm[IPARM_END_TASK]         = API_TASK_INIT;

    lu->rhsnbr = 1;

    /* Call PaStiX to set the default options */
    PASTIX_CALL(&(lu->pastix_data), lu->pastix_comm, lu->n, lu->colptr, lu->row, (PastixScalar *)lu->val, lu->perm, lu->invp, (PastixScalar *)lu->rhs, lu->rhsnbr, lu->iparm, lu->dparm);
    PetscCheck(lu->iparm[IPARM_ERROR_NUMBER] == 0, PETSC_COMM_SELF, PETSC_ERR_LIB, "Error reported by PaStiX in MatFactorNumeric: iparm(IPARM_ERROR_NUMBER)=%" PetscInt_FMT, lu->iparm[IPARM_ERROR_NUMBER]);

    PetscOptionsBegin(PetscObjectComm((PetscObject)F), ((PetscObject)F)->prefix, "PaStiX Options", "Mat");
    icntl                    = -1;
    lu->iparm[IPARM_VERBOSE] = API_VERBOSE_NOT;
    PetscCall(PetscOptionsInt("-mat_pastix_verbose", "iparm[IPARM_VERBOSE]: level of printing (0 to 2)", "None", lu->iparm[IPARM_VERBOSE], &icntl, &flg));
    if ((flg && icntl >= 0) || PetscLogPrintInfo) lu->iparm[IPARM_VERBOSE] = icntl;
    icntl = -1;
    PetscCall(PetscOptionsInt("-mat_pastix_threadnbr", "iparm[IPARM_THREAD_NBR]: number of threads per MPI process", "None", lu->iparm[IPARM_THREAD_NBR], &icntl, &flg));
    if (flg && icntl > 0) lu->iparm[IPARM_THREAD_NBR] = icntl;
    PetscOptionsEnd();
    valOnly = PETSC_FALSE;
  } else {
    if (isSeqAIJ || isMPIAIJ) {
      PetscCall(PetscFree(lu->colptr));
      PetscCall(PetscFree(lu->row));
      PetscCall(PetscFree(lu->val));
      valOnly = PETSC_FALSE;
    } else valOnly = PETSC_TRUE;
  }

  lu->iparm[IPARM_MATRIX_VERIFICATION] = API_YES;

  /* convert the parallel matrix A to a sequential matrix */
  PetscCall(ISCreateStride(PETSC_COMM_SELF, M, 0, 1, &isrow));
  PetscCall(MatCreateSubMatrices(A, 1, &isrow, &isrow, MAT_INITIAL_MATRIX, &tseq));
  PetscCall(ISDestroy(&isrow));

  PetscCall(MatConvertToCSC(*tseq, valOnly, &lu->n, &lu->colptr, &lu->row, &lu->val));
  PetscCall(MatIsSymmetric(*tseq, 0.0, &isSym));
  PetscCall(MatDestroyMatrices(1, &tseq));

  if (!lu->perm) {
    PetscCall(PetscMalloc1(lu->n, &(lu->perm)));
    PetscCall(PetscMalloc1(lu->n, &(lu->invp)));
  }

  if (isSym) {
    /* Symmetric matrix: LDL^t factorization */
    lu->iparm[IPARM_SYM]           = API_SYM_YES;
    lu->iparm[IPARM_FACTORIZATION] = API_FACT_LDLT;
  } else {
    /* Unsymmetric matrix: LU factorization */
    lu->iparm[IPARM_SYM]           = API_SYM_NO;
    lu->iparm[IPARM_FACTORIZATION] = API_FACT_LU;
  }

  if (lu->matstruc == DIFFERENT_NONZERO_PATTERN) {
    if (!(isSeqAIJ || isSeqSBAIJ) && !lu->b_seq) {
      /* PaStiX only supports a centralized rhs. Create the scatters scat_rhs and scat_sol for repeated use in MatSolve() */
      PetscCall(VecCreateSeq(PETSC_COMM_SELF, A->cmap->N, &lu->b_seq));
      PetscCall(ISCreateStride(PETSC_COMM_SELF, A->cmap->N, 0, 1, &is_iden));
      PetscCall(MatCreateVecs(A, NULL, &b));
      PetscCall(VecScatterCreate(b, is_iden, lu->b_seq, is_iden, &lu->scat_rhs));
      PetscCall(VecScatterCreate(lu->b_seq, is_iden, b, is_iden, &lu->scat_sol));
      PetscCall(ISDestroy(&is_iden));
      PetscCall(VecDestroy(&b));
    }
    lu->iparm[IPARM_START_TASK] = API_TASK_ORDERING;
    lu->iparm[IPARM_END_TASK]   = API_TASK_NUMFACT;

    PASTIX_CALL(&(lu->pastix_data), lu->pastix_comm, lu->n, lu->colptr, lu->row, (PastixScalar *)lu->val, lu->perm, lu->invp, (PastixScalar *)lu->rhs, lu->rhsnbr, lu->iparm, lu->dparm);
    PetscCheck(lu->iparm[IPARM_ERROR_NUMBER] == 0, PETSC_COMM_SELF, PETSC_ERR_LIB, "Error reported by PaStiX in analysis phase: iparm(IPARM_ERROR_NUMBER)=%" PetscInt_FMT, lu->iparm[IPARM_ERROR_NUMBER]);
  } else {
    lu->iparm[IPARM_START_TASK] = API_TASK_NUMFACT;
    lu->iparm[IPARM_END_TASK]   = API_TASK_NUMFACT;
    PASTIX_CALL(&(lu->pastix_data), lu->pastix_comm, lu->n, lu->colptr, lu->row, (PastixScalar *)lu->val, lu->perm, lu->invp, (PastixScalar *)lu->rhs, lu->rhsnbr, lu->iparm, lu->dparm);
    PetscCheck(lu->iparm[IPARM_ERROR_NUMBER] == 0, PETSC_COMM_SELF, PETSC_ERR_LIB, "Error reported by PaStiX in numerical factorization phase: iparm(IPARM_ERROR_NUMBER)=%" PetscInt_FMT, lu->iparm[IPARM_ERROR_NUMBER]);
  }

  (F)->assembled    = PETSC_TRUE;
  lu->matstruc      = SAME_NONZERO_PATTERN;
  lu->CleanUpPastix = PETSC_TRUE;
  PetscFunctionReturn(PETSC_SUCCESS);
}

/* Note that the PETSc r and c permutations are ignored */
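/* The flags set here are provisional: MatFactorNumeric_PaStiX() re-derives IPARM_SYM and IPARM_FACTORIZATION from the detected symmetry of the matrix */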
PetscErrorCode MatLUFactorSymbolic_AIJPASTIX(Mat F, Mat A, IS r, IS c, const MatFactorInfo *info)
{
  Mat_Pastix *lu = (Mat_Pastix *)F->data;

  PetscFunctionBegin;
  lu->iparm[IPARM_FACTORIZATION] = API_FACT_LU;
  lu->iparm[IPARM_SYM]           = API_SYM_YES;
  lu->matstruc                   = DIFFERENT_NONZERO_PATTERN;
  F->ops->lufactornumeric        = MatFactorNumeric_PaStiX;
  PetscFunctionReturn(PETSC_SUCCESS);
}

PetscErrorCode MatCholeskyFactorSymbolic_SBAIJPASTIX(Mat F, Mat A, IS r, const MatFactorInfo *info)
{
  Mat_Pastix *lu = (Mat_Pastix *)(F)->data;

  PetscFunctionBegin;
  lu->iparm[IPARM_FACTORIZATION] = API_FACT_LLT;
  lu->iparm[IPARM_SYM]           = API_SYM_NO;
  lu->matstruc                   = DIFFERENT_NONZERO_PATTERN;
  (F)->ops->choleskyfactornumeric = MatFactorNumeric_PaStiX;
  PetscFunctionReturn(PETSC_SUCCESS);
}

PetscErrorCode MatView_PaStiX(Mat A, PetscViewer viewer)
{
  PetscBool         iascii;
  PetscViewerFormat format;

  PetscFunctionBegin;
  PetscCall(PetscObjectTypeCompare((PetscObject)viewer, PETSCVIEWERASCII, &iascii));
  if (iascii) {
    PetscCall(PetscViewerGetFormat(viewer, &format));
    if (format == PETSC_VIEWER_ASCII_INFO) {
      Mat_Pastix *lu = (Mat_Pastix *)A->data;

      PetscCall(PetscViewerASCIIPrintf(viewer, "PaStiX run parameters:\n"));
      PetscCall(PetscViewerASCIIPrintf(viewer, "  Matrix type : %s \n", ((lu->iparm[IPARM_SYM] == API_SYM_YES) ? "Symmetric" : "Unsymmetric")));
      PetscCall(PetscViewerASCIIPrintf(viewer, "  Level of printing (0,1,2): %" PetscInt_FMT " \n", lu->iparm[IPARM_VERBOSE]));
      PetscCall(PetscViewerASCIIPrintf(viewer, "  Number of refinement iterations : %" PetscInt_FMT " \n", lu->iparm[IPARM_NBITER]));
      /* route the error norm through the viewer as well, instead of PetscPrintf() on PETSC_COMM_SELF */
      PetscCall(PetscViewerASCIIPrintf(viewer, "  Error : %g \n", lu->dparm[DPARM_RELATIVE_ERROR]));
    }
  }
  PetscFunctionReturn(PETSC_SUCCESS);
}

/*MC
  MATSOLVERPASTIX - A solver package providing direct solvers (LU) for distributed
  and sequential matrices via the external package PaStiX.

  Use `./configure` `--download-pastix` `--download-ptscotch` to have PETSc installed with PaStiX

  Use `-pc_type lu` `-pc_factor_mat_solver_type pastix` to use this direct solver

  Options Database Keys:
+ -mat_pastix_verbose <0,1,2>     - level of printing of information messages from PaStiX
- -mat_pastix_threadnbr <integer> - set the number of threads for each MPI process

  Notes:
  This only works for matrices with a symmetric nonzero structure; if you pass it a matrix with a
  nonsymmetric structure, PaStiX, and hence PETSc, will return an error.
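
  For example (with `./app` standing in for any PETSc application), the options above can be combined as
.vb
  mpiexec -n 4 ./app -pc_type lu -pc_factor_mat_solver_type pastix -mat_pastix_verbose 1 -mat_pastix_threadnbr 2
.ve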

  Level: beginner

.seealso: [](chapter_matrices), `Mat`, `PCFactorSetMatSolverType()`, `MatSolverType`, `MatGetFactor()`
M*/

PetscErrorCode MatGetInfo_PaStiX(Mat A, MatInfoType flag, MatInfo *info)
{
  Mat_Pastix *lu = (Mat_Pastix *)A->data;

  PetscFunctionBegin;
  info->block_size        = 1.0;
  info->nz_allocated      = lu->iparm[IPARM_NNZEROS];
  info->nz_used           = lu->iparm[IPARM_NNZEROS];
  info->nz_unneeded       = 0.0;
  info->assemblies        = 0.0;
  info->mallocs           = 0.0;
  info->memory            = 0.0;
  info->fill_ratio_given  = 0;
  info->fill_ratio_needed = 0;
  info->factor_mallocs    = 0;
  PetscFunctionReturn(PETSC_SUCCESS);
}

static PetscErrorCode MatFactorGetSolverType_pastix(Mat A, MatSolverType *type)
{
  PetscFunctionBegin;
  *type = MATSOLVERPASTIX;
  PetscFunctionReturn(PETSC_SUCCESS);
}

/*
  The seq and mpi versions of this function are the same
*/
static PetscErrorCode MatGetFactor_seqaij_pastix(Mat A, MatFactorType ftype, Mat *F)
{
  Mat         B;
  Mat_Pastix *pastix;

  PetscFunctionBegin;
  PetscCheck(ftype == MAT_FACTOR_LU, PETSC_COMM_SELF, PETSC_ERR_SUP, "Cannot use PETSc AIJ matrices with PaStiX Cholesky, use SBAIJ matrix");
  /* Create the factorization matrix */
  PetscCall(MatCreate(PetscObjectComm((PetscObject)A), &B));
  PetscCall(MatSetSizes(B, A->rmap->n, A->cmap->n, A->rmap->N, A->cmap->N));
  PetscCall(PetscStrallocpy("pastix", &((PetscObject)B)->type_name));
  PetscCall(MatSetUp(B));

  B->trivialsymbolic       = PETSC_TRUE;
  B->ops->lufactorsymbolic = MatLUFactorSymbolic_AIJPASTIX;
  B->ops->view             = MatView_PaStiX;
  B->ops->getinfo          = MatGetInfo_PaStiX;

  PetscCall(PetscObjectComposeFunction((PetscObject)B, "MatFactorGetSolverType_C", MatFactorGetSolverType_pastix));

  B->factortype = MAT_FACTOR_LU;

  /* set solvertype */
  PetscCall(PetscFree(B->solvertype));
  PetscCall(PetscStrallocpy(MATSOLVERPASTIX, &B->solvertype));

  PetscCall(PetscNew(&pastix));

  pastix->CleanUpPastix = PETSC_FALSE;
  pastix->scat_rhs      = NULL;
  pastix->scat_sol      = NULL;
  B->ops->getinfo       = MatGetInfo_External;
  B->ops->destroy       = MatDestroy_Pastix;
  B->data               = (void *)pastix;

  *F = B;
  PetscFunctionReturn(PETSC_SUCCESS);
}

static PetscErrorCode MatGetFactor_mpiaij_pastix(Mat A, MatFactorType ftype, Mat *F)
{
  Mat         B;
  Mat_Pastix *pastix;

  PetscFunctionBegin;
  PetscCheck(ftype == MAT_FACTOR_LU, PETSC_COMM_SELF, PETSC_ERR_SUP, "Cannot use PETSc AIJ matrices with PaStiX Cholesky, use SBAIJ matrix");
  /* Create the factorization matrix */
  PetscCall(MatCreate(PetscObjectComm((PetscObject)A), &B));
  PetscCall(MatSetSizes(B, A->rmap->n, A->cmap->n, A->rmap->N, A->cmap->N));
  PetscCall(PetscStrallocpy("pastix", &((PetscObject)B)->type_name));
  PetscCall(MatSetUp(B));

  B->trivialsymbolic       = PETSC_TRUE;
  B->ops->lufactorsymbolic = MatLUFactorSymbolic_AIJPASTIX;
  B->ops->view             = MatView_PaStiX;
  B->ops->getinfo          = MatGetInfo_PaStiX;
  PetscCall(PetscObjectComposeFunction((PetscObject)B, "MatFactorGetSolverType_C", MatFactorGetSolverType_pastix));

  B->factortype = MAT_FACTOR_LU;

  /* set solvertype */
  PetscCall(PetscFree(B->solvertype));
  PetscCall(PetscStrallocpy(MATSOLVERPASTIX, &B->solvertype));

  PetscCall(PetscNew(&pastix));

  pastix->CleanUpPastix = PETSC_FALSE;
  pastix->scat_rhs      = NULL;
  pastix->scat_sol      = NULL;
  B->ops->getinfo       = MatGetInfo_External;
  B->ops->destroy       = MatDestroy_Pastix;
  B->data               = (void *)pastix;

  *F = B;
  PetscFunctionReturn(PETSC_SUCCESS);
}

static PetscErrorCode MatGetFactor_seqsbaij_pastix(Mat A, MatFactorType ftype, Mat *F)
{
  Mat         B;
  Mat_Pastix *pastix;

  PetscFunctionBegin;
  PetscCheck(ftype == MAT_FACTOR_CHOLESKY, PETSC_COMM_SELF, PETSC_ERR_SUP, "Cannot use PETSc SBAIJ matrices with PaStiX LU, use AIJ matrix");
  /* Create the factorization matrix */
  PetscCall(MatCreate(PetscObjectComm((PetscObject)A), &B));
  PetscCall(MatSetSizes(B, A->rmap->n, A->cmap->n, A->rmap->N, A->cmap->N));
  PetscCall(PetscStrallocpy("pastix", &((PetscObject)B)->type_name));
  PetscCall(MatSetUp(B));

  B->trivialsymbolic             = PETSC_TRUE;
  B->ops->choleskyfactorsymbolic = MatCholeskyFactorSymbolic_SBAIJPASTIX;
  B->ops->view                   = MatView_PaStiX;
  B->ops->getinfo                = MatGetInfo_PaStiX;
  PetscCall(PetscObjectComposeFunction((PetscObject)B, "MatFactorGetSolverType_C", MatFactorGetSolverType_pastix));

  B->factortype = MAT_FACTOR_CHOLESKY;

  /* set solvertype */
  PetscCall(PetscFree(B->solvertype));
  PetscCall(PetscStrallocpy(MATSOLVERPASTIX, &B->solvertype));

  PetscCall(PetscNew(&pastix));

  pastix->CleanUpPastix = PETSC_FALSE;
  pastix->scat_rhs      = NULL;
  pastix->scat_sol      = NULL;
  B->ops->getinfo       = MatGetInfo_External;
  B->ops->destroy       = MatDestroy_Pastix;
  B->data               = (void *)pastix;
  *F = B;
  PetscFunctionReturn(PETSC_SUCCESS);
}

static PetscErrorCode MatGetFactor_mpisbaij_pastix(Mat A, MatFactorType ftype, Mat *F)
{
  Mat         B;
  Mat_Pastix *pastix;

  PetscFunctionBegin;
  PetscCheck(ftype == MAT_FACTOR_CHOLESKY, PETSC_COMM_SELF, PETSC_ERR_SUP, "Cannot use PETSc SBAIJ matrices with PaStiX LU, use AIJ matrix");

  /* Create the factorization matrix */
  PetscCall(MatCreate(PetscObjectComm((PetscObject)A), &B));
  PetscCall(MatSetSizes(B, A->rmap->n, A->cmap->n, A->rmap->N, A->cmap->N));
  PetscCall(PetscStrallocpy("pastix", &((PetscObject)B)->type_name));
  PetscCall(MatSetUp(B));

  B->ops->choleskyfactorsymbolic = MatCholeskyFactorSymbolic_SBAIJPASTIX;
  B->ops->view                   = MatView_PaStiX;
  B->ops->getinfo                = MatGetInfo_PaStiX;
  B->ops->destroy                = MatDestroy_Pastix;
  PetscCall(PetscObjectComposeFunction((PetscObject)B, "MatFactorGetSolverType_C", MatFactorGetSolverType_pastix));

  B->factortype = MAT_FACTOR_CHOLESKY;

  /* set solvertype */
  PetscCall(PetscFree(B->solvertype));
  PetscCall(PetscStrallocpy(MATSOLVERPASTIX, &B->solvertype));

  PetscCall(PetscNew(&pastix));

  pastix->CleanUpPastix = PETSC_FALSE;
  pastix->scat_rhs      = NULL;
  pastix->scat_sol      = NULL;
  B->data               = (void *)pastix;

  *F = B;
  PetscFunctionReturn(PETSC_SUCCESS);
}

PETSC_EXTERN PetscErrorCode MatSolverTypeRegister_Pastix(void)
{
  PetscFunctionBegin;
  PetscCall(MatSolverTypeRegister(MATSOLVERPASTIX, MATMPIAIJ, MAT_FACTOR_LU, MatGetFactor_mpiaij_pastix));
  PetscCall(MatSolverTypeRegister(MATSOLVERPASTIX, MATSEQAIJ, MAT_FACTOR_LU, MatGetFactor_seqaij_pastix));
  PetscCall(MatSolverTypeRegister(MATSOLVERPASTIX, MATMPISBAIJ, MAT_FACTOR_CHOLESKY, MatGetFactor_mpisbaij_pastix));
  PetscCall(MatSolverTypeRegister(MATSOLVERPASTIX, MATSEQSBAIJ, MAT_FACTOR_CHOLESKY, MatGetFactor_seqsbaij_pastix));
  PetscFunctionReturn(PETSC_SUCCESS);
}
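
/*
  A minimal usage sketch (illustrative user code, not part of this file), assuming a square,
  assembled AIJ matrix A and conforming vectors b and x; the r and c permutations are ignored
  by this interface (see the note above MatLUFactorSymbolic_AIJPASTIX()), so NULL is passed:

    Mat           F;
    MatFactorInfo info;

    PetscCall(MatFactorInfoInitialize(&info));
    PetscCall(MatGetFactor(A, MATSOLVERPASTIX, MAT_FACTOR_LU, &F));
    PetscCall(MatLUFactorSymbolic(F, A, NULL, NULL, &info));
    PetscCall(MatLUFactorNumeric(F, A, &info));
    PetscCall(MatSolve(F, b, x));
    PetscCall(MatDestroy(&F));

  Most applications reach this path indirectly via -pc_type lu -pc_factor_mat_solver_type pastix.
*/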