Actual source code: petscmat.h

  1: /*
  2:      Include file for the matrix component of PETSc
  3: */
  4: #ifndef __PETSCMAT_H
  6:  #include "petscvec.h"

  9: /*S
 10:      Mat - Abstract PETSc matrix object

 12:    Level: beginner

 14:   Concepts: matrix; linear operator

 16: .seealso:  MatCreate(), MatType, MatSetType()
 17: S*/
 18: typedef struct _p_Mat*           Mat;

 20: /*E
 21:     MatType - String with the name of a PETSc matrix type or the creation function
 22:        with an optional dynamic library name, for example
 23:        http://www.mcs.anl.gov/petsc/lib.a:mymatcreate()

 25:    Level: beginner

 27: .seealso: MatSetType(), Mat, MatSolverPackage
 28: E*/
 29: #define MatType char*
 30: #define MATSAME            "same"
 31: #define MATMAIJ            "maij"
 32: #define MATSEQMAIJ         "seqmaij"
 33: #define MATMPIMAIJ         "mpimaij"
 34: #define MATIS              "is"
 35: #define MATAIJ             "aij"
 36: #define MATSEQAIJ          "seqaij"
 37: #define MATSEQAIJPTHREAD   "seqaijpthread"
 38: #define MATAIJPTHREAD      "aijpthread"
 39: #define MATMPIAIJ          "mpiaij"
 40: #define MATAIJCRL          "aijcrl"
 41: #define MATSEQAIJCRL       "seqaijcrl"
 42: #define MATMPIAIJCRL       "mpiaijcrl"
 43: #define MATAIJCUSP         "aijcusp"
 44: #define MATSEQAIJCUSP      "seqaijcusp"
 45: #define MATMPIAIJCUSP      "mpiaijcusp"
 46: #define MATAIJPERM         "aijperm"
 47: #define MATSEQAIJPERM      "seqaijperm"
 48: #define MATMPIAIJPERM      "mpiaijperm"
 49: #define MATSHELL           "shell"
 50: #define MATDENSE           "dense"
 51: #define MATSEQDENSE        "seqdense"
 52: #define MATMPIDENSE        "mpidense"
 53: #define MATBAIJ            "baij"
 54: #define MATSEQBAIJ         "seqbaij"
 55: #define MATMPIBAIJ         "mpibaij"
 56: #define MATMPIADJ          "mpiadj"
 57: #define MATSBAIJ           "sbaij"
 58: #define MATSEQSBAIJ        "seqsbaij"
 59: #define MATMPISBAIJ        "mpisbaij"

 61: #define MATSEQBSTRM        "seqbstrm"
 62: #define MATMPIBSTRM        "mpibstrm"
 63: #define MATBSTRM           "bstrm"
 64: #define MATSEQSBSTRM       "seqsbstrm"
 65: #define MATMPISBSTRM       "mpisbstrm"
 66: #define MATSBSTRM          "sbstrm"

 68: #define MATDAAD            "daad"
 69: #define MATMFFD            "mffd"
 70: #define MATNORMAL          "normal"
 71: #define MATLRC             "lrc"
 72: #define MATSCATTER         "scatter"
 73: #define MATBLOCKMAT        "blockmat"
 74: #define MATCOMPOSITE       "composite"
 75: #define MATFFT             "fft"
 76: #define MATFFTW            "fftw"
 77: #define MATSEQCUFFT        "seqcufft"
 78: #define MATTRANSPOSEMAT    "transpose"
 79: #define MATSCHURCOMPLEMENT "schurcomplement"
 80: #define MATPYTHON          "python"
 81: #define MATHYPRESTRUCT     "hyprestruct"
 82: #define MATHYPRESSTRUCT    "hypresstruct"
 83: #define MATSUBMATRIX       "submatrix"
 84: #define MATLOCALREF        "localref"
 85: #define MATNEST            "nest"
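
/*
   A minimal usage sketch for the MatType names above: create a Mat, select a type by
   name with MatSetType(), and preallocate. The sizes (10 x 10, 3 nonzeros per row) are
   arbitrary placeholders, and the preallocation call that does not match the chosen type
   is simply ignored.
.vb
     Mat            A;
     PetscErrorCode ierr;

     ierr = MatCreate(PETSC_COMM_WORLD,&A);CHKERRQ(ierr);
     ierr = MatSetSizes(A,PETSC_DECIDE,PETSC_DECIDE,10,10);CHKERRQ(ierr);
     ierr = MatSetType(A,MATAIJ);CHKERRQ(ierr);
     ierr = MatSeqAIJSetPreallocation(A,3,PETSC_NULL);CHKERRQ(ierr);
     ierr = MatMPIAIJSetPreallocation(A,3,PETSC_NULL,2,PETSC_NULL);CHKERRQ(ierr);
.ve
*/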

 87: /*E
 88:     MatSolverPackage - String with the name of a PETSc matrix solver type. 

 90:     For example: "petsc" indicates what PETSc provides, "superlu" indicates either 
 91:        SuperLU or SuperLU_Dist etc.


 94:    Level: beginner

 96: .seealso: MatGetFactor(), Mat, MatSetType(), MatType
 97: E*/
 98: #define MatSolverPackage char*
 99: #define MATSOLVERSPOOLES      "spooles"
100: #define MATSOLVERSUPERLU      "superlu"
101: #define MATSOLVERSUPERLU_DIST "superlu_dist"
102: #define MATSOLVERUMFPACK      "umfpack"
103: #define MATSOLVERCHOLMOD      "cholmod"
104: #define MATSOLVERESSL         "essl"
105: #define MATSOLVERLUSOL        "lusol"
106: #define MATSOLVERMUMPS        "mumps"
107: #define MATSOLVERPASTIX       "pastix"
108: #define MATSOLVERMATLAB       "matlab"
109: #define MATSOLVERPETSC        "petsc"
110: #define MATSOLVERPLAPACK      "plapack"
111: #define MATSOLVERBAS          "bas"

113: #define MATSOLVERBSTRM        "bstrm"
114: #define MATSOLVERSBSTRM       "sbstrm"

116: /*E
117:     MatFactorType - indicates what type of factorization is requested

119:     Level: beginner

121:    Any additions/changes here MUST also be made in include/finclude/petscmat.h

123: .seealso: MatSolverPackage, MatGetFactor()
124: E*/
125: typedef enum {MAT_FACTOR_NONE, MAT_FACTOR_LU, MAT_FACTOR_CHOLESKY, MAT_FACTOR_ILU, MAT_FACTOR_ICC,MAT_FACTOR_ILUDT} MatFactorType;
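
/*
   Sketch of how a MatSolverPackage name and a MatFactorType are combined through
   MatGetFactor(): an assembled matrix A (assumed to be created elsewhere) is handed to a
   named package, which returns a factor matrix F for the requested factorization.
.vb
     Mat            F;
     PetscErrorCode ierr;

     ierr = MatGetFactor(A,MATSOLVERPETSC,MAT_FACTOR_LU,&F);CHKERRQ(ierr);
.ve
   F is subsequently used with MatLUFactorSymbolic(), MatLUFactorNumeric(), and MatSolve().
*/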


133: /* Logging support */
134: #define    MAT_FILE_CLASSID 1211216    /* used to indicate matrices in binary files */

141: /*E
142:     MatReuse - Indicates if matrices obtained from a previous call to MatGetSubMatrices()
143:      or MatGetSubMatrix() are to be reused to store the new matrix values. For MatConvert() it is used to indicate
144:      that the input matrix is to be replaced with the converted matrix.

146:     Level: beginner

148:    Any additions/changes here MUST also be made in include/finclude/petscmat.h

150: .seealso: MatGetSubMatrices(), MatGetSubMatrix(), MatDestroyMatrices(), MatConvert()
151: E*/
152: typedef enum {MAT_INITIAL_MATRIX,MAT_REUSE_MATRIX,MAT_IGNORE_MATRIX} MatReuse;
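
/*
   Sketch of MatReuse with MatConvert(), assuming an assembled matrix A created elsewhere:
   MAT_INITIAL_MATRIX creates a new matrix, while MAT_REUSE_MATRIX with the input matrix
   passed as the output argument replaces A with the converted matrix, as described above.
.vb
     Mat            B;
     PetscErrorCode ierr;

     ierr = MatConvert(A,MATDENSE,MAT_INITIAL_MATRIX,&B);CHKERRQ(ierr);
     ierr = MatConvert(A,MATDENSE,MAT_REUSE_MATRIX,&A);CHKERRQ(ierr);
.ve
*/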

154: /*E
155:     MatGetSubMatrixOption - Indicates if matrices obtained from a call to MatGetSubMatrices()
156:      include the matrix values. Currently it is only used by MatGetSeqNonzerostructure().

158:     Level: beginner

160: .seealso: MatGetSeqNonzerostructure()
161: E*/
162: typedef enum {MAT_DO_NOT_GET_VALUES,MAT_GET_VALUES} MatGetSubMatrixOption;


167: PetscPolymorphicFunction(MatCreate,(MPI_Comm comm),(comm,&A),Mat,A)
168: PetscPolymorphicFunction(MatCreate,(),(PETSC_COMM_WORLD,&A),Mat,A)

180: /*MC
181:    MatRegisterDynamic - Adds a new matrix type

183:    Synopsis:
184:    PetscErrorCode MatRegisterDynamic(const char *name,const char *path,const char *name_create,PetscErrorCode (*routine_create)(Mat))

186:    Not Collective

188:    Input Parameters:
189: +  name - name of a new user-defined matrix type
190: .  path - path (either absolute or relative) to the library containing this solver
191: .  name_create - name of routine to create method context
192: -  routine_create - routine to create method context

194:    Notes:
195:    MatRegisterDynamic() may be called multiple times to add several user-defined solvers.

197:    If dynamic libraries are used, then the fourth input argument (routine_create)
198:    is ignored.

200:    Sample usage:
201: .vb
202:    MatRegisterDynamic("my_mat","/home/username/my_lib/lib/libO/solaris/mylib.a",
203:                "MyMatCreate",MyMatCreate);
204: .ve

206:    Then, your solver can be chosen with the procedural interface via
207: $     MatSetType(Mat,"my_mat")
208:    or at runtime via the option
209: $     -mat_type my_mat

211:    Level: advanced

213:    Notes: ${PETSC_ARCH} occurring in pathname will be replaced with appropriate values.
214:          If your function is not being put into a shared library then use MatRegister() instead

216: .keywords: Mat, register

218: .seealso: MatRegisterAll(), MatRegisterDestroy()

220: M*/
221: #if defined(PETSC_USE_DYNAMIC_LIBRARIES)
222: #define MatRegisterDynamic(a,b,c,d) MatRegister(a,b,c,0)
223: #else
224: #define MatRegisterDynamic(a,b,c,d) MatRegister(a,b,c,d)
225: #endif


232: /*E
233:     MatStructure - Indicates if two matrices have the same nonzero structure

235:     Level: beginner

237:    Any additions/changes here MUST also be made in include/finclude/petscmat.h

239: .seealso: MatCopy(), KSPSetOperators(), PCSetOperators()
240: E*/
241: typedef enum {DIFFERENT_NONZERO_PATTERN,SUBSET_NONZERO_PATTERN,SAME_NONZERO_PATTERN,SAME_PRECONDITIONER} MatStructure;
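
/*
   Sketch of where a MatStructure flag typically appears: in this generation of PETSc it
   is the final argument of KSPSetOperators()/PCSetOperators(), telling the solver how
   much of the existing preconditioner may be reused. ksp, A, P, and ierr are assumed to
   be set up elsewhere.
.vb
     ierr = KSPSetOperators(ksp,A,P,SAME_NONZERO_PATTERN);CHKERRQ(ierr);
.ve
*/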

246: PetscPolymorphicFunction(MatCreateSeqAIJ,(PetscInt m,PetscInt n,PetscInt nz,const PetscInt nnz[]),(PETSC_COMM_SELF,m,n,nz,nnz,&A),Mat,A)
247: PetscPolymorphicFunction(MatCreateSeqAIJ,(PetscInt m,PetscInt n,PetscInt nz),(PETSC_COMM_SELF,m,n,nz,PETSC_NULL,&A),Mat,A)
248: PetscPolymorphicFunction(MatCreateSeqAIJ,(PetscInt m,PetscInt n,const PetscInt nnz[]),(PETSC_COMM_SELF,m,n,0,nnz,&A),Mat,A)
249: PetscPolymorphicFunction(MatCreateSeqAIJ,(PetscInt m,PetscInt n),(PETSC_COMM_SELF,m,n,0,PETSC_NULL,&A),Mat,A)
250: PetscPolymorphicSubroutine(MatCreateSeqAIJ,(PetscInt m,PetscInt n,PetscInt nz,Mat *A),(PETSC_COMM_SELF,m,n,nz,PETSC_NULL,A))
251: PetscPolymorphicSubroutine(MatCreateSeqAIJ,(PetscInt m,PetscInt n,const PetscInt nnz[],Mat *A),(PETSC_COMM_SELF,m,n,0,nnz,A))
252: PetscPolymorphicSubroutine(MatCreateSeqAIJ,(PetscInt m,PetscInt n,Mat *A),(PETSC_COMM_SELF,m,n,0,PETSC_NULL,A))
254: PetscPolymorphicFunction(MatCreateMPIAIJ,(MPI_Comm comm,PetscInt m,PetscInt n,PetscInt M,PetscInt N,PetscInt nz,const PetscInt nnz[],PetscInt onz,const PetscInt onnz[]),(comm,m,n,M,N,nz,nnz,onz,onnz,&A),Mat,A)
255: PetscPolymorphicFunction(MatCreateMPIAIJ,(MPI_Comm comm,PetscInt m,PetscInt n,PetscInt M,PetscInt N,PetscInt nz,PetscInt nnz),(comm,m,n,M,N,nz,PETSC_NULL,nnz,PETSC_NULL,&A),Mat,A)
256: PetscPolymorphicFunction(MatCreateMPIAIJ,(MPI_Comm comm,PetscInt m,PetscInt n,PetscInt M,PetscInt N,const PetscInt nnz[],const PetscInt onz[]),(comm,m,n,M,N,0,nnz,0,onz,&A),Mat,A)
257: PetscPolymorphicFunction(MatCreateMPIAIJ,(MPI_Comm comm,PetscInt m,PetscInt n,PetscInt M,PetscInt N),(comm,m,n,M,N,0,PETSC_NULL,0,PETSC_NULL,&A),Mat,A)
258: PetscPolymorphicSubroutine(MatCreateMPIAIJ,(MPI_Comm comm,PetscInt m,PetscInt n,PetscInt M,PetscInt N,PetscInt nz,PetscInt nnz,Mat *A),(comm,m,n,M,N,nz,PETSC_NULL,nnz,PETSC_NULL,A))
259: PetscPolymorphicSubroutine(MatCreateMPIAIJ,(MPI_Comm comm,PetscInt m,PetscInt n,PetscInt M,PetscInt N,const PetscInt nnz[],const PetscInt onz[],Mat *A),(comm,m,n,M,N,0,nnz,0,onz,A))
260: PetscPolymorphicSubroutine(MatCreateMPIAIJ,(MPI_Comm comm,PetscInt m,PetscInt n,PetscInt M,PetscInt N,Mat *A),(comm,m,n,M,N,0,PETSC_NULL,0,PETSC_NULL,A))
261: PetscPolymorphicFunction(MatCreateMPIAIJ,(PetscInt m,PetscInt n,PetscInt M,PetscInt N,PetscInt nz,const PetscInt nnz[],PetscInt onz,const PetscInt onnz[]),(PETSC_COMM_WORLD,m,n,M,N,nz,nnz,onz,onnz,&A),Mat,A)
262: PetscPolymorphicFunction(MatCreateMPIAIJ,(PetscInt m,PetscInt n,PetscInt M,PetscInt N,PetscInt nz,PetscInt nnz),(PETSC_COMM_WORLD,m,n,M,N,nz,PETSC_NULL,nnz,PETSC_NULL,&A),Mat,A)
263: PetscPolymorphicFunction(MatCreateMPIAIJ,(PetscInt m,PetscInt n,PetscInt M,PetscInt N,const PetscInt nnz[],const PetscInt onz[]),(PETSC_COMM_WORLD,m,n,M,N,0,nnz,0,onz,&A),Mat,A)
264: PetscPolymorphicFunction(MatCreateMPIAIJ,(PetscInt m,PetscInt n,PetscInt M,PetscInt N),(PETSC_COMM_WORLD,m,n,M,N,0,PETSC_NULL,0,PETSC_NULL,&A),Mat,A)
265: PetscPolymorphicSubroutine(MatCreateMPIAIJ,(PetscInt m,PetscInt n,PetscInt M,PetscInt N,PetscInt nz,PetscInt nnz,Mat *A),(PETSC_COMM_WORLD,m,n,M,N,nz,PETSC_NULL,nnz,PETSC_NULL,A))
266: PetscPolymorphicSubroutine(MatCreateMPIAIJ,(PetscInt m,PetscInt n,PetscInt M,PetscInt N,const PetscInt nnz[],const PetscInt onz[],Mat *A),(PETSC_COMM_WORLD,m,n,M,N,0,nnz,0,onz,A))
267: PetscPolymorphicSubroutine(MatCreateMPIAIJ,(PetscInt m,PetscInt n,PetscInt M,PetscInt N,Mat *A),(PETSC_COMM_WORLD,m,n,M,N,0,PETSC_NULL,0,PETSC_NULL,A))

272: PetscPolymorphicFunction(MatCreateSeqBAIJ,(PetscInt bs,PetscInt m,PetscInt n,PetscInt nz,const PetscInt nnz[]),(PETSC_COMM_SELF,bs,m,n,nz,nnz,&A),Mat,A)
273: PetscPolymorphicFunction(MatCreateSeqBAIJ,(PetscInt bs,PetscInt m,PetscInt n,PetscInt nz),(PETSC_COMM_SELF,bs,m,n,nz,PETSC_NULL,&A),Mat,A)
274: PetscPolymorphicFunction(MatCreateSeqBAIJ,(PetscInt bs,PetscInt m,PetscInt n,const PetscInt nnz[]),(PETSC_COMM_SELF,bs,m,n,0,nnz,&A),Mat,A)
275: PetscPolymorphicFunction(MatCreateSeqBAIJ,(PetscInt bs,PetscInt m,PetscInt n),(PETSC_COMM_SELF,bs,m,n,0,PETSC_NULL,&A),Mat,A)
276: PetscPolymorphicSubroutine(MatCreateSeqBAIJ,(PetscInt bs,PetscInt m,PetscInt n,PetscInt nz,Mat *A),(PETSC_COMM_SELF,bs,m,n,nz,PETSC_NULL,A))
277: PetscPolymorphicSubroutine(MatCreateSeqBAIJ,(PetscInt bs,PetscInt m,PetscInt n,const PetscInt nnz[],Mat *A),(PETSC_COMM_SELF,bs,m,n,0,nnz,A))
278: PetscPolymorphicSubroutine(MatCreateSeqBAIJ,(PetscInt bs,PetscInt m,PetscInt n,Mat *A),(PETSC_COMM_SELF,bs,m,n,0,PETSC_NULL,A))
280: PetscPolymorphicFunction(MatCreateMPIBAIJ,(MPI_Comm comm,PetscInt bs,PetscInt m,PetscInt n,PetscInt M,PetscInt N,PetscInt nz,const PetscInt nnz[],PetscInt onz,const PetscInt onnz[]),(comm,bs,m,n,M,N,nz,nnz,onz,onnz,&A),Mat,A)
281: PetscPolymorphicFunction(MatCreateMPIBAIJ,(MPI_Comm comm,PetscInt bs,PetscInt m,PetscInt n,PetscInt M,PetscInt N,PetscInt nz,PetscInt nnz),(comm,bs,m,n,M,N,nz,PETSC_NULL,nnz,PETSC_NULL,&A),Mat,A)
282: PetscPolymorphicFunction(MatCreateMPIBAIJ,(MPI_Comm comm,PetscInt bs,PetscInt m,PetscInt n,PetscInt M,PetscInt N,const PetscInt nnz[],const PetscInt onz[]),(comm,bs,m,n,M,N,0,nnz,0,onz,&A),Mat,A)
283: PetscPolymorphicFunction(MatCreateMPIBAIJ,(MPI_Comm comm,PetscInt bs,PetscInt m,PetscInt n,PetscInt M,PetscInt N),(comm,bs,m,n,M,N,0,PETSC_NULL,0,PETSC_NULL,&A),Mat,A)
284: PetscPolymorphicSubroutine(MatCreateMPIBAIJ,(MPI_Comm comm,PetscInt bs,PetscInt m,PetscInt n,PetscInt M,PetscInt N,PetscInt nz,PetscInt nnz,Mat *A),(comm,bs,m,n,M,N,nz,PETSC_NULL,nnz,PETSC_NULL,A))
285: PetscPolymorphicSubroutine(MatCreateMPIBAIJ,(MPI_Comm comm,PetscInt bs,PetscInt m,PetscInt n,PetscInt M,PetscInt N,const PetscInt nnz[],const PetscInt onz[],Mat *A),(comm,bs,m,n,M,N,0,nnz,0,onz,A))
286: PetscPolymorphicSubroutine(MatCreateMPIBAIJ,(MPI_Comm comm,PetscInt bs,PetscInt m,PetscInt n,PetscInt M,PetscInt N,Mat *A),(comm,bs,m,n,M,N,0,PETSC_NULL,0,PETSC_NULL,A))
287: PetscPolymorphicFunction(MatCreateMPIBAIJ,(PetscInt bs,PetscInt m,PetscInt n,PetscInt M,PetscInt N,PetscInt nz,const PetscInt nnz[],PetscInt onz,const PetscInt onnz[]),(PETSC_COMM_WORLD,bs,m,n,M,N,nz,nnz,onz,onnz,&A),Mat,A)
288: PetscPolymorphicFunction(MatCreateMPIBAIJ,(PetscInt bs,PetscInt m,PetscInt n,PetscInt M,PetscInt N,PetscInt nz,PetscInt nnz),(PETSC_COMM_WORLD,bs,m,n,M,N,nz,PETSC_NULL,nnz,PETSC_NULL,&A),Mat,A)
289: PetscPolymorphicFunction(MatCreateMPIBAIJ,(PetscInt bs,PetscInt m,PetscInt n,PetscInt M,PetscInt N,const PetscInt nnz[],const PetscInt onz[]),(PETSC_COMM_WORLD,bs,m,n,M,N,0,nnz,0,onz,&A),Mat,A)
290: PetscPolymorphicFunction(MatCreateMPIBAIJ,(PetscInt bs,PetscInt m,PetscInt n,PetscInt M,PetscInt N),(PETSC_COMM_WORLD,bs,m,n,M,N,0,PETSC_NULL,0,PETSC_NULL,&A),Mat,A)
291: PetscPolymorphicSubroutine(MatCreateMPIBAIJ,(PetscInt bs,PetscInt m,PetscInt n,PetscInt M,PetscInt N,PetscInt nz,PetscInt nnz,Mat *A),(PETSC_COMM_WORLD,bs,m,n,M,N,nz,PETSC_NULL,nnz,PETSC_NULL,A))
292: PetscPolymorphicSubroutine(MatCreateMPIBAIJ,(PetscInt bs,PetscInt m,PetscInt n,PetscInt M,PetscInt N,const PetscInt nnz[],const PetscInt onz[],Mat *A),(PETSC_COMM_WORLD,bs,m,n,M,N,0,nnz,0,onz,A))
293: PetscPolymorphicSubroutine(MatCreateMPIBAIJ,(PetscInt bs,PetscInt m,PetscInt n,PetscInt M,PetscInt N,Mat *A),(PETSC_COMM_WORLD,bs,m,n,M,N,0,PETSC_NULL,0,PETSC_NULL,A))

298: PetscPolymorphicFunction(MatCreateSeqSBAIJ,(PetscInt bs,PetscInt m,PetscInt n,PetscInt nz,const PetscInt nnz[]),(PETSC_COMM_SELF,bs,m,n,nz,nnz,&A),Mat,A)
299: PetscPolymorphicFunction(MatCreateSeqSBAIJ,(PetscInt bs,PetscInt m,PetscInt n,PetscInt nz),(PETSC_COMM_SELF,bs,m,n,nz,PETSC_NULL,&A),Mat,A)
300: PetscPolymorphicFunction(MatCreateSeqSBAIJ,(PetscInt bs,PetscInt m,PetscInt n,const PetscInt nnz[]),(PETSC_COMM_SELF,bs,m,n,0,nnz,&A),Mat,A)
301: PetscPolymorphicFunction(MatCreateSeqSBAIJ,(PetscInt bs,PetscInt m,PetscInt n),(PETSC_COMM_SELF,bs,m,n,0,PETSC_NULL,&A),Mat,A)
302: PetscPolymorphicSubroutine(MatCreateSeqSBAIJ,(PetscInt bs,PetscInt m,PetscInt n,PetscInt nz,Mat *A),(PETSC_COMM_SELF,bs,m,n,nz,PETSC_NULL,A))
303: PetscPolymorphicSubroutine(MatCreateSeqSBAIJ,(PetscInt bs,PetscInt m,PetscInt n,const PetscInt nnz[],Mat *A),(PETSC_COMM_SELF,bs,m,n,0,nnz,A))
304: PetscPolymorphicSubroutine(MatCreateSeqSBAIJ,(PetscInt bs,PetscInt m,PetscInt n,Mat *A),(PETSC_COMM_SELF,bs,m,n,0,PETSC_NULL,A))

307: PetscPolymorphicFunction(MatCreateMPISBAIJ,(MPI_Comm comm,PetscInt bs,PetscInt m,PetscInt n,PetscInt M,PetscInt N,PetscInt nz,const PetscInt nnz[],PetscInt onz,const PetscInt onnz[]),(comm,bs,m,n,M,N,nz,nnz,onz,onnz,&A),Mat,A)
308: PetscPolymorphicFunction(MatCreateMPISBAIJ,(MPI_Comm comm,PetscInt bs,PetscInt m,PetscInt n,PetscInt M,PetscInt N,PetscInt nz,PetscInt nnz),(comm,bs,m,n,M,N,nz,PETSC_NULL,nnz,PETSC_NULL,&A),Mat,A)
309: PetscPolymorphicFunction(MatCreateMPISBAIJ,(MPI_Comm comm,PetscInt bs,PetscInt m,PetscInt n,PetscInt M,PetscInt N,const PetscInt nnz[],const PetscInt onz[]),(comm,bs,m,n,M,N,0,nnz,0,onz,&A),Mat,A)
310: PetscPolymorphicFunction(MatCreateMPISBAIJ,(MPI_Comm comm,PetscInt bs,PetscInt m,PetscInt n,PetscInt M,PetscInt N),(comm,bs,m,n,M,N,0,PETSC_NULL,0,PETSC_NULL,&A),Mat,A)
311: PetscPolymorphicSubroutine(MatCreateMPISBAIJ,(MPI_Comm comm,PetscInt bs,PetscInt m,PetscInt n,PetscInt M,PetscInt N,PetscInt nz,PetscInt nnz,Mat *A),(comm,bs,m,n,M,N,nz,PETSC_NULL,nnz,PETSC_NULL,A))
312: PetscPolymorphicSubroutine(MatCreateMPISBAIJ,(MPI_Comm comm,PetscInt bs,PetscInt m,PetscInt n,PetscInt M,PetscInt N,const PetscInt nnz[],const PetscInt onz[],Mat *A),(comm,bs,m,n,M,N,0,nnz,0,onz,A))
313: PetscPolymorphicSubroutine(MatCreateMPISBAIJ,(MPI_Comm comm,PetscInt bs,PetscInt m,PetscInt n,PetscInt M,PetscInt N,Mat *A),(comm,bs,m,n,M,N,0,PETSC_NULL,0,PETSC_NULL,A))
314: PetscPolymorphicFunction(MatCreateMPISBAIJ,(PetscInt bs,PetscInt m,PetscInt n,PetscInt M,PetscInt N,PetscInt nz,const PetscInt nnz[],PetscInt onz,const PetscInt onnz[]),(PETSC_COMM_WORLD,bs,m,n,M,N,nz,nnz,onz,onnz,&A),Mat,A)
315: PetscPolymorphicFunction(MatCreateMPISBAIJ,(PetscInt bs,PetscInt m,PetscInt n,PetscInt M,PetscInt N,PetscInt nz,PetscInt nnz),(PETSC_COMM_WORLD,bs,m,n,M,N,nz,PETSC_NULL,nnz,PETSC_NULL,&A),Mat,A)
316: PetscPolymorphicFunction(MatCreateMPISBAIJ,(PetscInt bs,PetscInt m,PetscInt n,PetscInt M,PetscInt N,const PetscInt nnz[],const PetscInt onz[]),(PETSC_COMM_WORLD,bs,m,n,M,N,0,nnz,0,onz,&A),Mat,A)
317: PetscPolymorphicFunction(MatCreateMPISBAIJ,(PetscInt bs,PetscInt m,PetscInt n,PetscInt M,PetscInt N),(PETSC_COMM_WORLD,bs,m,n,M,N,0,PETSC_NULL,0,PETSC_NULL,&A),Mat,A)
318: PetscPolymorphicSubroutine(MatCreateMPISBAIJ,(PetscInt bs,PetscInt m,PetscInt n,PetscInt M,PetscInt N,PetscInt nz,PetscInt nnz,Mat *A),(PETSC_COMM_WORLD,bs,m,n,M,N,nz,PETSC_NULL,nnz,PETSC_NULL,A))
319: PetscPolymorphicSubroutine(MatCreateMPISBAIJ,(PetscInt bs,PetscInt m,PetscInt n,PetscInt M,PetscInt N,const PetscInt nnz[],const PetscInt onz[],Mat *A),(PETSC_COMM_WORLD,bs,m,n,M,N,0,nnz,0,onz,A))
320: PetscPolymorphicSubroutine(MatCreateMPISBAIJ,(PetscInt bs,PetscInt m,PetscInt n,PetscInt M,PetscInt N,Mat *A),(PETSC_COMM_WORLD,bs,m,n,M,N,0,PETSC_NULL,0,PETSC_NULL,A))

325: PetscPolymorphicFunction(MatCreateShell,(MPI_Comm comm,PetscInt m,PetscInt n,PetscInt M,PetscInt N,void *ctx),(comm,m,n,M,N,ctx,&A),Mat,A)
326: PetscPolymorphicFunction(MatCreateShell,(PetscInt m,PetscInt n,PetscInt M,PetscInt N,void *ctx),(PETSC_COMM_WORLD,m,n,M,N,ctx,&A),Mat,A)
329: PetscPolymorphicFunction(MatCreateNormal,(Mat mat),(mat,&A),Mat,A)


347: typedef enum {MAT_COMPOSITE_ADDITIVE,MAT_COMPOSITE_MULTIPLICATIVE} MatCompositeType;






370: /* ------------------------------------------------------------*/

377: /*S
378:      MatStencil - Data structure (C struct) for storing information about a single row or
379:        column of a matrix as an index on an associated grid.

381:    Level: beginner

383:   Concepts: matrix; linear operator

385: .seealso:  MatSetValuesStencil(), MatSetStencil(), MatSetValuesBlockStencil()
386: S*/
387: typedef struct {
388:   PetscInt k,j,i,c;
389: } MatStencil;
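
/*
   Sketch of MatStencil with MatSetValuesStencil(), assuming a matrix A obtained from a
   2d structured (DA) grid so that the stencil-to-index mapping is attached, and grid
   indices i,j coming from the surrounding loop.
.vb
     MatStencil     row,col[2];
     PetscScalar    v[2];
     PetscErrorCode ierr;

     row.i = i;      row.j = j;     row.c = 0;
     col[0].i = i;   col[0].j = j;  col[0].c = 0;  v[0] = 2.0;
     col[1].i = i+1; col[1].j = j;  col[1].c = 0;  v[1] = -1.0;
     ierr = MatSetValuesStencil(A,1,&row,2,col,v,INSERT_VALUES);CHKERRQ(ierr);
.ve
*/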



399: /*E
400:     MatAssemblyType - Indicates if the matrix is now to be used (MAT_FINAL_ASSEMBLY), or if you plan
401:      to continue adding values to it (MAT_FLUSH_ASSEMBLY)

403:     Level: beginner

405: .seealso: MatAssemblyBegin(), MatAssemblyEnd()
406: E*/
407: typedef enum {MAT_FLUSH_ASSEMBLY=1,MAT_FINAL_ASSEMBLY=0} MatAssemblyType;
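
/*
   Sketch of the two MatAssemblyType values, assuming a matrix A created elsewhere:
   MAT_FLUSH_ASSEMBLY lets you switch between INSERT_VALUES and ADD_VALUES while still
   setting entries, while MAT_FINAL_ASSEMBLY must be used once before the matrix is used.
.vb
     PetscInt       row = 0,col = 0;
     PetscScalar    v = 1.0;
     PetscErrorCode ierr;

     ierr = MatSetValues(A,1,&row,1,&col,&v,INSERT_VALUES);CHKERRQ(ierr);
     ierr = MatAssemblyBegin(A,MAT_FLUSH_ASSEMBLY);CHKERRQ(ierr);
     ierr = MatAssemblyEnd(A,MAT_FLUSH_ASSEMBLY);CHKERRQ(ierr);
     ierr = MatSetValues(A,1,&row,1,&col,&v,ADD_VALUES);CHKERRQ(ierr);
     ierr = MatAssemblyBegin(A,MAT_FINAL_ASSEMBLY);CHKERRQ(ierr);
     ierr = MatAssemblyEnd(A,MAT_FINAL_ASSEMBLY);CHKERRQ(ierr);
.ve
*/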



414: /*E
415:     MatOption - Options that may be set for a matrix and its behavior or storage

417:     Level: beginner

419:    Any additions/changes here MUST also be made in include/finclude/petscmat.h

421: .seealso: MatSetOption()
422: E*/
423: typedef enum {MAT_ROW_ORIENTED,MAT_NEW_NONZERO_LOCATIONS,
424:               MAT_SYMMETRIC,
425:               MAT_STRUCTURALLY_SYMMETRIC,
426:               MAT_NEW_DIAGONALS,MAT_IGNORE_OFF_PROC_ENTRIES,
427:               MAT_NEW_NONZERO_LOCATION_ERR,
428:               MAT_NEW_NONZERO_ALLOCATION_ERR,MAT_USE_HASH_TABLE,
429:               MAT_KEEP_NONZERO_PATTERN,MAT_IGNORE_ZERO_ENTRIES,
430:               MAT_USE_INODES,
431:               MAT_HERMITIAN,
432:               MAT_SYMMETRY_ETERNAL,
433:               MAT_CHECK_COMPRESSED_ROW,
434:               MAT_IGNORE_LOWER_TRIANGULAR,MAT_ERROR_LOWER_TRIANGULAR,
435:               MAT_GETROW_UPPERTRIANGULAR,MAT_UNUSED_NONZERO_LOCATION_ERR,
436:               MAT_SPD,MAT_NO_OFF_PROC_ENTRIES,MAT_NO_OFF_PROC_ZERO_ROWS,
437:               NUM_MAT_OPTIONS} MatOption;
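
/*
   Sketch of MatSetOption() with a few of the MatOption values above; A is assumed to be
   created elsewhere and the particular options chosen here are only illustrative.
.vb
     PetscErrorCode ierr;

     ierr = MatSetOption(A,MAT_SYMMETRIC,PETSC_TRUE);CHKERRQ(ierr);
     ierr = MatSetOption(A,MAT_IGNORE_ZERO_ENTRIES,PETSC_TRUE);CHKERRQ(ierr);
     ierr = MatSetOption(A,MAT_NEW_NONZERO_ALLOCATION_ERR,PETSC_TRUE);CHKERRQ(ierr);
.ve
*/
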
441: PetscPolymorphicFunction(MatGetType,(Mat mat),(mat,&t),const MatType,t)

452: PetscPolymorphicFunction(MatGetArray,(Mat mat),(mat,&a),PetscScalar*,a)
455: PetscPolymorphicFunction(MatGetBlockSize,(Mat mat),(mat,&a),PetscInt,a)


462: PetscPolymorphicSubroutine(MatMultAdd,(Mat A,Vec x,Vec y),(A,x,y,y))
466: PetscPolymorphicFunction(MatIsTranspose,(Mat A,Mat B,PetscReal tol),(A,B,tol,&t),PetscBool ,t)
467: PetscPolymorphicFunction(MatIsTranspose,(Mat A,Mat B),(A,B,0,&t),PetscBool ,t)
471: PetscPolymorphicSubroutine(MatMultTransposeAdd,(Mat A,Vec x,Vec y),(A,x,y,y))

476: /*E
477:     MatDuplicateOption - Indicates if a duplicated sparse matrix should have
478:   its numerical values copied over or just its nonzero structure.

480:     Level: beginner

482:    Any additions/changes here MUST also be made in include/finclude/petscmat.h

484: $   MAT_SHARE_NONZERO_PATTERN - the i and j arrays in the new matrix will be shared with the original matrix
485: $                               this also triggers the MAT_DO_NOT_COPY_VALUES option. This is used when you
486: $                               have several matrices with the same nonzero pattern.

488: .seealso: MatDuplicate()
489: E*/
490: typedef enum {MAT_DO_NOT_COPY_VALUES,MAT_COPY_VALUES,MAT_SHARE_NONZERO_PATTERN} MatDuplicateOption;
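
/*
   Sketch of MatDuplicate() with the MatDuplicateOption values; A is an assembled matrix
   created elsewhere. MAT_COPY_VALUES copies both structure and values, while
   MAT_SHARE_NONZERO_PATTERN shares the i and j arrays and leaves the new values zeroed,
   as explained above.
.vb
     Mat            B,C;
     PetscErrorCode ierr;

     ierr = MatDuplicate(A,MAT_COPY_VALUES,&B);CHKERRQ(ierr);
     ierr = MatDuplicate(A,MAT_SHARE_NONZERO_PATTERN,&C);CHKERRQ(ierr);
.ve
*/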

493: PetscPolymorphicFunction(MatConvert,(Mat A,const MatType t),(A,t,MAT_INITIAL_MATRIX,&a),Mat,a)
495: PetscPolymorphicFunction(MatDuplicate,(Mat A,MatDuplicateOption o),(A,o,&a),Mat,a)
496: PetscPolymorphicFunction(MatDuplicate,(Mat A),(A,MAT_COPY_VALUES,&a),Mat,a)


502: PetscPolymorphicFunction(MatIsSymmetric,(Mat A,PetscReal tol),(A,tol,&t),PetscBool ,t)
503: PetscPolymorphicFunction(MatIsSymmetric,(Mat A),(A,0,&t),PetscBool ,t)
505: PetscPolymorphicFunction(MatIsStructurallySymmetric,(Mat A),(A,&t),PetscBool ,t)
507: PetscPolymorphicFunction(MatIsHermitian,(Mat A),(A,0,&t),PetscBool ,t)


518: /*S
519:      MatInfo - Context of matrix information, used with MatGetInfo()

521:    In Fortran this is simply a double precision array of dimension MAT_INFO_SIZE

523:    Level: intermediate

525:   Concepts: matrix^nonzero information

527: .seealso:  MatGetInfo(), MatInfoType
528: S*/
529: typedef struct {
530:   PetscLogDouble block_size;                         /* block size */
531:   PetscLogDouble nz_allocated,nz_used,nz_unneeded;   /* number of nonzeros */
532:   PetscLogDouble memory;                             /* memory allocated */
533:   PetscLogDouble assemblies;                         /* number of matrix assemblies called */
534:   PetscLogDouble mallocs;                            /* number of mallocs during MatSetValues() */
535:   PetscLogDouble fill_ratio_given,fill_ratio_needed; /* fill ratio for LU/ILU */
536:   PetscLogDouble factor_mallocs;                     /* number of mallocs during factorization */
537: } MatInfo;

539: /*E
540:     MatInfoType - Indicates if you want information about the local part of the matrix,
541:      the entire parallel matrix or the maximum over all the local parts.

543:     Level: beginner

545:    Any additions/changes here MUST also be made in include/finclude/petscmat.h

547: .seealso: MatGetInfo(), MatInfo
548: E*/
549: typedef enum {MAT_LOCAL=1,MAT_GLOBAL_MAX=2,MAT_GLOBAL_SUM=3} MatInfoType;
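
/*
   Sketch of MatGetInfo() combining MatInfo and MatInfoType; A is assumed to be an
   assembled matrix created elsewhere.
.vb
     MatInfo        info;
     PetscErrorCode ierr;

     ierr = MatGetInfo(A,MAT_LOCAL,&info);CHKERRQ(ierr);
     ierr = PetscPrintf(PETSC_COMM_SELF,"nz_used %g mallocs %g\n",info.nz_used,info.mallocs);CHKERRQ(ierr);
.ve
*/
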
558: PetscPolymorphicFunction(MatTranspose,(Mat A),(A,MAT_INITIAL_MATRIX,&t),Mat,t)
561: PetscPolymorphicFunction(MatPermute,(Mat A,IS is1,IS is2),(A,is1,is2,&t),Mat,t)
565: PetscPolymorphicFunction(MatEqual,(Mat A,Mat B),(A,B,&t),PetscBool ,t)

572: PetscPolymorphicFunction(MatNorm,(Mat A,NormType t),(A,t,&n),PetscReal,n)




609: #if defined (PETSC_USE_CTABLE)
 610:  #include "petscctable.h"
612: #else
614: #endif









653: PetscPolymorphicSubroutine(MatInterpolateAdd,(Mat A,Vec x,Vec y),(A,x,y,y))

660: /*MC
661:    MatSetValue - Set a single entry into a matrix.

663:    Not collective

665:    Input Parameters:
666: +  m - the matrix
667: .  row - the row location of the entry
668: .  col - the column location of the entry
669: .  value - the value to insert
670: -  mode - either INSERT_VALUES or ADD_VALUES

672:    Notes: 
673:    For efficiency one should use MatSetValues() and set several or many
674:    values simultaneously if possible.

676:    Level: beginner

678: .seealso: MatSetValues(), MatSetValueLocal()
679: M*/
680: PETSC_STATIC_INLINE PetscErrorCode MatSetValue(Mat v,PetscInt i,PetscInt j,PetscScalar va,InsertMode mode) {return MatSetValues(v,1,&i,1,&j,&va,mode);}
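
/*
   As the note above says, MatSetValues() with several entries at once is preferred over
   repeated MatSetValue() calls; a sketch inserting one full row of an assumed matrix A.
.vb
     PetscInt       row = 2,cols[3] = {1,2,3};
     PetscScalar    vals[3] = {-1.0,2.0,-1.0};
     PetscErrorCode ierr;

     ierr = MatSetValues(A,1,&row,3,cols,vals,INSERT_VALUES);CHKERRQ(ierr);
.ve
*/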

682: PETSC_STATIC_INLINE PetscErrorCode MatGetValue(Mat v,PetscInt i,PetscInt j,PetscScalar *va) {return MatGetValues(v,1,&i,1,&j,va);}

684: PETSC_STATIC_INLINE PetscErrorCode MatSetValueLocal(Mat v,PetscInt i,PetscInt j,PetscScalar va,InsertMode mode) {return MatSetValuesLocal(v,1,&i,1,&j,&va,mode);}

686: /*MC
687:    MatPreallocateInitialize - Begins the block of code that will count the number of nonzeros per
688:        row in a matrix providing the data that one can use to correctly preallocate the matrix.

690:    Synopsis:
691:    PetscErrorCode MatPreallocateInitialize(MPI_Comm comm, PetscInt nrows, PetscInt ncols, PetscInt *dnz, PetscInt *onz)

693:    Collective on MPI_Comm

695:    Input Parameters:
696: +  comm - the communicator that will share the eventually allocated matrix
697: .  nrows - the number of LOCAL rows in the matrix
698: -  ncols - the number of LOCAL columns in the matrix

700:    Output Parameters:
701: +  dnz - the array that will be passed to the matrix preallocation routines
 702: -  onz - the other array passed to the matrix preallocation routines


705:    Level: intermediate

707:    Notes:
 708:     See the <A href="../../docs/manual.pdf#nameddest=Chapter 12 Hints for Performance Tuning">Hints for Performance Improvement</A> chapter in the users manual for more details.

 710:    Do not malloc or free dnz and onz; that is handled internally by these routines

 712:    Use MatPreallocateSymmetricInitialize() for symmetric matrices (MPISBAIJ matrices)

 714:    This is a MACRO not a function because it has a leading { that is closed by MatPreallocateFinalize().

716:   Concepts: preallocation^Matrix

718: .seealso: MatPreallocateFinalize(), MatPreallocateSet(), MatPreallocateSymmetricSet(), MatPreallocateSetLocal(),
 719:           MatPreallocateSymmetricInitialize(), MatPreallocateSymmetricSetLocal()
720: M*/
721: #define MatPreallocateInitialize(comm,nrows,ncols,dnz,onz) 0; \
722: { \
723:   PetscErrorCode _4_ierr; PetscInt __nrows = (nrows),__ctmp = (ncols),__rstart,__start,__end; \
 724:   _4_ierr = PetscMalloc2(__nrows,PetscInt,&dnz,__nrows,PetscInt,&onz);CHKERRQ(_4_ierr); \
 725:   _4_ierr = PetscMemzero(dnz,__nrows*sizeof(PetscInt));CHKERRQ(_4_ierr);\
 726:   _4_ierr = PetscMemzero(onz,__nrows*sizeof(PetscInt));CHKERRQ(_4_ierr);\
 727:   _4_ierr = MPI_Scan(&__ctmp,&__end,1,MPIU_INT,MPI_SUM,comm);CHKERRQ(_4_ierr); __start = __end - __ctmp;\
 728:   _4_ierr = MPI_Scan(&__nrows,&__rstart,1,MPIU_INT,MPI_SUM,comm);CHKERRQ(_4_ierr); __rstart = __rstart - __nrows;

730: /*MC
731:    MatPreallocateSymmetricInitialize - Begins the block of code that will count the number of nonzeros per
732:        row in a matrix providing the data that one can use to correctly preallocate the matrix.

734:    Synopsis:
735:    PetscErrorCode MatPreallocateSymmetricInitialize(MPI_Comm comm, PetscInt nrows, PetscInt ncols, PetscInt *dnz, PetscInt *onz)

737:    Collective on MPI_Comm

739:    Input Parameters:
740: +  comm - the communicator that will share the eventually allocated matrix
741: .  nrows - the number of LOCAL rows in the matrix
742: -  ncols - the number of LOCAL columns in the matrix

744:    Output Parameters:
745: +  dnz - the array that will be passed to the matrix preallocation routines
 746: -  onz - the other array passed to the matrix preallocation routines


749:    Level: intermediate

751:    Notes:
 752:     See the <A href="../../docs/manual.pdf#nameddest=Chapter 12 Hints for Performance Tuning">Hints for Performance Improvement</A> chapter in the users manual for more details.

 754:    Do not malloc or free dnz and onz; that is handled internally by these routines

 756:    This is a MACRO not a function because it has a leading { that is closed by MatPreallocateFinalize().

758:   Concepts: preallocation^Matrix

760: .seealso: MatPreallocateFinalize(), MatPreallocateSet(), MatPreallocateSymmetricSet(), MatPreallocateSetLocal(),
761:           MatPreallocateInitialize(), MatPreallocateSymmetricSetLocal()
762: M*/
763: #define MatPreallocateSymmetricInitialize(comm,nrows,ncols,dnz,onz) 0; \
764: { \
765:   PetscErrorCode _4_ierr; PetscInt __nrows = (nrows),__ctmp = (ncols),__rstart,__end; \
 766:   _4_ierr = PetscMalloc2(__nrows,PetscInt,&dnz,__nrows,PetscInt,&onz);CHKERRQ(_4_ierr); \
 767:   _4_ierr = PetscMemzero(dnz,__nrows*sizeof(PetscInt));CHKERRQ(_4_ierr);\
 768:   _4_ierr = PetscMemzero(onz,__nrows*sizeof(PetscInt));CHKERRQ(_4_ierr);\
 769:   _4_ierr = MPI_Scan(&__ctmp,&__end,1,MPIU_INT,MPI_SUM,comm);CHKERRQ(_4_ierr);\
 770:   _4_ierr = MPI_Scan(&__nrows,&__rstart,1,MPIU_INT,MPI_SUM,comm);CHKERRQ(_4_ierr); __rstart = __rstart - __nrows;

772: /*MC
773:    MatPreallocateSetLocal - Indicates the locations (rows and columns) in the matrix where nonzeros will be
 774:        inserted using a local numbering of the rows and columns

776:    Synopsis:
 777:    PetscErrorCode MatPreallocateSetLocal(ISLocalToGlobalMapping rmap,PetscInt nrows,PetscInt *rows,ISLocalToGlobalMapping cmap,PetscInt ncols,PetscInt *cols,PetscInt *dnz,PetscInt *onz)

779:    Not Collective

781:    Input Parameters:
 782: +  rmap - the row mapping from local numbering to global numbering
783: .  nrows - the number of rows indicated
784: .  rows - the indices of the rows
785: .  cmap - the column mapping from local to global numbering
786: .  ncols - the number of columns in the matrix
787: .  cols - the columns indicated
788: .  dnz - the array that will be passed to the matrix preallocation routines
 789: -  onz - the other array passed to the matrix preallocation routines


792:    Level: intermediate

794:    Notes:
 795:     See the <A href="../../docs/manual.pdf#nameddest=Chapter 12 Hints for Performance Tuning">Hints for Performance Improvement</A> chapter in the users manual for more details.

 797:    Do not malloc or free dnz and onz; that is handled internally by these routines

799:   Concepts: preallocation^Matrix

801: .seealso: MatPreallocateFinalize(), MatPreallocateSet(), MatPreallocateSymmetricSet(), MatPreallocateInitialize(),
802:           MatPreallocateInitialize(), MatPreallocateSymmetricSetLocal()
803: M*/
804: #define MatPreallocateSetLocal(rmap,nrows,rows,cmap,ncols,cols,dnz,onz) 0; \
805: {\
806:   PetscInt __l;\
 807:   _4_ierr = ISLocalToGlobalMappingApply(rmap,nrows,rows,rows);CHKERRQ(_4_ierr);\
 808:   _4_ierr = ISLocalToGlobalMappingApply(cmap,ncols,cols,cols);CHKERRQ(_4_ierr);\
 809:   for (__l=0;__l<nrows;__l++) {\
 810:     _4_ierr = MatPreallocateSet((rows)[__l],ncols,cols,dnz,onz);CHKERRQ(_4_ierr);\
811:   }\
812: }
813: 
814: /*MC
815:    MatPreallocateSymmetricSetLocal - Indicates the locations (rows and columns) in the matrix where nonzeros will be
 816:        inserted using a local numbering of the rows and columns

818:    Synopsis:
 819:    PetscErrorCode MatPreallocateSymmetricSetLocal(ISLocalToGlobalMapping map,PetscInt nrows, PetscInt *rows,PetscInt ncols, PetscInt *cols,PetscInt *dnz, PetscInt *onz)

821:    Not Collective

823:    Input Parameters:
824: +  map - the mapping between local numbering and global numbering
825: .  nrows - the number of rows indicated
826: .  rows - the indices of the rows 
827: .  ncols - the number of columns in the matrix
828: .  cols - the columns indicated
829: .  dnz - the array that will be passed to the matrix preallocation routines
 830: -  onz - the other array passed to the matrix preallocation routines


833:    Level: intermediate

835:    Notes:
 836:     See the <A href="../../docs/manual.pdf#nameddest=Chapter 12 Hints for Performance Tuning">Hints for Performance Improvement</A> chapter in the users manual for more details.

 838:    Do not malloc or free dnz and onz; that is handled internally by these routines

840:   Concepts: preallocation^Matrix

842: .seealso: MatPreallocateFinalize(), MatPreallocateSet(), MatPreallocateSymmetricSet(), MatPreallocateInitialize(),
843:           MatPreallocateInitialize(), MatPreallocateSymmetricSetLocal(), MatPreallocateSetLocal()
844: M*/
845: #define MatPreallocateSymmetricSetLocal(map,nrows,rows,ncols,cols,dnz,onz) 0;\
846: {\
847:   PetscInt __l;\
 848:   _4_ierr = ISLocalToGlobalMappingApply(map,nrows,rows,rows);CHKERRQ(_4_ierr);\
 849:   _4_ierr = ISLocalToGlobalMappingApply(map,ncols,cols,cols);CHKERRQ(_4_ierr);\
 850:   for (__l=0;__l<nrows;__l++) {\
 851:     _4_ierr = MatPreallocateSymmetricSet((rows)[__l],ncols,cols,dnz,onz);CHKERRQ(_4_ierr);\
852:   }\
853: }

855: /*MC
856:    MatPreallocateSet - Indicates the locations (rows and columns) in the matrix where nonzeros will be
 857:        inserted using a local numbering of the rows and columns

859:    Synopsis:
 860:    PetscErrorCode MatPreallocateSet(PetscInt row,PetscInt ncols,PetscInt *cols,PetscInt *dnz,PetscInt *onz)

862:    Not Collective

864:    Input Parameters:
865: +  row - the row
866: .  ncols - the number of columns in the matrix
867: -  cols - the columns indicated

869:    Output Parameters:
870: +  dnz - the array that will be passed to the matrix preallocation routines
 871: -  onz - the other array passed to the matrix preallocation routines


874:    Level: intermediate

876:    Notes:
 877:     See the <A href="../../docs/manual.pdf#nameddest=Chapter 12 Hints for Performance Tuning">Hints for Performance Improvement</A> chapter in the users manual for more details.

 879:    Do not malloc or free dnz and onz; that is handled internally by these routines

881:    This is a MACRO not a function because it uses variables declared in MatPreallocateInitialize().

883:   Concepts: preallocation^Matrix

885: .seealso: MatPreallocateFinalize(), MatPreallocateSet(), MatPreallocateSymmetricSet(), MatPreallocateInitialize(),
886:           MatPreallocateInitialize(), MatPreallocateSymmetricSetLocal(), MatPreallocateSetLocal()
887: M*/
888: #define MatPreallocateSet(row,nc,cols,dnz,onz) 0;\
889: { PetscInt __i; \
890:   if (row < __rstart) SETERRQ2(PETSC_COMM_SELF,PETSC_ERR_ARG_OUTOFRANGE,"Trying to set preallocation for row %D less than first local row %D",row,__rstart);\
891:   if (row >= __rstart+__nrows) SETERRQ2(PETSC_COMM_SELF,PETSC_ERR_ARG_OUTOFRANGE,"Trying to set preallocation for row %D greater than last local row %D",row,__rstart+__nrows-1);\
892:   for (__i=0; __i<nc; __i++) {\
893:     if ((cols)[__i] < __start || (cols)[__i] >= __end) onz[row - __rstart]++; \
894:     else dnz[row - __rstart]++;\
895:   }\
896: }

898: /*MC
899:    MatPreallocateSymmetricSet - Indicates the locations (rows and columns) in the matrix where nonzeros will be
 900:        inserted using a local numbering of the rows and columns

902:    Synopsis:
 903:    PetscErrorCode MatPreallocateSymmetricSet(PetscInt row,PetscInt ncols,PetscInt *cols,PetscInt *dnz,PetscInt *onz)

905:    Not Collective

907:    Input Parameters:
 908: +  row - the row
 909: .  ncols - the number of columns in the matrix
 910: .  cols - the columns indicated
 911: .  dnz - the array that will be passed to the matrix preallocation routines
 912: -  onz - the other array passed to the matrix preallocation routines


916:    Level: intermediate

918:    Notes:
 919:     See the <A href="../../docs/manual.pdf#nameddest=Chapter 12 Hints for Performance Tuning">Hints for Performance Improvement</A> chapter in the users manual for more details.

 921:    Do not malloc or free dnz and onz; that is handled internally by these routines

923:    This is a MACRO not a function because it uses variables declared in MatPreallocateInitialize().

925:   Concepts: preallocation^Matrix

927: .seealso: MatPreallocateFinalize(), MatPreallocateSet(), MatPreallocateSymmetricSet(), MatPreallocateInitialize(),
928:           MatPreallocateInitialize(), MatPreallocateSymmetricSetLocal(), MatPreallocateSetLocal()
929: M*/
930: #define MatPreallocateSymmetricSet(row,nc,cols,dnz,onz) 0;\
931: { PetscInt __i; \
932:   for (__i=0; __i<nc; __i++) {\
933:     if (cols[__i] >= __end) onz[row - __rstart]++; \
934:     else if (cols[__i] >= row) dnz[row - __rstart]++;\
935:   }\
936: }

938: /*MC
 939:    MatPreallocateLocation - An alternative to MatPreallocateSet() that puts the nonzero locations into the matrix if it exists

941:    Synopsis:
 942:    PetscErrorCode MatPreallocateLocation(Mat A,PetscInt row,PetscInt ncols,PetscInt *cols,PetscInt *dnz,PetscInt *onz)

944:    Not Collective

946:    Input Parameters:
 947: +  A - matrix
948: .  row - row where values exist (must be local to this process)
949: .  ncols - number of columns
950: .  cols - columns with nonzeros
951: .  dnz - the array that will be passed to the matrix preallocation routines
 952: -  onz - the other array passed to the matrix preallocation routines


955:    Level: intermediate

957:    Notes:
 958:     See the <A href="../../docs/manual.pdf#nameddest=Chapter 12 Hints for Performance Tuning">Hints for Performance Improvement</A> chapter in the users manual for more details.

 960:    Do not malloc or free dnz and onz; that is handled internally by these routines

 962:    This is a MACRO not a function because it uses a bunch of variables private to the MatPreallocate...() routines.

964:   Concepts: preallocation^Matrix

966: .seealso: MatPreallocateInitialize(), MatPreallocateSet(), MatPreallocateSymmetricSet(), MatPreallocateSetLocal(),
967:           MatPreallocateSymmetricInitialize(), MatPreallocateSymmetricSetLocal()
968: M*/
969: #define MatPreallocateLocation(A,row,ncols,cols,dnz,onz) 0;if (A) {MatSetValues(A,1,&row,ncols,cols,PETSC_NULL,INSERT_VALUES);} else { MatPreallocateSet(row,ncols,cols,dnz,onz);}


972: /*MC
973:    MatPreallocateFinalize - Ends the block of code that will count the number of nonzeros per
974:        row in a matrix providing the data that one can use to correctly preallocate the matrix.

976:    Synopsis:
977:    PetscErrorCode MatPreallocateFinalize(PetscInt *dnz, PetscInt *onz)

979:    Collective on MPI_Comm

981:    Input Parameters:
 982: +  dnz - the array that was passed to the matrix preallocation routines
 983: -  onz - the other array passed to the matrix preallocation routines


986:    Level: intermediate

988:    Notes:
 989:     See the <A href="../../docs/manual.pdf#nameddest=Chapter 12 Hints for Performance Tuning">Hints for Performance Improvement</A> chapter in the users manual for more details.

 991:    Do not malloc or free dnz and onz; that is handled internally by these routines

993:    This is a MACRO not a function because it closes the { started in MatPreallocateInitialize().

995:   Concepts: preallocation^Matrix

997: .seealso: MatPreallocateInitialize(), MatPreallocateSet(), MatPreallocateSymmetricSet(), MatPreallocateSetLocal(),
998:           MatPreallocateSymmetricInitialize(), MatPreallocateSymmetricSetLocal()
999: M*/
1000: #define MatPreallocateFinalize(dnz,onz) 0;_4_ierr = PetscFree2(dnz,onz);CHKERRQ(_4_ierr);}
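
/*
   Sketch of the whole MatPreallocate* pattern defined above for a toy problem where each
   locally owned row touches only its diagonal; comm, the local sizes m and n, the first
   owned global row rstart, and the matrix A are assumed to come from surrounding code.
.vb
     PetscInt       *dnz,*onz,row,col;
     PetscErrorCode ierr;

     ierr = MatPreallocateInitialize(comm,m,n,dnz,onz);CHKERRQ(ierr);
     for (row=rstart; row<rstart+m; row++) {
       col  = row;
       ierr = MatPreallocateSet(row,1,&col,dnz,onz);CHKERRQ(ierr);
     }
     ierr = MatMPIAIJSetPreallocation(A,0,dnz,0,onz);CHKERRQ(ierr);
     ierr = MatPreallocateFinalize(dnz,onz);CHKERRQ(ierr);
.ve
*/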



1004: /* Routines unique to particular data structures */
1006: PetscPolymorphicFunction(MatShellGetContext,(Mat A),(A,&t),void*,t)



1017: #define MAT_SKIP_ALLOCATION -4

1020: PetscPolymorphicSubroutine(MatSeqBAIJSetPreallocation,(Mat A,PetscInt bs,const PetscInt nnz[]),(A,bs,0,nnz))
1022: PetscPolymorphicSubroutine(MatSeqSBAIJSetPreallocation,(Mat A,PetscInt bs,const PetscInt nnz[]),(A,bs,0,nnz))
1024: PetscPolymorphicSubroutine(MatSeqAIJSetPreallocation,(Mat A,const PetscInt nnz[]),(A,0,nnz))

1027: PetscPolymorphicSubroutine(MatMPIBAIJSetPreallocation,(Mat A,PetscInt bs,const PetscInt nnz[],const PetscInt onz[]),(A,bs,0,nnz,0,onz))




1050: /* 
1051:   These routines are not usually accessed directly, rather solving is 
1052:   done through the KSP and PC interfaces.
1053: */

1055: /*E
1056:     MatOrderingType - String with the name of a PETSc matrix ordering or the creation function
1057:        with an optional dynamic library name, for example 
1058:        http://www.mcs.anl.gov/petsc/lib.a:orderingcreate()

1060:    Level: beginner

1062:    Cannot use const because the PC objects manipulate the string

1064: .seealso: MatGetOrdering()
1065: E*/
1066: #define MatOrderingType char*
1067: #define MATORDERINGNATURAL     "natural"
1068: #define MATORDERINGND          "nd"
1069: #define MATORDERING1WD         "1wd"
1070: #define MATORDERINGRCM         "rcm"
1071: #define MATORDERINGQMD         "qmd"
1072: #define MATORDERINGROWLENGTH   "rowlength"
1073: #define MATORDERINGAMD         "amd"            /* only works if UMFPACK is installed with PETSc */
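
/*
   Sketch of requesting a fill-reducing ordering by name with MatGetOrdering(); A is an
   assembled matrix created elsewhere and the resulting index sets would typically be
   passed to MatLUFactorSymbolic() or MatPermute().
.vb
     IS             rperm,cperm;
     PetscErrorCode ierr;

     ierr = MatGetOrdering(A,MATORDERINGRCM,&rperm,&cperm);CHKERRQ(ierr);
     ierr = ISDestroy(&rperm);CHKERRQ(ierr);
     ierr = ISDestroy(&cperm);CHKERRQ(ierr);
.ve
*/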


1079: /*MC
1080:    MatOrderingRegisterDynamic - Adds a new sparse matrix ordering to the matrix package. 

1082:    Synopsis:
1083:    PetscErrorCode MatOrderingRegisterDynamic(const char *name_ordering,const char *path,const char *name_create,PetscErrorCode (*routine_create)(MatOrdering))

1085:    Not Collective

1087:    Input Parameters:
1088: +  sname - name of ordering (for example MATORDERINGND)
1089: .  path - location of library where creation routine is 
1090: .  name - name of function that creates the ordering type, a string
1091: -  function - function pointer that creates the ordering

1093:    Level: developer

1095:    If dynamic libraries are used, then the fourth input argument (function)
1096:    is ignored.

1098:    Sample usage:
1099: .vb
1100:    MatOrderingRegisterDynamic("my_order","/home/username/my_lib/lib/libO/solaris/mylib.a",
1101:                "MyOrder",MyOrder);
1102: .ve

1104:    Then, your ordering can be chosen with the procedural interface via
1105: $     MatOrderingSetType(part,"my_order")
1106:    or at runtime via the option
1107: $     -pc_factor_mat_ordering_type my_order

1109:    ${PETSC_ARCH} occurring in pathname will be replaced with appropriate values.

1111: .keywords: matrix, ordering, register

1113: .seealso: MatOrderingRegisterDestroy(), MatOrderingRegisterAll()
1114: M*/
1115: #if defined(PETSC_USE_DYNAMIC_LIBRARIES)
1116: #define MatOrderingRegisterDynamic(a,b,c,d) MatOrderingRegister(a,b,c,0)
1117: #else
1118: #define MatOrderingRegisterDynamic(a,b,c,d) MatOrderingRegister(a,b,c,d)
1119: #endif



1128: /*S
1129:     MatFactorShiftType - Type of numeric shift used during factorization to avoid zero pivots.

1131:    Level: beginner

1133: S*/
1134: typedef enum {MAT_SHIFT_NONE,MAT_SHIFT_NONZERO,MAT_SHIFT_POSITIVE_DEFINITE,MAT_SHIFT_INBLOCKS} MatFactorShiftType;

1137: /*S 
1138:    MatFactorInfo - Data passed into the matrix factorization routines

1140:    In Fortran these are simply double precision arrays of size MAT_FACTORINFO_SIZE, that is, use
1141: $     MatFactorInfo  info(MAT_FACTORINFO_SIZE)

1143:    Notes: These are not usually used directly by users; instead use the PC types LU, ILU, CHOLESKY, or ICC.

1145:       You can use MatFactorInfoInitialize() to set default values.

1147:    Level: developer

1149: .seealso: MatLUFactorSymbolic(), MatILUFactorSymbolic(), MatCholeskyFactorSymbolic(), MatICCFactorSymbolic(), MatICCFactor(), 
1150:           MatFactorInfoInitialize()

1152: S*/
1153: typedef struct {
1154:   PetscReal     diagonal_fill;  /* force diagonal to fill in if initially not filled */
1155:   PetscReal     usedt;
1156:   PetscReal     dt;             /* drop tolerance */
1157:   PetscReal     dtcol;          /* tolerance for pivoting */
1158:   PetscReal     dtcount;        /* maximum nonzeros to be allowed per row */
1159:   PetscReal     fill;           /* expected fill, nonzeros in factored matrix/nonzeros in original matrix */
1160:   PetscReal     levels;         /* ICC/ILU(levels) */
1161:   PetscReal     pivotinblocks;  /* for BAIJ and SBAIJ matrices pivot in factorization on blocks, default 1.0;
1162:                                    factorization may be faster if you do not pivot */
1163:   PetscReal     zeropivot;      /* pivot is called zero if less than this */
1164:   PetscReal     shifttype;      /* type of shift added to matrix factor to prevent zero pivots */
1165:   PetscReal     shiftamount;     /* how large the shift is */
1166: } MatFactorInfo;
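
/*
   Sketch tying MatFactorInfo to the factorization entry points (normally reached through
   the PC interface, as noted above); A is assembled elsewhere, F was obtained with
   MatGetFactor(), and b, x are existing vectors.
.vb
     MatFactorInfo  info;
     IS             rperm,cperm;
     PetscErrorCode ierr;

     ierr = MatFactorInfoInitialize(&info);CHKERRQ(ierr);
     info.fill = 2.0;
     ierr = MatGetOrdering(A,MATORDERINGND,&rperm,&cperm);CHKERRQ(ierr);
     ierr = MatLUFactorSymbolic(F,A,rperm,cperm,&info);CHKERRQ(ierr);
     ierr = MatLUFactorNumeric(F,A,&info);CHKERRQ(ierr);
     ierr = MatSolve(F,b,x);CHKERRQ(ierr);
.ve
*/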



1190: /*E
1191:     MatSORType - What type of (S)SOR to perform

1193:     Level: beginner

1195:    May be bitwise ORd together

1197:    Any additions/changes here MUST also be made in include/finclude/petscmat.h

1199:    MatSORType may be bitwise ORd together, so do not change the numbers 

1201: .seealso: MatSOR()
1202: E*/
1203: typedef enum {SOR_FORWARD_SWEEP=1,SOR_BACKWARD_SWEEP=2,SOR_SYMMETRIC_SWEEP=3,
1204:               SOR_LOCAL_FORWARD_SWEEP=4,SOR_LOCAL_BACKWARD_SWEEP=8,
1205:               SOR_LOCAL_SYMMETRIC_SWEEP=12,SOR_ZERO_INITIAL_GUESS=16,
1206:               SOR_EISENSTAT=32,SOR_APPLY_UPPER=64,SOR_APPLY_LOWER=128} MatSORType;
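
/*
   Sketch of combining MatSORType flags by bitwise OR in a MatSOR() call (one local
   symmetric sweep with a zero initial guess); A, b, x are assumed to exist and the
   relaxation parameters here are only illustrative.
.vb
     PetscErrorCode ierr;

     ierr = MatSOR(A,b,1.0,(MatSORType)(SOR_LOCAL_SYMMETRIC_SWEEP | SOR_ZERO_INITIAL_GUESS),
                   0.0,1,1,x);CHKERRQ(ierr);
.ve
*/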

1209: /* 
1210:     These routines are for efficiently computing Jacobians via finite differences.
1211: */

1213: /*E
1214:     MatColoringType - String with the name of a PETSc matrix coloring or the creation function
1215:        with an optional dynamic library name, for example 
1216:        http://www.mcs.anl.gov/petsc/lib.a:coloringcreate()

1218:    Level: beginner

1220: .seealso: MatGetColoring()
1221: E*/
1222: #define MatColoringType char*
1223: #define MATCOLORINGNATURAL "natural"
1224: #define MATCOLORINGSL      "sl"
1225: #define MATCOLORINGLF      "lf"
1226: #define MATCOLORINGID      "id"


1231: /*MC
1232:    MatColoringRegisterDynamic - Adds a new sparse matrix coloring to the 
1233:                                matrix package. 

1235:    Synopsis:
1236:    PetscErrorCode MatColoringRegisterDynamic(const char *name_coloring,const char *path,const char *name_create,PetscErrorCode (*routine_create)(MatColoring))

1238:    Not Collective

1240:    Input Parameters:
1241: +  sname - name of Coloring (for example MATCOLORINGSL)
1242: .  path - location of library where creation routine is 
1243: .  name - name of function that creates the Coloring type, a string
1244: -  function - function pointer that creates the coloring

1246:    Level: developer

1248:    If dynamic libraries are used, then the fourth input argument (function)
1249:    is ignored.

1251:    Sample usage:
1252: .vb
1253:    MatColoringRegisterDynamic("my_color","/home/username/my_lib/lib/libO/solaris/mylib.a",
1254:                "MyColor",MyColor);
1255: .ve

1257:    Then, your coloring can be chosen with the procedural interface via
1258: $     MatColoringSetType(part,"my_color")
1259:    or at runtime via the option
1260: $     -mat_coloring_type my_color

1262:    ${PETSC_ARCH} occurring in pathname will be replaced with appropriate values.

1264: .keywords: matrix, Coloring, register

1266: .seealso: MatColoringRegisterDestroy(), MatColoringRegisterAll()
1267: M*/
1268: #if defined(PETSC_USE_DYNAMIC_LIBRARIES)
1269: #define MatColoringRegisterDynamic(a,b,c,d) MatColoringRegister(a,b,c,0)
1270: #else
1271: #define MatColoringRegisterDynamic(a,b,c,d) MatColoringRegister(a,b,c,d)
1272: #endif



1280: /*S
1281:      MatFDColoring - Object for computing a sparse Jacobian via finite differences
1282:         and coloring

1284:    Level: beginner

1286:   Concepts: coloring, sparse Jacobian, finite differences

1288: .seealso:  MatFDColoringCreate()
1289: S*/
1290: typedef struct _p_MatFDColoring* MatFDColoring;
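
/*
   Sketch of how MatColoringType and MatFDColoring fit together for finite-difference
   Jacobians: color the nonzero pattern of an assembled matrix J (created elsewhere), then
   build the MatFDColoring context from that coloring; the function-evaluation callback
   the context eventually needs is omitted here.
.vb
     ISColoring     iscoloring;
     MatFDColoring  fdcoloring;
     PetscErrorCode ierr;

     ierr = MatGetColoring(J,MATCOLORINGSL,&iscoloring);CHKERRQ(ierr);
     ierr = MatFDColoringCreate(J,iscoloring,&fdcoloring);CHKERRQ(ierr);
     ierr = MatFDColoringSetFromOptions(fdcoloring);CHKERRQ(ierr);
     ierr = ISColoringDestroy(&iscoloring);CHKERRQ(ierr);
.ve
*/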

1303: /* 
1304:     These routines are for partitioning matrices: currently used only 
1305:   for adjacency matrix, MatCreateMPIAdj().
1306: */

1308: /*S
1309:      MatPartitioning - Object for managing the partitioning of a matrix or graph

1311:    Level: beginner

1313:   Concepts: partitioning

1315: .seealso:  MatPartitioningCreate(), MatPartitioningType
1316: S*/
1317: typedef struct _p_MatPartitioning* MatPartitioning;

1319: /*E
1320:     MatPartitioningType - String with the name of a PETSc matrix partitioning or the creation function
1321:        with an optional dynamic library name, for example 
1322:        http://www.mcs.anl.gov/petsc/lib.a:partitioningcreate()

1324:    Level: beginner

1326: .seealso: MatPartitioningCreate(), MatPartitioning
1327: E*/
1328: #define MatPartitioningType char*
1329: #define MATPARTITIONINGCURRENT  "current"
1330: #define MATPARTITIONINGSQUARE   "square"
1331: #define MATPARTITIONINGPARMETIS "parmetis"
1332: #define MATPARTITIONINGCHACO    "chaco"
1333: #define MATPARTITIONINGPARTY    "party"
1334: #define MATPARTITIONINGPTSCOTCH "ptscotch"
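
/*
   Sketch of driving a partitioner by name: build a MatPartitioning context for an
   adjacency matrix Adj (for example one from MatCreateMPIAdj(), assumed to exist), apply
   it, and obtain an IS giving the new owner of every local row.
.vb
     MatPartitioning part;
     IS              is;
     PetscErrorCode  ierr;

     ierr = MatPartitioningCreate(PETSC_COMM_WORLD,&part);CHKERRQ(ierr);
     ierr = MatPartitioningSetAdjacency(part,Adj);CHKERRQ(ierr);
     ierr = MatPartitioningSetType(part,MATPARTITIONINGPARMETIS);CHKERRQ(ierr);
     ierr = MatPartitioningApply(part,&is);CHKERRQ(ierr);
     ierr = MatPartitioningDestroy(&part);CHKERRQ(ierr);
.ve
*/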




1348: /*MC
1349:    MatPartitioningRegisterDynamic - Adds a new sparse matrix partitioning to the 
1350:    matrix package. 

1352:    Synopsis:
1353:    PetscErrorCode MatPartitioningRegisterDynamic(const char *name_partitioning,const char *path,const char *name_create,PetscErrorCode (*routine_create)(MatPartitioning))

1355:    Not Collective

1357:    Input Parameters:
1358: +  sname - name of partitioning (for example MATPARTITIONINGCURRENT or MATPARTITIONINGPARMETIS)
1359: .  path - location of library where creation routine is 
1360: .  name - name of function that creates the partitioning type, a string
1361: -  function - function pointer that creates the partitioning type

1363:    Level: developer

1365:    If dynamic libraries are used, then the fourth input argument (function)
1366:    is ignored.

1368:    Sample usage:
1369: .vb
1370:    MatPartitioningRegisterDynamic("my_part","/home/username/my_lib/lib/libO/solaris/mylib.a",
1371:                "MyPartCreate",MyPartCreate);
1372: .ve

1374:    Then, your partitioner can be chosen with the procedural interface via
1375: $     MatPartitioningSetType(part,"my_part")
1376:    or at runtime via the option
1377: $     -mat_partitioning_type my_part

1379:    ${PETSC_ARCH} occurring in pathname will be replaced with appropriate values.

1381: .keywords: matrix, partitioning, register

1383: .seealso: MatPartitioningRegisterDestroy(), MatPartitioningRegisterAll()
1384: M*/
1385: #if defined(PETSC_USE_DYNAMIC_LIBRARIES)
1386: #define MatPartitioningRegisterDynamic(a,b,c,d) MatPartitioningRegister(a,b,c,0)
1387: #else
1388: #define MatPartitioningRegisterDynamic(a,b,c,d) MatPartitioningRegister(a,b,c,d)
1389: #endif





1403: typedef enum { MP_CHACO_MULTILEVEL=1,MP_CHACO_SPECTRAL=2,MP_CHACO_LINEAR=4,MP_CHACO_RANDOM=5,MP_CHACO_SCATTERED=6 } MPChacoGlobalType;
1405: typedef enum { MP_CHACO_KERNIGHAN=1,MP_CHACO_NONE=2 } MPChacoLocalType;
1407: typedef enum { MP_CHACO_LANCZOS=0,MP_CHACO_RQI=1 } MPChacoEigenType;


1422: #define MP_PARTY_OPT "opt"
1423: #define MP_PARTY_LIN "lin"
1424: #define MP_PARTY_SCA "sca"
1425: #define MP_PARTY_RAN "ran"
1426: #define MP_PARTY_GBF "gbf"
1427: #define MP_PARTY_GCF "gcf"
1428: #define MP_PARTY_BUB "bub"
1429: #define MP_PARTY_DEF "def"
1431: #define MP_PARTY_HELPFUL_SETS "hs"
1432: #define MP_PARTY_KERNIGHAN_LIN "kl"
1433: #define MP_PARTY_NONE "no"

1439: typedef enum { MP_PTSCOTCH_QUALITY,MP_PTSCOTCH_SPEED,MP_PTSCOTCH_BALANCE,MP_PTSCOTCH_SAFETY,MP_PTSCOTCH_SCALABILITY } MPPTScotchStrategyType;



1450: /*
1451:     If you add entries here you must also add them to finclude/petscmat.h
1452: */
1453: typedef enum { MATOP_SET_VALUES=0,
1454:                MATOP_GET_ROW=1,
1455:                MATOP_RESTORE_ROW=2,
1456:                MATOP_MULT=3,
1457:                MATOP_MULT_ADD=4,
1458:                MATOP_MULT_TRANSPOSE=5,
1459:                MATOP_MULT_TRANSPOSE_ADD=6,
1460:                MATOP_SOLVE=7,
1461:                MATOP_SOLVE_ADD=8,
1462:                MATOP_SOLVE_TRANSPOSE=9,
1463:                MATOP_SOLVE_TRANSPOSE_ADD=10,
1464:                MATOP_LUFACTOR=11,
1465:                MATOP_CHOLESKYFACTOR=12,
1466:                MATOP_SOR=13,
1467:                MATOP_TRANSPOSE=14,
1468:                MATOP_GETINFO=15,
1469:                MATOP_EQUAL=16,
1470:                MATOP_GET_DIAGONAL=17,
1471:                MATOP_DIAGONAL_SCALE=18,
1472:                MATOP_NORM=19,
1473:                MATOP_ASSEMBLY_BEGIN=20,
1474:                MATOP_ASSEMBLY_END=21,
1475:                MATOP_SET_OPTION=22,
1476:                MATOP_ZERO_ENTRIES=23,
1477:                MATOP_ZERO_ROWS=24,
1478:                MATOP_LUFACTOR_SYMBOLIC=25,
1479:                MATOP_LUFACTOR_NUMERIC=26,
1480:                MATOP_CHOLESKY_FACTOR_SYMBOLIC=27,
1481:                MATOP_CHOLESKY_FACTOR_NUMERIC=28,
1482:                MATOP_SETUP_PREALLOCATION=29,
1483:                MATOP_ILUFACTOR_SYMBOLIC=30,
1484:                MATOP_ICCFACTOR_SYMBOLIC=31,
1485:                MATOP_GET_ARRAY=32,
1486:                MATOP_RESTORE_ARRAY=33,
1487:                MATOP_DUPLICATE=34,
1488:                MATOP_FORWARD_SOLVE=35,
1489:                MATOP_BACKWARD_SOLVE=36,
1490:                MATOP_ILUFACTOR=37,
1491:                MATOP_ICCFACTOR=38,
1492:                MATOP_AXPY=39,
1493:                MATOP_GET_SUBMATRICES=40,
1494:                MATOP_INCREASE_OVERLAP=41,
1495:                MATOP_GET_VALUES=42,
1496:                MATOP_COPY=43,
1497:                MATOP_GET_ROW_MAX=44,
1498:                MATOP_SCALE=45,
1499:                MATOP_SHIFT=46,
1500:                MATOP_DIAGONAL_SET=47,
1501:                MATOP_ILUDT_FACTOR=48,
1502:                MATOP_SET_BLOCK_SIZE=49,
1503:                MATOP_GET_ROW_IJ=50,
1504:                MATOP_RESTORE_ROW_IJ=51,
1505:                MATOP_GET_COLUMN_IJ=52,
1506:                MATOP_RESTORE_COLUMN_IJ=53,
1507:                MATOP_FDCOLORING_CREATE=54,
1508:                MATOP_COLORING_PATCH=55,
1509:                MATOP_SET_UNFACTORED=56,
1510:                MATOP_PERMUTE=57,
1511:                MATOP_SET_VALUES_BLOCKED=58,
1512:                MATOP_GET_SUBMATRIX=59,
1513:                MATOP_DESTROY=60,
1514:                MATOP_VIEW=61,
1515:                MATOP_CONVERT_FROM=62,
1516:                MATOP_USE_SCALED_FORM=63,
1517:                MATOP_SCALE_SYSTEM=64,
1518:                MATOP_UNSCALE_SYSTEM=65,
1519:                MATOP_SET_LOCAL_TO_GLOBAL_MAP=66,
1520:                MATOP_SET_VALUES_LOCAL=67,
1521:                MATOP_ZERO_ROWS_LOCAL=68,
1522:                MATOP_GET_ROW_MAX_ABS=69,
1523:                MATOP_GET_ROW_MIN_ABS=70,
1524:                MATOP_CONVERT=71,
1525:                MATOP_SET_COLORING=72,
1526:                MATOP_SET_VALUES_ADIC=73,
1527:                MATOP_SET_VALUES_ADIFOR=74,
1528:                MATOP_FD_COLORING_APPLY=75,
1529:                MATOP_SET_FROM_OPTIONS=76,
1530:                MATOP_MULT_CON=77,
1531:                MATOP_MULT_TRANSPOSE_CON=78,
1532:                MATOP_PERMUTE_SPARSIFY=79,
1533:                MATOP_MULT_MULTIPLE=80,
1534:                MATOP_SOLVE_MULTIPLE=81,
1535:                MATOP_GET_INERTIA=82,
1536:                MATOP_LOAD=83,
1537:                MATOP_IS_SYMMETRIC=84,
1538:                MATOP_IS_HERMITIAN=85,
1539:                MATOP_IS_STRUCTURALLY_SYMMETRIC=86,
1540:                MATOP_DUMMY=87,
1541:                MATOP_GET_VECS=88,
1542:                MATOP_MAT_MULT=89,
1543:                MATOP_MAT_MULT_SYMBOLIC=90,
1544:                MATOP_MAT_MULT_NUMERIC=91,
1545:                MATOP_PTAP=92,
1546:                MATOP_PTAP_SYMBOLIC=93,
1547:                MATOP_PTAP_NUMERIC=94,
1548:                MATOP_MAT_MULTTRANSPOSE=95,
1549:                MATOP_MAT_MULTTRANSPOSE_SYM=96,
1550:                MATOP_MAT_MULTTRANSPOSE_NUM=97,
1551:                MATOP_PTAP_SYMBOLIC_SEQAIJ=98,
1552:                MATOP_PTAP_NUMERIC_SEQAIJ=99,
1553:                MATOP_PTAP_SYMBOLIC_MPIAIJ=100,
1554:                MATOP_PTAP_NUMERIC_MPIAIJ=101,
1555:                MATOP_CONJUGATE=102,
1556:                MATOP_SET_SIZES=103,
1557:                MATOP_SET_VALUES_ROW=104,
1558:                MATOP_REAL_PART=105,
1559:                MATOP_IMAG_PART=106,
1560:                MATOP_GET_ROW_UTRIANGULAR=107,
1561:                MATOP_RESTORE_ROW_UTRIANGULAR=108,
1562:                MATOP_MATSOLVE=109,
1563:                MATOP_GET_REDUNDANTMATRIX=110,
1564:                MATOP_GET_ROW_MIN=111,
1565:                MATOP_GET_COLUMN_VEC=112,
1566:                MATOP_MISSING_DIAGONAL=113,
1567:                MATOP_MATGETSEQNONZEROSTRUCTURE=114,
1568:                MATOP_CREATE=115,
1569:                MATOP_GET_GHOSTS=116,
1570:                MATOP_GET_LOCALSUBMATRIX=117,
1571:                MATOP_RESTORE_LOCALSUBMATRIX=118,
1572:                MATOP_MULT_DIAGONAL_BLOCK=119,
1573:                MATOP_HERMITIANTRANSPOSE=120,
1574:                MATOP_MULTHERMITIANTRANSPOSE=121,
1575:                MATOP_MULTHERMITIANTRANSPOSEADD=122,
1576:                MATOP_GETMULTIPROCBLOCK=123,
1577:                MATOP_GETCOLUMNNORMS=125,
1578:                MATOP_GET_SUBMATRICES_PARALLEL=128,
1579:                MATOP_SET_VALUES_BATCH=129
1580:              } MatOperation;

1586: /*
1587:    Codes for matrices stored on disk. By default they are
1588:    stored in a universal format. By changing the format with 
1589:    PetscViewerSetFormat(viewer,PETSC_VIEWER_NATIVE); the matrices will
1590:    be stored in a way natural for the matrix, for example dense matrices
1591:    would be stored as dense. Matrices stored this way may only be
1592:    read into matrices of the same type.
1593: */
1594: #define MATRIX_BINARY_FORMAT_DENSE -1


1599: /*S
1600:      MatNullSpace - Object that removes a null space from a vector, i.e.
1601:          orthogonalizes the vector to a subspace

1603:    Level: advanced

1605:   Concepts: matrix; linear operator, null space

1607:   Users manual sections:
1608: .   Section 4.16 Solving Singular Systems

1610: .seealso:  MatNullSpaceCreate()
1611: S*/
1612: typedef struct _p_MatNullSpace* MatNullSpace;
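
/*
   Sketch of creating the constant null space and attaching it to a Krylov solver so that
   a singular system can still be solved; ksp is assumed to be an existing KSP object.
.vb
     MatNullSpace   nsp;
     PetscErrorCode ierr;

     ierr = MatNullSpaceCreate(PETSC_COMM_WORLD,PETSC_TRUE,0,PETSC_NULL,&nsp);CHKERRQ(ierr);
     ierr = KSPSetNullSpace(ksp,nsp);CHKERRQ(ierr);
     ierr = MatNullSpaceDestroy(&nsp);CHKERRQ(ierr);
.ve
*/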







1650: /*S
1651:     MatMFFD - A data structure used to manage the computation of the h differencing parameter for matrix-free 
1652:               Jacobian vector products

1654:     Notes: MATMFFD is a specific MatType which uses the MatMFFD data structure

1656:            MatMFFD*() methods actually take the Mat as their first argument, not a MatMFFD data structure

1658:     Level: developer

1660: .seealso: MATMFFD, MatCreateMFFD(), MatMFFDSetFunction(), MatMFFDSetType(), MatMFFDRegister()
1661: S*/
1662: typedef struct _p_MatMFFD* MatMFFD;

1664: /*E
1665:     MatMFFDType - algorithm used to compute the differencing parameter h for matrix-vector products computed via differencing of the function

1667:    Level: beginner

1669: .seealso: MatMFFDSetType(), MatMFFDRegister()
1670: E*/
1671: #define MatMFFDType char*
1672: #define MATMFFD_DS  "ds"
1673: #define MATMFFD_WP  "wp"


1678: /*MC
1679:    MatMFFDRegisterDynamic - Adds a method to the MatMFFD registry.

1681:    Synopsis:
1682:    PetscErrorCode MatMFFDRegisterDynamic(const char *name_solver,const char *path,const char *name_create,PetscErrorCode (*routine_create)(MatMFFD))

1684:    Not Collective

1686:    Input Parameters:
1687: +  name_solver - name of a new user-defined compute-h module
1688: .  path - path (either absolute or relative) to the library containing this solver
1689: .  name_create - name of routine to create method context
1690: -  routine_create - routine to create method context

1692:    Level: developer

1694:    Notes:
1695:    MatMFFDRegisterDynamic() may be called multiple times to add several user-defined solvers.

1697:    If dynamic libraries are used, then the fourth input argument (routine_create)
1698:    is ignored.

1700:    Sample usage:
1701: .vb
1702:    MatMFFDRegisterDynamic("my_h","/home/username/my_lib/lib/libO/solaris/mylib.a",
1703:                "MyHCreate",MyHCreate);
1704: .ve

1706:    Then, your solver can be chosen with the procedural interface via
1707: $     MatMFFDSetType(mfctx,"my_h")
1708:    or at runtime via the option
1709: $     -snes_mf_type my_h

1711: .keywords: MatMFFD, register

1713: .seealso: MatMFFDRegisterAll(), MatMFFDRegisterDestroy()
1714: M*/
1715: #if defined(PETSC_USE_DYNAMIC_LIBRARIES)
1716: #define MatMFFDRegisterDynamic(a,b,c,d) MatMFFDRegister(a,b,c,0)
1717: #else
1718: #define MatMFFDRegisterDynamic(a,b,c,d) MatMFFDRegister(a,b,c,d)
1719: #endif




1730: /* 
1731:    PETSc interface to MUMPS 
1732: */
1733: #ifdef PETSC_HAVE_MUMPS
1735: #endif

1737: /* 
1738:    PETSc interface to SUPERLU
1739: */
1740: #ifdef PETSC_HAVE_SUPERLU
1742: #endif

1744: #if defined(PETSC_HAVE_CUSP)
1747: #endif

1749: /* 
1750:    PETSc interface to FFTW
1751: */
1752: #if defined(PETSC_HAVE_FFTW)
1756: #endif


1767: #endif