Actual source code: gamg.c
petsc-3.3-p5 2012-12-01
1: /*
2: GAMG geometric-algebraic multigrid PC - Mark Adams 2011
3: */
4: #include "petsc-private/matimpl.h"
5: #include <../src/ksp/pc/impls/gamg/gamg.h> /*I "petscpc.h" I*/
6: #include <petsc-private/kspimpl.h>
8: #if defined PETSC_GAMG_USE_LOG
9: PetscLogEvent petsc_gamg_setup_events[NUM_SET];
10: #endif
12: #if defined PETSC_USE_LOG
13: PetscLogEvent PC_GAMGGgraph_AGG;
14: PetscLogEvent PC_GAMGGgraph_GEO;
15: PetscLogEvent PC_GAMGCoarsen_AGG;
16: PetscLogEvent PC_GAMGCoarsen_GEO;
17: PetscLogEvent PC_GAMGProlongator_AGG;
18: PetscLogEvent PC_GAMGProlongator_GEO;
19: PetscLogEvent PC_GAMGOptprol_AGG;
20: PetscLogEvent PC_GAMGKKTProl_AGG;
21: #endif
23: #define GAMG_MAXLEVELS 30
25: /* #define GAMG_STAGES */
26: #if (defined PETSC_GAMG_USE_LOG && defined GAMG_STAGES)
27: static PetscLogStage gamg_stages[GAMG_MAXLEVELS];
28: #endif
30: static PetscFList GAMGList = 0;
32: /* ----------------------------------------------------------------------------- */
35: PetscErrorCode PCReset_GAMG(PC pc)
36: {
37: PetscErrorCode ierr;
38: PC_MG *mg = (PC_MG*)pc->data;
39: PC_GAMG *pc_gamg = (PC_GAMG*)mg->innerctx;
42: if( pc_gamg->data ) { /* this should not happen, cleaned up in SetUp */
43: PetscFree( pc_gamg->data );
44: }
45: pc_gamg->data = PETSC_NULL; pc_gamg->data_sz = 0;
46: return(0);
47: }
49: /* private 2x2 Mat Nest for Stokes */
50: typedef struct{
51: Mat A11,A21,A12,Amat;
52: IS prim_is,constr_is;
53: }GAMGKKTMat;
57: static PetscErrorCode GAMGKKTMatCreate( Mat A, PetscBool iskkt, GAMGKKTMat *out )
58: {
60: out->Amat = A;
61: if( iskkt ){
62: PetscErrorCode ierr;
63: IS is_constraint, is_prime;
64: PetscInt nmin,nmax;
66: MatGetOwnershipRange( A, &nmin, &nmax );
67: MatFindZeroDiagonals( A, &is_constraint );
68: ISComplement( is_constraint, nmin, nmax, &is_prime );
69: out->prim_is = is_prime;
70: out->constr_is = is_constraint;
71:
72: MatGetSubMatrix( A, is_prime, is_prime, MAT_INITIAL_MATRIX, &out->A11);
73: MatGetSubMatrix( A, is_prime, is_constraint, MAT_INITIAL_MATRIX, &out->A12);
74: MatGetSubMatrix( A, is_constraint, is_prime, MAT_INITIAL_MATRIX, &out->A21);
75: PetscPrintf(((PetscObject)A)->comm,"[%d]%s N=%d N_11=%d\n",0,__FUNCT__,A->rmap->N,out->A11->rmap->N);
76: }
77: else {
78: out->A11 = A;
79: out->A21 = PETSC_NULL;
80: out->A12 = PETSC_NULL;
81: out->prim_is = PETSC_NULL;
82: out->constr_is = PETSC_NULL;
83: }
84: return(0);
85: }
89: static PetscErrorCode GAMGKKTMatDestroy( GAMGKKTMat *mat )
90: {
91: PetscErrorCode ierr;
94: if( mat->A11 && mat->A11 != mat->Amat ) {
95: MatDestroy( &mat->A11 );
96: }
97: MatDestroy( &mat->A21 );
98: MatDestroy( &mat->A12 );
100: ISDestroy( &mat->prim_is );
101: ISDestroy( &mat->constr_is );
103: return(0);
104: }
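/*
   A minimal usage sketch of the KKT helpers above (editor's illustration,
   compiled out; 'ExampleSplitKKT' is hypothetical): split a saddle-point
   matrix A = [A11 A12; A21 0] by detecting its zero-diagonal (constraint)
   rows, then query the recovered blocks.
*/
#if 0
static PetscErrorCode ExampleSplitKKT( Mat A )
{
  PetscErrorCode ierr;
  GAMGKKTMat     kkt;
  PetscInt       n_prim, n_constr;

  ierr = GAMGKKTMatCreate( A, PETSC_TRUE, &kkt );CHKERRQ(ierr);
  ierr = MatGetSize( kkt.A11, &n_prim, PETSC_NULL );CHKERRQ(ierr);   /* primal block rows */
  ierr = MatGetSize( kkt.A21, &n_constr, PETSC_NULL );CHKERRQ(ierr); /* constraint rows */
  ierr = PetscPrintf( ((PetscObject)A)->comm, "N_11=%D N_21=%D\n", n_prim, n_constr );CHKERRQ(ierr);
  ierr = GAMGKKTMatDestroy( &kkt );CHKERRQ(ierr);
  return(0);
}
#endif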
106: /* -------------------------------------------------------------------------- */
107: /*
108: createLevel: create the coarse operator with RAP, then repartition and/or reduce
109: the number of active processors.
111: Input Parameters:
112: . pc - parameters + side effect: coarse data in 'pc_gamg->data' and
113: 'pc_gamg->data_sz' are changed via repartitioning/reduction.
114: . Amat_fine - matrix on this fine (k) level
115: . cr_bs - coarse block size
116: . isLast - PETSC_TRUE if this is the coarsest level to be created (reduce to one process)
117: . stokes - PETSC_TRUE if the system is a Stokes-like (KKT) saddle-point problem
118: In/Output Parameter:
119: . a_P_inout - prolongation operator to the next level (k-->k-1)
120: . a_nactive_proc - number of active procs
121: Output Parameter:
122: . a_Amat_crs - coarse matrix that is created (k-1)
123: */
127: static PetscErrorCode createLevel( const PC pc,
128: const Mat Amat_fine,
129: const PetscInt cr_bs,
130: const PetscBool isLast,
131: const PetscBool stokes,
132: Mat *a_P_inout,
133: Mat *a_Amat_crs,
134: PetscMPIInt *a_nactive_proc
135: )
136: {
137: PetscErrorCode ierr;
138: PC_MG *mg = (PC_MG*)pc->data;
139: PC_GAMG *pc_gamg = (PC_GAMG*)mg->innerctx;
140: const PetscBool repart = pc_gamg->repart;
141: const PetscInt min_eq_proc = pc_gamg->min_eq_proc, coarse_max = pc_gamg->coarse_eq_limit;
142: Mat Cmat,Pold=*a_P_inout;
143: MPI_Comm wcomm = ((PetscObject)Amat_fine)->comm;
144: PetscMPIInt mype,npe,new_npe,nactive=*a_nactive_proc;
145: PetscInt ncrs_eq,ncrs_prim,f_bs;
148: MPI_Comm_rank( wcomm, &mype );
149: MPI_Comm_size( wcomm, &npe );
150: MatGetBlockSize( Amat_fine, &f_bs );
151: /* RAP */
152: MatPtAP( Amat_fine, Pold, MAT_INITIAL_MATRIX, 2.0, &Cmat );
154: /* set 'ncrs_prim' (nodes), 'ncrs_eq' (equations)*/
155: ncrs_prim = pc_gamg->data_sz/pc_gamg->data_cell_cols/pc_gamg->data_cell_rows;
156: assert(pc_gamg->data_sz%(pc_gamg->data_cell_cols*pc_gamg->data_cell_rows)==0);
157: MatGetLocalSize( Cmat, &ncrs_eq, PETSC_NULL );
158:
159: /* choose the number of PEs to make active, 'new_npe' (a reduction; can be any integer in [1,npe]) */
160: {
161: PetscInt ncrs_eq_glob,ncrs_eq_ave;
162: MatGetSize( Cmat, &ncrs_eq_glob, PETSC_NULL );
163: ncrs_eq_ave = ncrs_eq_glob/npe;
164: new_npe = (PetscMPIInt)((float)ncrs_eq_ave/(float)min_eq_proc + 0.5); /* hardwire min. number of eq/proc */
165: if( new_npe == 0 || ncrs_eq_ave < coarse_max ) new_npe = 1;
166: else if ( new_npe >= nactive ) new_npe = nactive; /* no change, rare */
167: if( isLast ) new_npe = 1;
168: }
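/* Worked example of the reduction heuristic above (editor's illustration):
   with npe=16 active processes, ncrs_eq_glob=16000 coarse equations,
   min_eq_proc=100 and coarse_eq_limit=800:
     ncrs_eq_ave = 16000/16 = 1000   (>= coarse_eq_limit, so not forced to 1)
     new_npe     = 1000/100 + 0.5 --> 10
   i.e. this level is squeezed onto 10 processes, ~1600 equations each. */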
170: if( !repart && new_npe==nactive ) {
171: *a_Amat_crs = Cmat; /* output - no repartitioning or reduction - could bail here */
172: }
173: else {
174: const PetscInt *idx,ndata_rows=pc_gamg->data_cell_rows,ndata_cols=pc_gamg->data_cell_cols,node_data_sz=ndata_rows*ndata_cols;
175: PetscInt *counts,*newproc_idx,ii,jj,kk,strideNew,*tidx,ncrs_prim_new,ncrs_eq_new,nloc_old;
176: IS is_eq_newproc,is_eq_newproc_prim,is_eq_num,is_eq_num_prim,isscat,new_eq_indices;
177: VecScatter vecscat;
178: PetscScalar *array;
179: Vec src_crd, dest_crd;
181: nloc_old = ncrs_eq/cr_bs; assert(ncrs_eq%cr_bs==0);
182: #if defined PETSC_GAMG_USE_LOG
183: PetscLogEventBegin(petsc_gamg_setup_events[SET12],0,0,0,0);
184: #endif
185: /* make 'is_eq_newproc' */
186: PetscMalloc( npe*sizeof(PetscInt), &counts );
187: if( repart && !stokes ) {
188: /* Repartition Cmat_{k} and move columns of P^{k}_{k-1} and coordinates of primal part accordingly */
189: Mat adj;
191: if (pc_gamg->verbose>0) {
192: if (pc_gamg->verbose==1) PetscPrintf(wcomm,"\t[%d]%s repartition: npe (active): %d --> %d, neq = %d\n",mype,__FUNCT__,*a_nactive_proc,new_npe,ncrs_eq);
193: else {
194: PetscInt n;
195: MPI_Allreduce( &ncrs_eq, &n, 1, MPIU_INT, MPI_SUM, wcomm );
196: PetscPrintf(wcomm,"\t[%d]%s repartition: npe (active): %d --> %d, neq = %d\n",mype,__FUNCT__,*a_nactive_proc,new_npe,n);
197: }
198: }
200: /* get 'adj' */
201: if( cr_bs == 1 ) {
202: MatConvert( Cmat, MATMPIADJ, MAT_INITIAL_MATRIX, &adj );
203: }
204: else{
205: /* make a scalar matrix to partition (no Stokes here) */
206: Mat tMat;
207: PetscInt Istart_crs,Iend_crs,ncols,jj,Ii;
208: const PetscScalar *vals;
209: const PetscInt *idx;
210: PetscInt *d_nnz, *o_nnz, M, N;
211: static PetscInt llev = 0;
212:
213: PetscMalloc( ncrs_prim*sizeof(PetscInt), &d_nnz );
214: PetscMalloc( ncrs_prim*sizeof(PetscInt), &o_nnz );
215: MatGetOwnershipRange( Cmat, &Istart_crs, &Iend_crs );
216: MatGetSize( Cmat, &M, &N );
217: for ( Ii = Istart_crs, jj = 0 ; Ii < Iend_crs ; Ii += cr_bs, jj++ ) {
218: MatGetRow(Cmat,Ii,&ncols,0,0);
219: d_nnz[jj] = ncols/cr_bs;
220: o_nnz[jj] = ncols/cr_bs;
221: MatRestoreRow(Cmat,Ii,&ncols,0,0);
222: if( d_nnz[jj] > ncrs_prim ) d_nnz[jj] = ncrs_prim;
223: if( o_nnz[jj] > (M/cr_bs-ncrs_prim) ) o_nnz[jj] = M/cr_bs-ncrs_prim;
224: }
226: MatCreate( wcomm, &tMat );
227: MatSetSizes( tMat, ncrs_prim, ncrs_prim,
228: PETSC_DETERMINE, PETSC_DETERMINE );
229:
230: MatSetType(tMat,MATAIJ);
231: MatSeqAIJSetPreallocation(tMat,0,d_nnz);
232: MatMPIAIJSetPreallocation(tMat,0,d_nnz,0,o_nnz);
233: PetscFree( d_nnz );
234: PetscFree( o_nnz );
236: for ( ii = Istart_crs; ii < Iend_crs; ii++ ) {
237: PetscInt dest_row = ii/cr_bs;
238: MatGetRow(Cmat,ii,&ncols,&idx,&vals);
239: for( jj = 0 ; jj < ncols ; jj++ ){
240: PetscInt dest_col = idx[jj]/cr_bs;
241: PetscScalar v = 1.0;
242: MatSetValues(tMat,1,&dest_row,1,&dest_col,&v,ADD_VALUES);
243: }
244: MatRestoreRow(Cmat,ii,&ncols,&idx,&vals);
245: }
246: MatAssemblyBegin(tMat,MAT_FINAL_ASSEMBLY);
247: MatAssemblyEnd(tMat,MAT_FINAL_ASSEMBLY);
248:
249: if( llev++ == -1 ) {
250: PetscViewer viewer; char fname[32];
251: PetscSNPrintf(fname,sizeof fname,"part_mat_%D.mat",llev);
252: PetscViewerBinaryOpen(wcomm,fname,FILE_MODE_WRITE,&viewer);
253: MatView( tMat, viewer );
254: PetscViewerDestroy( &viewer );
255: }
257: MatConvert( tMat, MATMPIADJ, MAT_INITIAL_MATRIX, &adj );
259: MatDestroy( &tMat );
260: } /* create 'adj' */
262: { /* partition: get newproc_idx */
263: char prefix[256];
264: const char *pcpre;
265: const PetscInt *is_idx;
266: MatPartitioning mpart;
267: IS proc_is;
268: PetscInt targetPE;
269:
270: MatPartitioningCreate( wcomm, &mpart );
271: MatPartitioningSetAdjacency( mpart, adj );
272: PCGetOptionsPrefix( pc, &pcpre );
273: PetscSNPrintf(prefix,sizeof prefix,"%spc_gamg_",pcpre?pcpre:"");
274: PetscObjectSetOptionsPrefix((PetscObject)mpart,prefix);
275: MatPartitioningSetFromOptions( mpart );
276: MatPartitioningSetNParts( mpart, new_npe );
277: MatPartitioningApply( mpart, &proc_is );
278: MatPartitioningDestroy( &mpart );
279:
280: /* collect IS info */
281: PetscMalloc( ncrs_eq*sizeof(PetscInt), &newproc_idx );
282: ISGetIndices( proc_is, &is_idx );
283: targetPE = 1; /* bring to "front" of machine */
284: /*targetPE = npe/new_npe;*/ /* spread partitioning across machine */
285: for( kk = jj = 0 ; kk < nloc_old ; kk++ ){
286: for( ii = 0 ; ii < cr_bs ; ii++, jj++ ){
287: newproc_idx[jj] = is_idx[kk] * targetPE; /* distribution */
288: }
289: }
290: ISRestoreIndices( proc_is, &is_idx );
291: ISDestroy( &proc_is );
292: }
293: MatDestroy( &adj );
295: ISCreateGeneral( wcomm, ncrs_eq, newproc_idx, PETSC_COPY_VALUES, &is_eq_newproc );
296:
297: if( newproc_idx != 0 ) {
298: PetscFree( newproc_idx );
299: }
300: } /* repartitioning */
301: else { /* simple aggregation of parts -- 'is_eq_newproc' */
303: PetscInt rfactor,targetPE;
304: /* find factor */
305: if( new_npe == 1 ) rfactor = npe; /* easy */
306: else {
307: PetscReal best_fact = 0.;
308: jj = -1;
309: for( kk = 1 ; kk <= npe ; kk++ ){
310: if( npe%kk==0 ) { /* a candidate */
311: PetscReal nactpe = (PetscReal)npe/(PetscReal)kk, fact = nactpe/(PetscReal)new_npe;
312: if(fact > 1.0) fact = 1./fact; /* keep fact < 1 */
313: if( fact > best_fact ) {
314: best_fact = fact; jj = kk;
315: }
316: }
317: }
318: if( jj != -1 ) rfactor = jj;
319: else rfactor = 1; /* can happen when npe is prime */
320: }
321: new_npe = npe/rfactor;
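/* Worked example of the factor search above (editor's illustration):
   npe=12, desired new_npe=5.  The divisors kk of 12 give nactpe=12/kk
   active processes: 12, 6, 4, 3, 2, 1.  The score compares nactpe to
   new_npe from below (fact = min(nactpe/new_npe, new_npe/nactpe)):
   kk=2 gives nactpe=6, fact=5/6, which beats kk=3 (fact=4/5), so
   rfactor=2 and new_npe is rounded to 12/2 = 6. */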
323: if( new_npe==nactive ) {
324: *a_Amat_crs = Cmat; /* output - no repartitioning or reduction, bail out because nested here */
325: PetscFree( counts );
326: if (pc_gamg->verbose>0){
327: PetscPrintf(wcomm,"\t[%d]%s aggregate processors noop: new_npe=%d, neq(loc)=%d\n",mype,__FUNCT__,new_npe,ncrs_eq);
328: }
329: return(0);
330: }
332: if (pc_gamg->verbose) PetscPrintf(wcomm,"\t[%d]%s number of equations (loc) %d with simple aggregation\n",mype,__FUNCT__,ncrs_eq);
333: targetPE = mype/rfactor;
334: ISCreateStride( wcomm, ncrs_eq, targetPE, 0, &is_eq_newproc );
336: if( stokes ) {
337: ISCreateStride( wcomm, ncrs_prim*cr_bs, targetPE, 0, &is_eq_newproc_prim );
338: }
339: } /* end simple 'is_eq_newproc' */
341: /*
342: Create an index set from the 'is_eq_newproc' index set giving the new global equation numbering (the mapping TO the new layout)
343: */
344: ISPartitioningToNumbering( is_eq_newproc, &is_eq_num );
345: if( stokes ) {
346: ISPartitioningToNumbering( is_eq_newproc_prim, &is_eq_num_prim );
347: }
348: else is_eq_num_prim = is_eq_num;
349: /*
350: Determine how many equations/vertices are assigned to each processor
351: */
352: ISPartitioningCount( is_eq_newproc, npe, counts );
353: ncrs_eq_new = counts[mype];
354: ISDestroy( &is_eq_newproc );
355: if( stokes ) {
356: ISPartitioningCount( is_eq_newproc_prim, npe, counts );
357: ISDestroy( &is_eq_newproc_prim );
358: ncrs_prim_new = counts[mype]/cr_bs; /* nodes */
359: }
360: else ncrs_prim_new = ncrs_eq_new/cr_bs; /* eqs */
362: PetscFree( counts );
363: #if defined PETSC_GAMG_USE_LOG
364: PetscLogEventEnd(petsc_gamg_setup_events[SET12],0,0,0,0);
365: #endif
367: /* move data (for primal equations only) */
368: /* Create a vector to contain the newly ordered element information */
369: VecCreate( wcomm, &dest_crd );
370: VecSetSizes( dest_crd, node_data_sz*ncrs_prim_new, PETSC_DECIDE );
371: VecSetFromOptions( dest_crd ); /* this is needed! */
372: /*
373: There are 'ndata_rows*ndata_cols' data items per node (one can think of the vectors as having
374: a block size of 'ndata_rows*ndata_cols'). Note, ISs are expanded into equation space by 'cr_bs'.
375: */
376: PetscMalloc( (ncrs_prim*node_data_sz)*sizeof(PetscInt), &tidx );
377: ISGetIndices( is_eq_num_prim, &idx );
378: for(ii=0,jj=0; ii<ncrs_prim ; ii++) {
379: PetscInt id = idx[ii*cr_bs]/cr_bs; /* get node back */
380: for( kk=0; kk<node_data_sz ; kk++, jj++) tidx[jj] = id*node_data_sz + kk;
381: }
382: ISRestoreIndices( is_eq_num_prim, &idx );
383: ISCreateGeneral( wcomm, node_data_sz*ncrs_prim, tidx, PETSC_COPY_VALUES, &isscat );
384:
385: PetscFree( tidx );
386: /*
387: Create a vector to contain the original vertex information for each element
388: */
389: VecCreateSeq( PETSC_COMM_SELF, node_data_sz*ncrs_prim, &src_crd );
390: for( jj=0; jj<ndata_cols ; jj++ ) {
391: const PetscInt stride0=ncrs_prim*pc_gamg->data_cell_rows;
392: for( ii=0 ; ii<ncrs_prim ; ii++) {
393: for( kk=0; kk<ndata_rows ; kk++ ) {
394: PetscInt ix = ii*ndata_rows + kk + jj*stride0, jx = ii*node_data_sz + kk*ndata_cols + jj;
395: PetscScalar tt = (PetscScalar)pc_gamg->data[ix];
396: VecSetValues( src_crd, 1, &jx, &tt, INSERT_VALUES );
397: }
398: }
399: }
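/* Index map above, worked for ndata_rows=2, ndata_cols=3 (editor's
   illustration): 'pc_gamg->data' is stored column-by-column over all nodes,
     ix = jj*stride0 + ii*ndata_rows + kk   with stride0 = 2*ncrs_prim,
   while 'src_crd' packs each node's 6 items contiguously, row-major,
     jx = ii*6 + kk*3 + jj,
   so a node's entire data block travels together through the VecScatter. */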
400: VecAssemblyBegin(src_crd);
401: VecAssemblyEnd(src_crd);
402: /*
403: Scatter the element vertex information (still in the original vertex ordering)
404: to the correct processor
405: */
406: VecScatterCreate( src_crd, PETSC_NULL, dest_crd, isscat, &vecscat);
407:
408: ISDestroy( &isscat );
409: VecScatterBegin(vecscat,src_crd,dest_crd,INSERT_VALUES,SCATTER_FORWARD);
410: VecScatterEnd(vecscat,src_crd,dest_crd,INSERT_VALUES,SCATTER_FORWARD);
411: VecScatterDestroy( &vecscat );
412: VecDestroy( &src_crd );
413: /*
414: Put the element vertex data into a new allocation of the gdata->ele
415: */
416: PetscFree( pc_gamg->data );
417: PetscMalloc( node_data_sz*ncrs_prim_new*sizeof(PetscReal), &pc_gamg->data );
418: pc_gamg->data_sz = node_data_sz*ncrs_prim_new;
419: strideNew = ncrs_prim_new*ndata_rows;
420: VecGetArray( dest_crd, &array );
421: for( jj=0; jj<ndata_cols ; jj++ ) {
422: for( ii=0 ; ii<ncrs_prim_new ; ii++) {
423: for( kk=0; kk<ndata_rows ; kk++ ) {
424: PetscInt ix = ii*ndata_rows + kk + jj*strideNew, jx = ii*node_data_sz + kk*ndata_cols + jj;
425: pc_gamg->data[ix] = PetscRealPart(array[jx]);
426: }
427: }
428: }
429: VecRestoreArray( dest_crd, &array );
430: VecDestroy( &dest_crd );
432: /* move A and P (columns) with new layout */
433: #if defined PETSC_GAMG_USE_LOG
434: PetscLogEventBegin(petsc_gamg_setup_events[SET13],0,0,0,0);
435: #endif
437: /*
438: Invert for MatGetSubMatrix
439: */
440: ISInvertPermutation( is_eq_num, ncrs_eq_new, &new_eq_indices );
441: ISSort( new_eq_indices ); /* is this needed? */
442: ISSetBlockSize( new_eq_indices, cr_bs );
443: if(is_eq_num != is_eq_num_prim) {
444: ISDestroy( &is_eq_num_prim ); /* could be same as 'is_eq_num' */
445: }
446: ISDestroy( &is_eq_num );
447: #if defined PETSC_GAMG_USE_LOG
448: PetscLogEventEnd(petsc_gamg_setup_events[SET13],0,0,0,0);
449: PetscLogEventBegin(petsc_gamg_setup_events[SET14],0,0,0,0);
450: #endif
451: /* 'a_Amat_crs' output */
452: {
453: Mat mat;
454: MatGetSubMatrix( Cmat, new_eq_indices, new_eq_indices, MAT_INITIAL_MATRIX, &mat );
455:
456: *a_Amat_crs = mat;
458: if(!PETSC_TRUE){
459: PetscInt cbs, rbs;
460: MatGetBlockSizes( Cmat, &rbs, &cbs );
461: PetscPrintf(MPI_COMM_SELF,"[%d]%s Old Mat rbs=%d cbs=%d\n",mype,__FUNCT__,rbs,cbs);
462: MatGetBlockSizes( mat, &rbs, &cbs );
463: PetscPrintf(MPI_COMM_SELF,"[%d]%s New Mat rbs=%d cbs=%d cr_bs=%d\n",mype,__FUNCT__,rbs,cbs,cr_bs);
464: }
465: }
466: MatDestroy( &Cmat );
468: #if defined PETSC_GAMG_USE_LOG
469: PetscLogEventEnd(petsc_gamg_setup_events[SET14],0,0,0,0);
470: #endif
471: /* prolongator */
472: {
473: IS findices;
474: PetscInt Istart,Iend;
475: Mat Pnew;
476: MatGetOwnershipRange( Pold, &Istart, &Iend );
477: #if defined PETSC_GAMG_USE_LOG
478: PetscLogEventBegin(petsc_gamg_setup_events[SET15],0,0,0,0);
479: #endif
480: ISCreateStride(wcomm,Iend-Istart,Istart,1,&findices);
481: ISSetBlockSize(findices,f_bs);
482: MatGetSubMatrix( Pold, findices, new_eq_indices, MAT_INITIAL_MATRIX, &Pnew );
483:
484: ISDestroy( &findices );
486: if(!PETSC_TRUE){
487: PetscInt cbs, rbs;
488: MatGetBlockSizes( Pold, &rbs, &cbs );
489: PetscPrintf(MPI_COMM_SELF,"[%d]%s Pold rbs=%d cbs=%d\n",mype,__FUNCT__,rbs,cbs);
490: MatGetBlockSizes( Pnew, &rbs, &cbs );
491: PetscPrintf(MPI_COMM_SELF,"[%d]%s Pnew rbs=%d cbs=%d\n",mype,__FUNCT__,rbs,cbs);
492: }
493: #if defined PETSC_GAMG_USE_LOG
494: PetscLogEventEnd(petsc_gamg_setup_events[SET15],0,0,0,0);
495: #endif
496: MatDestroy( a_P_inout );
498: /* output - repartitioned */
499: *a_P_inout = Pnew;
500: }
501: ISDestroy( &new_eq_indices );
503: *a_nactive_proc = new_npe; /* output */
504: }
506: /* output matrix data (debugging, normally disabled) */
507: if( !PETSC_TRUE ) {
508: PetscViewer viewer; char fname[32]; static int llev=0; Cmat = *a_Amat_crs;
509: if(llev==0) {
510: sprintf(fname,"Cmat_%d.m",llev++);
511: PetscViewerASCIIOpen(wcomm,fname,&viewer);
512: PetscViewerSetFormat( viewer, PETSC_VIEWER_ASCII_MATLAB);
513: MatView(Amat_fine, viewer );
514: PetscViewerDestroy( &viewer );
515: }
516: sprintf(fname,"Cmat_%d.m",llev++);
517: PetscViewerASCIIOpen(wcomm,fname,&viewer);
518: PetscViewerSetFormat( viewer, PETSC_VIEWER_ASCII_MATLAB);
519: MatView(Cmat, viewer );
520: PetscViewerDestroy( &viewer );
521: }
523: return(0);
524: }
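/*
   A condensed sketch of the repartitioning idiom used in createLevel()
   (editor's illustration, compiled out; 'ExampleMoveMat' is hypothetical,
   petsc-3.3 API assumed): turn a per-row "new owner" partitioning IS into
   a new global numbering, invert it, and pull the permuted matrix through
   MatGetSubMatrix so that rows land on their new processes.
*/
#if 0
static PetscErrorCode ExampleMoveMat( Mat A, IS is_newproc, Mat *Anew )
{
  PetscErrorCode ierr;
  IS             is_num, new_indices;
  PetscInt       *counts, nloc_new;
  PetscMPIInt    mype, npe;
  MPI_Comm       comm = ((PetscObject)A)->comm;

  ierr = MPI_Comm_rank( comm, &mype );CHKERRQ(ierr);
  ierr = MPI_Comm_size( comm, &npe );CHKERRQ(ierr);
  ierr = PetscMalloc( npe*sizeof(PetscInt), &counts );CHKERRQ(ierr);
  /* new global numbering induced by the target-process assignment */
  ierr = ISPartitioningToNumbering( is_newproc, &is_num );CHKERRQ(ierr);
  /* number of rows each process owns after the move */
  ierr = ISPartitioningCount( is_newproc, npe, counts );CHKERRQ(ierr);
  nloc_new = counts[mype];
  /* invert: for each new local row, the old global row that supplies it */
  ierr = ISInvertPermutation( is_num, nloc_new, &new_indices );CHKERRQ(ierr);
  ierr = MatGetSubMatrix( A, new_indices, new_indices, MAT_INITIAL_MATRIX, Anew );CHKERRQ(ierr);
  ierr = ISDestroy( &is_num );CHKERRQ(ierr);
  ierr = ISDestroy( &new_indices );CHKERRQ(ierr);
  ierr = PetscFree( counts );CHKERRQ(ierr);
  return(0);
}
#endif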
526: /* -------------------------------------------------------------------------- */
527: /*
528: PCSetUp_GAMG - Prepares for the use of the GAMG preconditioner
529: by setting data structures and options.
531: Input Parameter:
532: . pc - the preconditioner context
534: Application Interface Routine: PCSetUp()
536: Notes:
537: The interface routine PCSetUp() is not usually called directly by
538: the user, but instead is called by PCApply() if necessary.
539: */
542: PetscErrorCode PCSetUp_GAMG( PC pc )
543: {
544: PetscErrorCode ierr;
545: PC_MG *mg = (PC_MG*)pc->data;
546: PC_GAMG *pc_gamg = (PC_GAMG*)mg->innerctx;
547: Mat Pmat = pc->pmat;
548: PetscInt fine_level,level,level1,bs,M,qq,lidx,nASMBlocksArr[GAMG_MAXLEVELS];
549: MPI_Comm wcomm = ((PetscObject)pc)->comm;
550: PetscMPIInt mype,npe,nactivepe;
551: Mat Aarr[GAMG_MAXLEVELS],Parr[GAMG_MAXLEVELS];
552: PetscReal emaxs[GAMG_MAXLEVELS];
553: IS *ASMLocalIDsArr[GAMG_MAXLEVELS],removedEqs[GAMG_MAXLEVELS];
554: PetscInt level_bs[GAMG_MAXLEVELS];
555: GAMGKKTMat kktMatsArr[GAMG_MAXLEVELS];
556: PetscLogDouble nnz0=0.,nnztot=0.;
557: MatInfo info;
558: PetscBool stokes = PETSC_FALSE;
559:
561: MPI_Comm_rank(wcomm,&mype);
562: MPI_Comm_size(wcomm,&npe);
563: if (pc_gamg->verbose>2) PetscPrintf(wcomm,"[%d]%s pc_gamg->setup_count=%d pc->setupcalled=%d\n",mype,__FUNCT__,pc_gamg->setup_count,pc->setupcalled);
564: if( pc_gamg->setup_count++ > 0 ) {
565: PC_MG_Levels **mglevels = mg->levels;
566: /* just do Galerkin grids */
567: Mat B,dA,dB;
568: assert(pc->setupcalled);
570: if( pc_gamg->Nlevels > 1 ) {
571: /* currently only handle case where mat and pmat are the same on coarser levels */
572: KSPGetOperators(mglevels[pc_gamg->Nlevels-1]->smoothd,&dA,&dB,PETSC_NULL);
573: /* (re)set to get dirty flag */
574: KSPSetOperators(mglevels[pc_gamg->Nlevels-1]->smoothd,dA,dB,SAME_NONZERO_PATTERN);
575:
576: for (level=pc_gamg->Nlevels-2; level>-1; level--) {
577: /* the first time through the matrix structure has changed from repartitioning */
578: if( pc_gamg->setup_count==2 /*&& (pc_gamg->repart || level==0)*/) {
579: MatPtAP(dB,mglevels[level+1]->interpolate,MAT_INITIAL_MATRIX,1.0,&B);
580: MatDestroy(&mglevels[level]->A);
581: mglevels[level]->A = B;
582: }
583: else {
584: KSPGetOperators(mglevels[level]->smoothd,PETSC_NULL,&B,PETSC_NULL);
585: MatPtAP(dB,mglevels[level+1]->interpolate,MAT_REUSE_MATRIX,1.0,&B);
586: }
587: KSPSetOperators(mglevels[level]->smoothd,B,B,SAME_NONZERO_PATTERN);
588: dB = B;
589: }
590: }
592: PCSetUp_MG( pc );CHKERRQ( ierr );
594: /* PCSetUp_MG seems to insist on setting this to GMRES */
595: KSPSetType( mglevels[0]->smoothd, KSPPREONLY );
597: return(0);
598: }
599: assert(pc->setupcalled == 0);
601: PetscOptionsGetBool(((PetscObject)pc)->prefix,"-pc_fieldsplit_detect_saddle_point",&stokes,PETSC_NULL);
603: GAMGKKTMatCreate( Pmat, stokes, &kktMatsArr[0] );
605: if( pc_gamg->data == 0 ) {
606: if( !pc_gamg->createdefaultdata ){
607: SETERRQ(wcomm,PETSC_ERR_LIB,"'createdefaultdata' not set(?) need to support NULL data");
608: }
609: if( stokes ) {
610: SETERRQ(wcomm,PETSC_ERR_LIB,"Need data (eg, PCSetCoordinates) for Stokes problems");
611: }
612: pc_gamg->createdefaultdata( pc, kktMatsArr[0].A11 );
613: }
615: /* get basic dims */
616: if( stokes ) {
617: bs = pc_gamg->data_cell_rows; /* this is agg-mg specific */
618: }
619: else {
620: MatGetBlockSize( Pmat, &bs );
621: }
622:
623: MatGetSize( Pmat, &M, &qq );
624: if (pc_gamg->verbose) {
625: if(pc_gamg->verbose==1) MatGetInfo(Pmat,MAT_LOCAL,&info);
626: else MatGetInfo(Pmat,MAT_GLOBAL_SUM,&info);
627:
628: nnz0 = info.nz_used;
629: nnztot = info.nz_used;
630: PetscPrintf(wcomm,"\t[%d]%s level %d N=%d, n data rows=%d, n data cols=%d, nnz/row (ave)=%d, np=%d\n",
631: mype,__FUNCT__,0,M,pc_gamg->data_cell_rows,pc_gamg->data_cell_cols,
632: (int)(nnz0/(PetscReal)M),npe);
633: }
635: /* Get A_i and R_i */
636: for ( level=0, Aarr[0]=Pmat, nactivepe = npe; /* hard wired stopping logic */
637: level < (pc_gamg->Nlevels-1) && (level==0 || M>pc_gamg->coarse_eq_limit); /* && (npe==1 || nactivepe>1); */
638: level++ ){
639: level1 = level + 1;
640: #if defined PETSC_GAMG_USE_LOG
641: PetscLogEventBegin(petsc_gamg_setup_events[SET1],0,0,0,0);
642: #if (defined GAMG_STAGES)
643: PetscLogStagePush(gamg_stages[level]); CHKERRQ( ierr );
644: #endif
645: #endif
646: /* deal with Stokes, get sub matrices */
647: if( level > 0 ) {
648: GAMGKKTMatCreate( Aarr[level], stokes, &kktMatsArr[level] );
649: }
650: { /* construct prolongator */
651: Mat Gmat;
652: PetscCoarsenData *agg_lists;
653: Mat Prol11,Prol22;
655: level_bs[level] = bs;
656: pc_gamg->graph( pc,kktMatsArr[level].A11, &Gmat );
657: pc_gamg->coarsen( pc, &Gmat, &agg_lists );
658: pc_gamg->prolongator( pc, kktMatsArr[level].A11, Gmat, agg_lists, &Prol11 );
660: /* could have failed to create new level */
661: if( Prol11 ){
662: /* get new block size of coarse matrices */
663: MatGetBlockSizes( Prol11, PETSC_NULL, &bs );
665: if( stokes ) {
666: if(!pc_gamg->formkktprol) SETERRQ(wcomm,PETSC_ERR_USER,"Stokes not supported by this AMG method.");
667: /* R A12 == (T = A21 P)'; G = T' T; coarsen G; form plain agg with G */
668: pc_gamg->formkktprol( pc, Prol11, kktMatsArr[level].A21, &Prol22 );
669: }
670:
671: if( pc_gamg->optprol ){
672: /* smooth */
673: pc_gamg->optprol( pc, kktMatsArr[level].A11, &Prol11 );
674: }
675:
676: if( stokes ) {
677: IS is_row[2] = {kktMatsArr[level].prim_is,kktMatsArr[level].constr_is};
678: Mat a[4] = {Prol11, PETSC_NULL, PETSC_NULL, Prol22 };
679: MatCreateNest(wcomm,2,is_row, 2, is_row, a, &Parr[level1] );
680: }
681: else {
682: Parr[level1] = Prol11;
683: }
684: }
685: else Parr[level1] = PETSC_NULL;
687: if ( pc_gamg->use_aggs_in_gasm ) {
688: PetscCDGetASMBlocks(agg_lists, level_bs[level], &nASMBlocksArr[level], &ASMLocalIDsArr[level] );
689: }
691: PetscCDGetRemovedIS( agg_lists, &removedEqs[level] );
693: MatDestroy( &Gmat );
694: PetscCDDestroy( agg_lists );
695: } /* construct prolongator scope */
696: #if defined PETSC_GAMG_USE_LOG
697: PetscLogEventEnd(petsc_gamg_setup_events[SET1],0,0,0,0);
698: #endif
699: /* cache eigen estimate */
700: if( pc_gamg->emax_id != -1 ){
701: PetscBool flag;
702: PetscObjectComposedDataGetReal( (PetscObject)kktMatsArr[level].A11, pc_gamg->emax_id, emaxs[level], flag );
703: CHKERRQ( ierr );
704: if( !flag ) emaxs[level] = -1.;
705: }
706: else emaxs[level] = -1.;
707: if(level==0) Aarr[0] = Pmat; /* use Pmat for finest level setup */
708: if( !Parr[level1] ) {
709: if (pc_gamg->verbose) PetscPrintf(wcomm,"\t[%d]%s stop gridding, level %d\n",mype,__FUNCT__,level);
710: break;
711: }
712: #if defined PETSC_GAMG_USE_LOG
713: PetscLogEventBegin(petsc_gamg_setup_events[SET2],0,0,0,0);
714: #endif
716: createLevel( pc, Aarr[level], bs, (PetscBool)(level==pc_gamg->Nlevels-2),
717: stokes, &Parr[level1], &Aarr[level1], &nactivepe );
718:
720: #if defined PETSC_GAMG_USE_LOG
721: PetscLogEventEnd(petsc_gamg_setup_events[SET2],0,0,0,0);
722: #endif
723: MatGetSize( Aarr[level1], &M, &qq );
725: if (pc_gamg->verbose > 0){
726: PetscInt NN = M;
727: if(pc_gamg->verbose==1) {
728: MatGetInfo(Aarr[level1],MAT_LOCAL,&info);
729: MatGetLocalSize( Aarr[level1], &NN, &qq );
730: }
731: else MatGetInfo( Aarr[level1], MAT_GLOBAL_SUM, &info );
733:
734: nnztot += info.nz_used;
735: PetscPrintf(wcomm,"\t\t[%d]%s %d) N=%d, n data cols=%d, nnz/row (ave)=%d, %d active pes\n",
736: mype,__FUNCT__,(int)level1,M,pc_gamg->data_cell_cols,
737: (int)(info.nz_used/(PetscReal)NN), nactivepe );
738:
739: }
741: /* stop if one node -- could pull back for singular problems */
742: if( M/pc_gamg->data_cell_cols < 2 ) {
743: level++;
744: break;
745: }
746: #if (defined PETSC_GAMG_USE_LOG && defined GAMG_STAGES)
747: PetscLogStagePop(); CHKERRQ( ierr );
748: #endif
749: } /* levels */
751: if( pc_gamg->data ) {
752: PetscFree( pc_gamg->data ); CHKERRQ( ierr );
753: pc_gamg->data = PETSC_NULL;
754: }
756: if (pc_gamg->verbose) PetscPrintf(wcomm,"\t[%d]%s %d levels, grid complexity = %g\n",0,__FUNCT__,level+1,nnztot/nnz0);
757: pc_gamg->Nlevels = level + 1;
758: fine_level = level;
759: PCMGSetLevels(pc,pc_gamg->Nlevels,PETSC_NULL);
761: /* simple setup */
762: if( !PETSC_TRUE ){
763: PC_MG_Levels **mglevels = mg->levels;
764: for (lidx=0,level=pc_gamg->Nlevels-1;
765: lidx<fine_level;
766: lidx++, level--){
767: PCMGSetInterpolation( pc, lidx+1, Parr[level] );
768: KSPSetOperators( mglevels[lidx]->smoothd, Aarr[level], Aarr[level], SAME_NONZERO_PATTERN );
769: MatDestroy( &Parr[level] );
770: MatDestroy( &Aarr[level] );
771: }
772: KSPSetOperators( mglevels[fine_level]->smoothd, Aarr[0], Aarr[0], SAME_NONZERO_PATTERN );
773:
774: PCSetUp_MG( pc ); CHKERRQ( ierr );
775: }
776: else if( pc_gamg->Nlevels > 1 ) { /* don't set up MG if only one level */
777: /* set default smoothers & set operators */
778: for ( lidx = 1, level = pc_gamg->Nlevels-2;
779: lidx <= fine_level;
780: lidx++, level--) {
781: KSP smoother;
782: PC subpc;
784: PCMGGetSmoother( pc, lidx, &smoother );
785: KSPGetPC( smoother, &subpc );
786:
787: KSPSetNormType( smoother, KSP_NORM_NONE );
788: /* set ops */
789: KSPSetOperators( smoother, Aarr[level], Aarr[level], SAME_NONZERO_PATTERN );
790: PCMGSetInterpolation( pc, lidx, Parr[level+1] );
792: /* create field split PC, get subsmoother */
793: if( stokes ) {
794: KSP *ksps;
795: PetscInt nn;
796: PCFieldSplitSetIS(subpc,"0",kktMatsArr[level].prim_is);
797: PCFieldSplitSetIS(subpc,"1",kktMatsArr[level].constr_is);
798: PCFieldSplitGetSubKSP(subpc,&nn,&ksps);
799: smoother = ksps[0];
800: KSPGetPC( smoother, &subpc );
801: PetscFree( ksps );
802: }
803: GAMGKKTMatDestroy( &kktMatsArr[level] );
805: /* set defaults */
806: KSPSetType( smoother, KSPCHEBYSHEV );
808: /* override defaults and command line args (!) */
809: if ( pc_gamg->use_aggs_in_gasm ) {
810: PetscInt sz;
811: IS *is;
813: sz = nASMBlocksArr[level];
814: is = ASMLocalIDsArr[level];
815: PCSetType( subpc, PCGASM );
816: if(sz==0){
817: IS is;
818: PetscInt my0,kk;
819: MatGetOwnershipRange( Aarr[level], &my0, &kk );
820: ISCreateGeneral(PETSC_COMM_SELF, 1, &my0, PETSC_COPY_VALUES, &is );
821: PCGASMSetSubdomains( subpc, 1, &is, PETSC_NULL );
822: ISDestroy( &is );
823: }
824: else {
825: PetscInt kk;
826: PCGASMSetSubdomains( subpc, sz, is, PETSC_NULL );
827: for(kk=0;kk<sz;kk++){
828: ISDestroy( &is[kk] );
829: }
830: PetscFree( is );
831: }
832: PCGASMSetOverlap( subpc, 0 );
834: ASMLocalIDsArr[level] = PETSC_NULL;
835: nASMBlocksArr[level] = 0;
836: PCGASMSetType( subpc, PC_GASM_BASIC );
837: }
838: else {
839: PCSetType( subpc, PCJACOBI );
840: }
841: }
842: {
843: /* coarse grid */
844: KSP smoother,*k2; PC subpc,pc2; PetscInt ii,first;
845: Mat Lmat = Aarr[(level=pc_gamg->Nlevels-1)]; lidx = 0;
846: PCMGGetSmoother( pc, lidx, &smoother );
847: KSPSetOperators( smoother, Lmat, Lmat, SAME_NONZERO_PATTERN );
848: KSPSetNormType( smoother, KSP_NORM_NONE );
849: KSPGetPC( smoother, &subpc );
850: PCSetType( subpc, PCBJACOBI );
851: PCSetUp( subpc );
852: PCBJacobiGetSubKSP(subpc,&ii,&first,&k2); assert(ii==1);
853: KSPGetPC(k2[0],&pc2);
854: PCSetType( pc2, PCLU );
855: }
857: /* should be called in PCSetFromOptions_GAMG(), but cannot be called prior to PCMGSetLevels() */
858: PetscObjectOptionsBegin( (PetscObject)pc );
859: PCSetFromOptions_MG( pc );
860: PetscOptionsEnd();
861: if (mg->galerkin != 2) SETERRQ(wcomm,PETSC_ERR_USER,"GAMG does Galerkin manually so the -pc_mg_galerkin option must not be used.");
863: /* create cheby smoothers */
864: for ( lidx = 1, level = pc_gamg->Nlevels-2;
865: lidx <= fine_level;
866: lidx++, level--) {
867: KSP smoother;
868: PetscBool flag;
869: PC subpc;
871: PCMGGetSmoother( pc, lidx, &smoother );
872: KSPGetPC( smoother, &subpc );
874: /* create field split PC, get subsmoother */
875: if( stokes ) {
876: KSP *ksps;
877: PetscInt nn;
878: PCFieldSplitGetSubKSP(subpc,&nn,&ksps);
879: smoother = ksps[0];
880: KSPGetPC( smoother, &subpc );
881: PetscFree( ksps );
882: }
884: /* do my own cheby */
885: PetscObjectTypeCompare( (PetscObject)smoother, KSPCHEBYSHEV, &flag );
886: if( flag ) {
887: PetscReal emax, emin;
888: PetscObjectTypeCompare( (PetscObject)subpc, PCJACOBI, &flag );
889: if( flag && emaxs[level] > 0.0 ) emax=emaxs[level]; /* eigen estimate only for diagonal PC */
890: else{ /* eigen estimate 'emax' */
891: KSP eksp; Mat Lmat = Aarr[level];
892: Vec bb, xx;
894: MatGetVecs( Lmat, &bb, 0 );
895: MatGetVecs( Lmat, &xx, 0 );
896: {
897: PetscRandom rctx;
898: PetscRandomCreate(wcomm,&rctx);
899: PetscRandomSetFromOptions(rctx);
900: VecSetRandom(bb,rctx);
901: PetscRandomDestroy( &rctx );
902: }
904: if( removedEqs[level] ) {
905: /* being very careful - zeroing out BC rows (this is not done in agg.c estimates) */
906: PetscScalar *zeros;
907: PetscInt ii,jj, *idx_bs, sz, bs=level_bs[level];
908: const PetscInt *idx;
909: ISGetLocalSize( removedEqs[level], &sz );
910: PetscMalloc( bs*sz*sizeof(PetscScalar), &zeros );
911: for(ii=0;ii<bs*sz;ii++) zeros[ii] = 0.;
912: PetscMalloc( bs*sz*sizeof(PetscInt), &idx_bs );
913: ISGetIndices( removedEqs[level], &idx);
914: for(ii=0;ii<sz;ii++) {
915: for(jj=0;jj<bs;jj++) {
916: idx_bs[ii*bs+jj] = bs*idx[ii]+jj; /* all 'bs' dofs of each removed node, not just the last */
917: }
918: }
919: ISRestoreIndices( removedEqs[level], &idx );
920: if( sz > 0 ) {
921: VecSetValues( bb, bs*sz, idx_bs, zeros, INSERT_VALUES );
922: }
923: PetscFree( idx_bs );
924: PetscFree( zeros );
925: VecAssemblyBegin(bb);
926: VecAssemblyEnd(bb);
927: }
928: KSPCreate( wcomm, &eksp );
929: KSPSetTolerances( eksp, PETSC_DEFAULT, PETSC_DEFAULT, PETSC_DEFAULT, 10 );
930:
931: KSPSetNormType( eksp, KSP_NORM_NONE );
932: KSPSetOptionsPrefix(eksp,((PetscObject)pc)->prefix);
933: KSPAppendOptionsPrefix( eksp, "gamg_est_");
934: KSPSetFromOptions( eksp );
936: KSPSetInitialGuessNonzero( eksp, PETSC_FALSE );
937: KSPSetOperators( eksp, Lmat, Lmat, SAME_NONZERO_PATTERN ); CHKERRQ( ierr );
938: KSPSetComputeSingularValues( eksp,PETSC_TRUE );
940: /* set PC type to be same as smoother */
941: KSPSetPC( eksp, subpc ); CHKERRQ( ierr );
943: /* solve - keep stuff out of logging */
944: PetscLogEventDeactivate(KSP_Solve);
945: PetscLogEventDeactivate(PC_Apply);
946: KSPSolve( eksp, bb, xx );
947: PetscLogEventActivate(KSP_Solve);
948: PetscLogEventActivate(PC_Apply);
949:
950: KSPComputeExtremeSingularValues( eksp, &emax, &emin );
951:
952: VecDestroy( &xx );
953: VecDestroy( &bb );
954: KSPDestroy( &eksp );
955:
956: if( pc_gamg->verbose > 0 ) {
957: PetscInt N1, tt;
958: MatGetSize( Aarr[level], &N1, &tt );
959: PetscPrintf(wcomm,"\t\t\t%s PC setup max eigen=%e min=%e on level %d (N=%d)\n",__FUNCT__,emax,emin,lidx,N1);
960: }
961: }
962: {
963: PetscInt N1, N0;
964: MatGetSize( Aarr[level], &N1, PETSC_NULL );
965: MatGetSize( Aarr[level+1], &N0, PETSC_NULL );
966: /* heuristic: estimate emin from emax and the coarsening ratio */
967: emin = 1.*emax/((PetscReal)N1/(PetscReal)N0);
968: emax *= 1.05;
969: }
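/* Worked example of the bounds above (editor's illustration): with fine
   N1=8000, coarse N0=1000 and a measured emax=2.0, the coarsening ratio
   is 8, so Chebyshev is given [emin,emax] = [2.0/8, 2.0*1.05] = [0.25, 2.1]
   and targets the upper part of the spectrum that smoothing must damp. */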
970: KSPChebyshevSetEigenvalues( smoother, emax, emin );
971: } /* setup chebyshev flag */
973: if( removedEqs[level] ) {
974: ISDestroy( &removedEqs[level] );
975: }
976: } /* non-coarse levels */
977:
978: /* clean up */
979: for(level=1;level<pc_gamg->Nlevels;level++){
980: MatDestroy( &Parr[level] );
981: MatDestroy( &Aarr[level] );
982: }
984: PCSetUp_MG( pc );CHKERRQ( ierr );
985:
986: if( PETSC_FALSE ){
987: KSP smoother; /* PCSetUp_MG seems to insist on setting this to GMRES on coarse grid */
988: PCMGGetSmoother( pc, 0, &smoother );
989: KSPSetType( smoother, KSPPREONLY );
990: }
991: }
992: else {
993: KSP smoother;
994: if (pc_gamg->verbose) PetscPrintf(wcomm,"\t[%d]%s one level solver used (system is seen as DD). Using default solver.\n",mype,__FUNCT__);
995: PCMGGetSmoother( pc, 0, &smoother );
996: KSPSetOperators( smoother, Aarr[0], Aarr[0], SAME_NONZERO_PATTERN );
997: KSPSetType( smoother, KSPPREONLY );
998: PCSetUp_MG( pc );CHKERRQ( ierr );
999: }
1001: return(0);
1002: }
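/*
   A minimal caller-side sketch showing how PCSetUp_GAMG() above is reached
   (editor's illustration, compiled out; 'ExampleSolveWithGAMG' is
   hypothetical, using standard petsc-3.3 KSP/PC calls plus the setters
   defined later in this file):
*/
#if 0
static PetscErrorCode ExampleSolveWithGAMG( Mat A, Vec b, Vec x )
{
  PetscErrorCode ierr;
  KSP            ksp;
  PC             pc;

  ierr = KSPCreate( ((PetscObject)A)->comm, &ksp );CHKERRQ(ierr);
  ierr = KSPSetOperators( ksp, A, A, SAME_NONZERO_PATTERN );CHKERRQ(ierr);
  ierr = KSPGetPC( ksp, &pc );CHKERRQ(ierr);
  ierr = PCSetType( pc, PCGAMG );CHKERRQ(ierr);        /* runs PCCreate_GAMG() */
  ierr = PCGAMGSetType( pc, GAMGAGG );CHKERRQ(ierr);   /* smoothed aggregation */
  ierr = PCGAMGSetThreshold( pc, 0.01 );CHKERRQ(ierr);
  ierr = PCGAMGSetProcEqLim( pc, 200 );CHKERRQ(ierr);
  ierr = KSPSetFromOptions( ksp );CHKERRQ(ierr);
  ierr = KSPSolve( ksp, b, x );CHKERRQ(ierr);          /* triggers PCSetUp_GAMG() */
  ierr = KSPDestroy( &ksp );CHKERRQ(ierr);
  return(0);
}
#endif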
1004: /* ------------------------------------------------------------------------- */
1005: /*
1006: PCDestroy_GAMG - Destroys the private context for the GAMG preconditioner
1007: that was created with PCCreate_GAMG().
1009: Input Parameter:
1010: . pc - the preconditioner context
1012: Application Interface Routine: PCDestroy()
1013: */
1016: PetscErrorCode PCDestroy_GAMG( PC pc )
1017: {
1018: PetscErrorCode ierr;
1019: PC_MG *mg = (PC_MG*)pc->data;
1020: PC_GAMG *pc_gamg= (PC_GAMG*)mg->innerctx;
1023: PCReset_GAMG( pc );
1024: PetscFree( pc_gamg );
1025: PCDestroy_MG( pc );
1026: return(0);
1027: }
1032: /*@
1033: PCGAMGSetProcEqLim - Set number of equations to aim for on coarse grids via
1034: processor reduction.
1036: Not Collective on PC
1038: Input Parameters:
1039: + pc - the preconditioner context
1040: - n - limit (goal) on the number of equations per process on coarse grids
1042: Options Database Key:
1043: . -pc_gamg_process_eq_limit <n>
1045: Level: intermediate
1047: Concepts: Unstructured multigrid preconditioner
1049: .seealso: ()
1050: @*/
1051: PetscErrorCode PCGAMGSetProcEqLim(PC pc, PetscInt n)
1052: {
1054:
1057: PetscTryMethod(pc,"PCGAMGSetProcEqLim_C",(PC,PetscInt),(pc,n));
1058: return(0);
1059: }
1061: EXTERN_C_BEGIN
1064: PetscErrorCode PCGAMGSetProcEqLim_GAMG(PC pc, PetscInt n)
1065: {
1066: PC_MG *mg = (PC_MG*)pc->data;
1067: PC_GAMG *pc_gamg = (PC_GAMG*)mg->innerctx;
1068:
1070: if(n>0) pc_gamg->min_eq_proc = n;
1071: return(0);
1072: }
1073: EXTERN_C_END
1077: /*@
1078: PCGAMGSetCoarseEqLim - Set max number of equations on coarse grids.
1080: Collective on PC
1082: Input Parameters:
1083: + pc - the preconditioner context
1084: - n - maximum number of equations on the coarsest grid
1086: Options Database Key:
1087: . -pc_gamg_coarse_eq_limit <n>
1089: Level: intermediate
1091: Concepts: Unstructured multigrid preconditioner
1093: .seealso: ()
1094: @*/
1095: PetscErrorCode PCGAMGSetCoarseEqLim(PC pc, PetscInt n)
1096: {
1098:
1101: PetscTryMethod(pc,"PCGAMGSetCoarseEqLim_C",(PC,PetscInt),(pc,n));
1102: return(0);
1103: }
1105: EXTERN_C_BEGIN
1108: PetscErrorCode PCGAMGSetCoarseEqLim_GAMG(PC pc, PetscInt n)
1109: {
1110: PC_MG *mg = (PC_MG*)pc->data;
1111: PC_GAMG *pc_gamg = (PC_GAMG*)mg->innerctx;
1112:
1114: if(n>0) pc_gamg->coarse_eq_limit = n;
1115: return(0);
1116: }
1117: EXTERN_C_END
1121: /*@
1122: PCGAMGSetRepartitioning - Repartition the coarse grids
1124: Collective on PC
1126: Input Parameters:
1127: + pc - the preconditioner context
1128: - n - PETSC_TRUE to repartition the coarse grids
1130: Options Database Key:
1131: . -pc_gamg_repartition <true,false>
1133: Level: intermediate
1135: Concepts: Unstructured multigrid preconditioner
1137: .seealso: ()
1138: @*/
1139: PetscErrorCode PCGAMGSetRepartitioning(PC pc, PetscBool n)
1140: {
1142:
1145: PetscTryMethod(pc,"PCGAMGSetRepartitioning_C",(PC,PetscBool),(pc,n));
1146: return(0);
1147: }
1149: EXTERN_C_BEGIN
1152: PetscErrorCode PCGAMGSetRepartitioning_GAMG(PC pc, PetscBool n)
1153: {
1154: PC_MG *mg = (PC_MG*)pc->data;
1155: PC_GAMG *pc_gamg = (PC_GAMG*)mg->innerctx;
1156:
1158: pc_gamg->repart = n;
1159: return(0);
1160: }
1161: EXTERN_C_END
1165: /*@
1166: PCGAMGSetUseASMAggs - Use the aggregates from the coarsening as the subdomains of the GASM smoother
1168: Collective on PC
1170: Input Parameters:
1171: + pc - the preconditioner context
1172: - n - PETSC_TRUE to use the aggregates as GASM subdomains
1174: Options Database Key:
1175: . -pc_gamg_use_agg_gasm <true,false>
1177: Level: intermediate
1179: Concepts: Unstructured multigrid preconditioner
1181: .seealso: ()
1182: @*/
1183: PetscErrorCode PCGAMGSetUseASMAggs(PC pc, PetscBool n)
1184: {
1186:
1189: PetscTryMethod(pc,"PCGAMGSetUseASMAggs_C",(PC,PetscBool),(pc,n));
1190: return(0);
1191: }
1193: EXTERN_C_BEGIN
1196: PetscErrorCode PCGAMGSetUseASMAggs_GAMG(PC pc, PetscBool n)
1197: {
1198: PC_MG *mg = (PC_MG*)pc->data;
1199: PC_GAMG *pc_gamg = (PC_GAMG*)mg->innerctx;
1200:
1202: pc_gamg->use_aggs_in_gasm = n;
1203: return(0);
1204: }
1205: EXTERN_C_END
1209: /*@
1210: PCGAMGSetNlevels - Set the maximum number of multigrid levels
1212: Not collective on PC
1214: Input Parameters:
1215: + pc - the preconditioner context
1216: - n - the maximum number of levels
1218: Options Database Key:
1219: . -pc_mg_levels <n>
1221: Level: intermediate
1223: Concepts: Unstructured multigrid preconditioner
1225: .seealso: ()
1226: @*/
1227: PetscErrorCode PCGAMGSetNlevels(PC pc, PetscInt n)
1228: {
1230:
1233: PetscTryMethod(pc,"PCGAMGSetNlevels_C",(PC,PetscInt),(pc,n));
1234: return(0);
1235: }
1237: EXTERN_C_BEGIN
1240: PetscErrorCode PCGAMGSetNlevels_GAMG(PC pc, PetscInt n)
1241: {
1242: PC_MG *mg = (PC_MG*)pc->data;
1243: PC_GAMG *pc_gamg = (PC_GAMG*)mg->innerctx;
1244:
1246: pc_gamg->Nlevels = n;
1247: return(0);
1248: }
1249: EXTERN_C_END
1253: /*@
1254: PCGAMGSetThreshold - Relative threshold to use for dropping edges in aggregation graph
1256: Not collective on PC
1258: Input Parameters:
1259: + pc - the preconditioner context
1260: - n - the relative drop threshold (edges weaker than this are dropped from the aggregation graph)
1262: Options Database Key:
1263: . -pc_gamg_threshold <n>
1265: Level: intermediate
1267: Concepts: Unstructured multigrid preconditioner
1269: .seealso: ()
1270: @*/
1271: PetscErrorCode PCGAMGSetThreshold(PC pc, PetscReal n)
1272: {
1274:
1277: PetscTryMethod(pc,"PCGAMGSetThreshold_C",(PC,PetscReal),(pc,n));
1278: return(0);
1279: }
1281: EXTERN_C_BEGIN
1284: PetscErrorCode PCGAMGSetThreshold_GAMG(PC pc, PetscReal n)
1285: {
1286: PC_MG *mg = (PC_MG*)pc->data;
1287: PC_GAMG *pc_gamg = (PC_GAMG*)mg->innerctx;
1288:
1290: pc_gamg->threshold = n;
1291: return(0);
1292: }
1293: EXTERN_C_END
1297: /*@
1298: PCGAMGSetType - Set the AMG solution method; calls the sub-create method for the given type
1300: Collective on PC
1302: Input Parameters:
1303: + pc - the preconditioner context
1304: - type - the GAMG method (GAMGAGG or GAMGGEO)
1306: Options Database Key:
1307: . -pc_gamg_type <type>
1309: Level: intermediate
1311: Concepts: Unstructured multigrid preconditioner
1313: .seealso: ()
1314: @*/
1315: PetscErrorCode PCGAMGSetType( PC pc, const PCGAMGType type )
1316: {
1318:
1321: PetscTryMethod(pc,"PCGAMGSetType_C",(PC,const PCGAMGType),(pc,type));
1322:
1323: return(0);
1324: }
1325:
1326: EXTERN_C_BEGIN
1329: PetscErrorCode PCGAMGSetType_GAMG( PC pc, const PCGAMGType type )
1330: {
1331: PetscErrorCode ierr,(*r)(PC);
1332:
1334: PetscFListFind(GAMGList,((PetscObject)pc)->comm,type,PETSC_FALSE,(PetscVoidStarFunction)&r);
1335:
1337: if (!r) SETERRQ1(PETSC_COMM_SELF,PETSC_ERR_ARG_UNKNOWN_TYPE,"Unknown GAMG type %s given",type);
1339: /* call sub create method */
1340: (*r)(pc);
1342: return(0);
1343: }
1344: EXTERN_C_END
1348: PetscErrorCode PCSetFromOptions_GAMG( PC pc )
1349: {
1350: PetscErrorCode ierr;
1351: PC_MG *mg = (PC_MG*)pc->data;
1352: PC_GAMG *pc_gamg = (PC_GAMG*)mg->innerctx;
1353: PetscBool flag;
1354: MPI_Comm wcomm = ((PetscObject)pc)->comm;
1357: PetscOptionsHead("GAMG options");
1358: {
1359: /* -pc_gamg_verbose */
1360: PetscOptionsInt("-pc_gamg_verbose","Verbose (debugging) output for PCGAMG",
1361: "none", pc_gamg->verbose,
1362: &pc_gamg->verbose, PETSC_NULL );
1363:
1364:
1365: /* -pc_gamg_repartition */
1366: PetscOptionsBool("-pc_gamg_repartition",
1367: "Repartion coarse grids (false)",
1368: "PCGAMGRepartitioning",
1369: pc_gamg->repart,
1370: &pc_gamg->repart,
1371: &flag);
1372:
1373:
1374: /* -pc_gamg_use_agg_gasm */
1375: PetscOptionsBool("-pc_gamg_use_agg_gasm",
1376: "Use aggregation agragates for GASM smoother (false)",
1377: "PCGAMGUseASMAggs",
1378: pc_gamg->use_aggs_in_gasm,
1379: &pc_gamg->use_aggs_in_gasm,
1380: &flag);
1381:
1382:
1383: /* -pc_gamg_process_eq_limit */
1384: PetscOptionsInt("-pc_gamg_process_eq_limit",
1385: "Limit (goal) on number of equations per process on coarse grids",
1386: "PCGAMGSetProcEqLim",
1387: pc_gamg->min_eq_proc,
1388: &pc_gamg->min_eq_proc,
1389: &flag );
1390:
1391:
1392: /* -pc_gamg_coarse_eq_limit */
1393: PetscOptionsInt("-pc_gamg_coarse_eq_limit",
1394: "Limit on number of equations for the coarse grid",
1395: "PCGAMGSetCoarseEqLim",
1396: pc_gamg->coarse_eq_limit,
1397: &pc_gamg->coarse_eq_limit,
1398: &flag );
1399:
1401: /* -pc_gamg_threshold */
1402: PetscOptionsReal("-pc_gamg_threshold",
1403: "Relative threshold to use for dropping edges in aggregation graph",
1404: "PCGAMGSetThreshold",
1405: pc_gamg->threshold,
1406: &pc_gamg->threshold,
1407: &flag );
1408:
1409: if(flag && pc_gamg->verbose) PetscPrintf(wcomm,"\t[%d]%s threshold set %e\n",0,__FUNCT__,pc_gamg->threshold);
1411: PetscOptionsInt("-pc_mg_levels",
1412: "Set number of MG levels",
1413: "PCGAMGSetNlevels",
1414: pc_gamg->Nlevels,
1415: &pc_gamg->Nlevels,
1416: &flag );
1417: }
1418: PetscOptionsTail();
1420: return(0);
1421: }
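/*
   Example command line exercising the options registered above (editor's
   illustration; the executable name is hypothetical, and the type names
   are the strings bound to GAMGAGG/GAMGGEO):

     mpiexec -n 8 ./myapp -pc_type gamg -pc_gamg_type agg \
        -pc_gamg_verbose 1 -pc_gamg_repartition true \
        -pc_gamg_process_eq_limit 200 -pc_gamg_coarse_eq_limit 1000 \
        -pc_gamg_threshold 0.01 -pc_mg_levels 5
*/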
1423: /* -------------------------------------------------------------------------- */
1424: /*MC
1425: PCGAMG - Geometric algebraic multigrid (AMG) preconditioning framework.
1426: - This is the entry point to GAMG, registered in pcregis.c
1428: Options Database Keys:
1429: Multigrid options(inherited)
1430: + -pc_mg_cycles <1>: 1 for V cycle, 2 for W-cycle (PCMGSetCycleType)
1431: . -pc_mg_smoothup <1>: Number of post-smoothing steps (PCMGSetNumberSmoothUp)
1432: . -pc_mg_smoothdown <1>: Number of pre-smoothing steps (PCMGSetNumberSmoothDown)
1433: - -pc_mg_type <multiplicative>: (one of) additive multiplicative full cascade kascade
1435: Level: intermediate
1437: Concepts: multigrid
1439: .seealso: PCCreate(), PCSetType(), PCType (for list of available types), PC, PCMGType,
1440: PCMGSetLevels(), PCMGGetLevels(), PCMGSetType(), PCMGSetCycleType(), PCMGSetNumberSmoothDown(),
1441: PCMGSetNumberSmoothUp(), PCMGGetCoarseSolve(), PCMGSetResidual(), PCMGSetInterpolation(),
1442: PCMGSetRestriction(), PCMGGetSmoother(), PCMGGetSmootherUp(), PCMGGetSmootherDown(),
1443: PCMGSetCyclesOnLevel(), PCMGSetRhs(), PCMGSetX(), PCMGSetR()
1444: M*/
1445: EXTERN_C_BEGIN
1448: PetscErrorCode PCCreate_GAMG( PC pc )
1449: {
1450: PetscErrorCode ierr;
1451: PC_GAMG *pc_gamg;
1452: PC_MG *mg;
1453: #if defined PETSC_GAMG_USE_LOG
1454: static long count = 0;
1455: #endif
1459: /* PCGAMG is an inherited class of PCMG. Initialize pc as PCMG */
1460: PCSetType( pc, PCMG ); /* calls PCCreate_MG() and MGCreate_Private() */
1461: PetscObjectChangeTypeName( (PetscObject)pc, PCGAMG );
1463: /* create a supporting struct and attach it to pc */
1464: PetscNewLog( pc, PC_GAMG, &pc_gamg );
1465: mg = (PC_MG*)pc->data;
1466: mg->galerkin = 2; /* Use Galerkin, but it is computed externally */
1467: mg->innerctx = pc_gamg;
1469: pc_gamg->setup_count = 0;
1470: /* these should be in subctx but repartitioning needs simple arrays */
1471: pc_gamg->data_sz = 0;
1472: pc_gamg->data = 0;
1474: /* register AMG type */
1475: if( !GAMGList ){
1476: PetscFListAdd(&GAMGList,GAMGGEO,"PCCreateGAMG_GEO",(void(*)(void))PCCreateGAMG_GEO);
1477: PetscFListAdd(&GAMGList,GAMGAGG,"PCCreateGAMG_AGG",(void(*)(void))PCCreateGAMG_AGG);
1478: }
1480: /* overwrite the pointers of PCMG by the functions of base class PCGAMG */
1481: pc->ops->setfromoptions = PCSetFromOptions_GAMG;
1482: pc->ops->setup = PCSetUp_GAMG;
1483: pc->ops->reset = PCReset_GAMG;
1484: pc->ops->destroy = PCDestroy_GAMG;
1486: PetscObjectComposeFunctionDynamic( (PetscObject)pc,
1487: "PCGAMGSetProcEqLim_C",
1488: "PCGAMGSetProcEqLim_GAMG",
1489: PCGAMGSetProcEqLim_GAMG);
1490:
1492: PetscObjectComposeFunctionDynamic( (PetscObject)pc,
1493: "PCGAMGSetCoarseEqLim_C",
1494: "PCGAMGSetCoarseEqLim_GAMG",
1495: PCGAMGSetCoarseEqLim_GAMG);
1496:
1498: PetscObjectComposeFunctionDynamic( (PetscObject)pc,
1499: "PCGAMGSetRepartitioning_C",
1500: "PCGAMGSetRepartitioning_GAMG",
1501: PCGAMGSetRepartitioning_GAMG);
1502:
1504: PetscObjectComposeFunctionDynamic( (PetscObject)pc,
1505: "PCGAMGSetUseASMAggs_C",
1506: "PCGAMGSetUseASMAggs_GAMG",
1507: PCGAMGSetUseASMAggs_GAMG);
1508:
1510: PetscObjectComposeFunctionDynamic( (PetscObject)pc,
1511: "PCGAMGSetThreshold_C",
1512: "PCGAMGSetThreshold_GAMG",
1513: PCGAMGSetThreshold_GAMG);
1514:
1516: PetscObjectComposeFunctionDynamic( (PetscObject)pc,
1517: "PCGAMGSetType_C",
1518: "PCGAMGSetType_GAMG",
1519: PCGAMGSetType_GAMG);
1520:
1522: pc_gamg->repart = PETSC_FALSE;
1523: pc_gamg->use_aggs_in_gasm = PETSC_FALSE;
1524: pc_gamg->min_eq_proc = 100;
1525: pc_gamg->coarse_eq_limit = 800;
1526: pc_gamg->threshold = 0.001;
1527: pc_gamg->Nlevels = GAMG_MAXLEVELS;
1528: pc_gamg->verbose = 0;
1529: pc_gamg->emax_id = -1;
1531: /* private events */
1532: #if defined PETSC_GAMG_USE_LOG
1533: if( count++ == 0 ) {
1534: PetscLogEventRegister("GAMG: createProl", PC_CLASSID, &petsc_gamg_setup_events[SET1]);
1535: PetscLogEventRegister(" Graph", PC_CLASSID, &petsc_gamg_setup_events[GRAPH]);
1536: /* PetscLogEventRegister(" G.Mat", PC_CLASSID, &petsc_gamg_setup_events[GRAPH_MAT]); */
1537: /* PetscLogEventRegister(" G.Filter", PC_CLASSID, &petsc_gamg_setup_events[GRAPH_FILTER]); */
1538: /* PetscLogEventRegister(" G.Square", PC_CLASSID, &petsc_gamg_setup_events[GRAPH_SQR]); */
1539: PetscLogEventRegister(" MIS/Agg", PC_CLASSID, &petsc_gamg_setup_events[SET4]);
1540: PetscLogEventRegister(" geo: growSupp", PC_CLASSID, &petsc_gamg_setup_events[SET5]);
1541: PetscLogEventRegister(" geo: triangle", PC_CLASSID, &petsc_gamg_setup_events[SET6]);
1542: PetscLogEventRegister(" search&set", PC_CLASSID, &petsc_gamg_setup_events[FIND_V]);
1543: PetscLogEventRegister(" SA: col data", PC_CLASSID, &petsc_gamg_setup_events[SET7]);
1544: PetscLogEventRegister(" SA: frmProl0", PC_CLASSID, &petsc_gamg_setup_events[SET8]);
1545: PetscLogEventRegister(" SA: smooth", PC_CLASSID, &petsc_gamg_setup_events[SET9]);
1546: PetscLogEventRegister("GAMG: partLevel", PC_CLASSID, &petsc_gamg_setup_events[SET2]);
1547: PetscLogEventRegister(" repartition", PC_CLASSID, &petsc_gamg_setup_events[SET12]);
1548: PetscLogEventRegister(" Invert-Sort", PC_CLASSID, &petsc_gamg_setup_events[SET13]);
1549: PetscLogEventRegister(" Move A", PC_CLASSID, &petsc_gamg_setup_events[SET14]);
1550: PetscLogEventRegister(" Move P", PC_CLASSID, &petsc_gamg_setup_events[SET15]);
1552: /* PetscLogEventRegister(" PL move data", PC_CLASSID, &petsc_gamg_setup_events[SET13]); */
1553: /* PetscLogEventRegister("GAMG: fix", PC_CLASSID, &petsc_gamg_setup_events[SET10]); */
1554: /* PetscLogEventRegister("GAMG: set levels", PC_CLASSID, &petsc_gamg_setup_events[SET11]); */
1555: /* create timer stages */
1556: #if defined GAMG_STAGES
1557: {
1558: char str[32];
1559: sprintf(str,"MG Level %d (finest)",0);
1560: PetscLogStageRegister(str, &gamg_stages[0]);
1561: PetscInt lidx;
1562: for (lidx=1;lidx<9;lidx++){
1563: sprintf(str,"MG Level %d",lidx);
1564: PetscLogStageRegister(str, &gamg_stages[lidx]);
1565: }
1566: }
1567: #endif
1568: }
1569: #endif
1570: /* general events */
1571: #if defined PETSC_USE_LOG
1572: PetscLogEventRegister("PCGAMGgraph_AGG", 0, &PC_GAMGGgraph_AGG);
1573: PetscLogEventRegister("PCGAMGgraph_GEO", PC_CLASSID, &PC_GAMGGgraph_GEO);
1574: PetscLogEventRegister("PCGAMGcoarse_AGG", PC_CLASSID, &PC_GAMGCoarsen_AGG);
1575: PetscLogEventRegister("PCGAMGcoarse_GEO", PC_CLASSID, &PC_GAMGCoarsen_GEO);
1576: PetscLogEventRegister("PCGAMGProl_AGG", PC_CLASSID, &PC_GAMGProlongator_AGG);
1577: PetscLogEventRegister("PCGAMGProl_GEO", PC_CLASSID, &PC_GAMGProlongator_GEO);
1578: PetscLogEventRegister("PCGAMGPOpt_AGG", PC_CLASSID, &PC_GAMGOptprol_AGG);
1579: PetscLogEventRegister("GAMGKKTProl_AGG", PC_CLASSID, &PC_GAMGKKTProl_AGG);
1580: #endif
1582: /* instantiate derived type */
1583: PetscOptionsHead("GAMG options");
1584: {
1585: char tname[256] = GAMGAGG;
1586: PetscOptionsList("-pc_gamg_type","Type of GAMG method","PCGAMGSetType",
1587: GAMGList, tname, tname, sizeof(tname), PETSC_NULL );
1588:
1589: PCGAMGSetType( pc, tname );
1590: }
1591: PetscOptionsTail();
1593: return(0);
1594: }
1595: EXTERN_C_END