Actual source code: tools.c
petsc-3.3-p5 2012-12-01
/*
 GAMG geometric-algebraic multigrid PC - Mark Adams 2011
 */
#include "petsc-private/matimpl.h" /*I "petscmat.h" I*/
#include <../src/ksp/pc/impls/gamg/gamg.h> /*I "petscpc.h" I*/
#include <petsc-private/kspimpl.h>
#include <assert.h> /* for the assert() in GAMGTableAdd() */
/* -------------------------------------------------------------------------- */
/*
   PCGAMGCreateGraph - create simple scaled scalar graph from matrix

 Input Parameter:
 . Amat - matrix
 Output Parameter:
 . a_Gmat - output scalar graph (symmetric?)
 */
#undef __FUNCT__
#define __FUNCT__ "PCGAMGCreateGraph"
PetscErrorCode PCGAMGCreateGraph( const Mat Amat, Mat *a_Gmat )
{
  PetscErrorCode ierr;
  PetscInt       Istart,Iend,Ii,jj,ncols,nloc,NN,MM,bs;
  PetscMPIInt    mype, npe;
  MPI_Comm       wcomm = ((PetscObject)Amat)->comm;
  Mat            Gmat;

  PetscFunctionBegin;
  ierr = MPI_Comm_rank(wcomm,&mype);CHKERRQ(ierr);
  ierr = MPI_Comm_size(wcomm,&npe);CHKERRQ(ierr);
  ierr = MatGetOwnershipRange( Amat, &Istart, &Iend );CHKERRQ(ierr);
  ierr = MatGetSize( Amat, &MM, &NN );CHKERRQ(ierr);
  ierr = MatGetBlockSize( Amat, &bs );CHKERRQ(ierr);
  nloc = (Iend-Istart)/bs;

#if defined PETSC_GAMG_USE_LOG
  ierr = PetscLogEventBegin(petsc_gamg_setup_events[GRAPH],0,0,0,0);CHKERRQ(ierr);
#endif
  if( bs > 1 ) {
    const PetscScalar *vals;
    const PetscInt    *idx;
    PetscInt          *d_nnz, *o_nnz;
    /* count nnz, there is sparsity in here so this might not be enough */
    ierr = PetscMalloc( nloc*sizeof(PetscInt), &d_nnz );CHKERRQ(ierr);
    ierr = PetscMalloc( nloc*sizeof(PetscInt), &o_nnz );CHKERRQ(ierr);
    for ( Ii = Istart, jj = 0 ; Ii < Iend ; Ii += bs, jj++ ) {
      ierr = MatGetRow(Amat,Ii,&ncols,0,0);CHKERRQ(ierr);
      d_nnz[jj] = ncols; /* very pessimistic */
      o_nnz[jj] = ncols;
      if( d_nnz[jj] > nloc ) d_nnz[jj] = nloc;
      if( o_nnz[jj] > (NN/bs-nloc) ) o_nnz[jj] = NN/bs-nloc;
      ierr = MatRestoreRow(Amat,Ii,&ncols,0,0);CHKERRQ(ierr);
    }
    /* get scalar copy (norms) of matrix -- AIJ specific!!! */
    ierr = MatCreateAIJ( wcomm, nloc, nloc,
                         PETSC_DETERMINE, PETSC_DETERMINE,
                         0, d_nnz, 0, o_nnz, &Gmat );CHKERRQ(ierr);
    ierr = PetscFree( d_nnz );CHKERRQ(ierr);
    ierr = PetscFree( o_nnz );CHKERRQ(ierr);
    for( Ii = Istart; Ii < Iend ; Ii++ ) {
      PetscInt dest_row = Ii/bs;
      ierr = MatGetRow(Amat,Ii,&ncols,&idx,&vals);CHKERRQ(ierr);
      for(jj=0;jj<ncols;jj++){
        PetscInt    dest_col = idx[jj]/bs;
        PetscScalar sv = PetscAbs(PetscRealPart(vals[jj]));
        ierr = MatSetValues(Gmat,1,&dest_row,1,&dest_col,&sv,ADD_VALUES);CHKERRQ(ierr);
      }
      ierr = MatRestoreRow(Amat,Ii,&ncols,&idx,&vals);CHKERRQ(ierr);
    }
    ierr = MatAssemblyBegin(Gmat,MAT_FINAL_ASSEMBLY);CHKERRQ(ierr);
    ierr = MatAssemblyEnd(Gmat,MAT_FINAL_ASSEMBLY);CHKERRQ(ierr);
  }
  else {
    /* just copy scalar matrix - abs() not taken here but scaled later */
    ierr = MatDuplicate( Amat, MAT_COPY_VALUES, &Gmat );CHKERRQ(ierr);
  }
#if defined PETSC_GAMG_USE_LOG
  ierr = PetscLogEventEnd(petsc_gamg_setup_events[GRAPH],0,0,0,0);CHKERRQ(ierr);
#endif
  *a_Gmat = Gmat;
  PetscFunctionReturn(0);
}
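/* Example (illustrative sketch, not part of the GAMG implementation): build
   the scalar strength graph of a hypothetical assembled AIJ matrix A with
   block size 3 (e.g. 3D elasticity, one graph vertex per 3-dof node). */
#if 0
{
  Mat            A, G;
  PetscErrorCode ierr;
  /* ... create and assemble A, with MatSetBlockSize(A,3) ... */
  ierr = PCGAMGCreateGraph( A, &G );CHKERRQ(ierr); /* G(i,j) = sum of |a_kl| over the 3x3 block (i,j) */
  /* ... coarsen using G ... */
  ierr = MatDestroy( &G );CHKERRQ(ierr);
}
#endif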
/* -------------------------------------------------------------------------- */
/*
   PCGAMGFilterGraph - filter graph and symmetrize if needed

 Input Parameter:
 . vfilter - threshold parameter [0,1)
 . symm - symmetrize?
 In/Output Parameter:
 . a_Gmat - original graph
 */
#undef __FUNCT__
#define __FUNCT__ "PCGAMGFilterGraph"
PetscErrorCode PCGAMGFilterGraph( Mat *a_Gmat, const PetscReal vfilter, const PetscBool symm, const PetscInt verbose )
{
  PetscErrorCode    ierr;
  PetscInt          Istart,Iend,Ii,jj,ncols,nnz0,nnz1,NN,MM,nloc;
  PetscMPIInt       mype, npe;
  Mat               Gmat = *a_Gmat, tGmat, matTrans;
  MPI_Comm          wcomm = ((PetscObject)Gmat)->comm;
  const PetscScalar *vals;
  const PetscInt    *idx;
  PetscInt          *d_nnz, *o_nnz;
  Vec               diag;

  PetscFunctionBegin;
  ierr = MPI_Comm_rank(wcomm,&mype);CHKERRQ(ierr);
  ierr = MPI_Comm_size(wcomm,&npe);CHKERRQ(ierr);
  ierr = MatGetOwnershipRange( Gmat, &Istart, &Iend );CHKERRQ(ierr);
  nloc = Iend - Istart;
  ierr = MatGetSize( Gmat, &MM, &NN );CHKERRQ(ierr);
#if defined PETSC_GAMG_USE_LOG
  ierr = PetscLogEventBegin(petsc_gamg_setup_events[GRAPH],0,0,0,0);CHKERRQ(ierr);
#endif
  /* scale Gmat so filter works */
  ierr = MatGetVecs( Gmat, &diag, 0 );CHKERRQ(ierr);
  ierr = MatGetDiagonal( Gmat, diag );CHKERRQ(ierr);
  ierr = VecReciprocal( diag );CHKERRQ(ierr);
  ierr = VecSqrtAbs( diag );CHKERRQ(ierr);
  ierr = MatDiagonalScale( Gmat, diag, diag );CHKERRQ(ierr);
  ierr = VecDestroy( &diag );CHKERRQ(ierr);
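  /* Note: the scaling above forms D^{-1/2} G D^{-1/2}, i.e. each entry
     becomes g_ij / sqrt(|g_ii| |g_jj|), so vfilter is a threshold relative
     to the diagonal (whose scaled entries have magnitude 1).  For example,
     g_ii = 4, g_jj = 1, g_ij = -1 scales to |-1|/sqrt(4*1) = 0.5. */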
  if( symm ) {
    ierr = MatTranspose( Gmat, MAT_INITIAL_MATRIX, &matTrans );CHKERRQ(ierr);
  }

  /* filter - dup zeros out matrix */
  ierr = PetscMalloc( nloc*sizeof(PetscInt), &d_nnz );CHKERRQ(ierr);
  ierr = PetscMalloc( nloc*sizeof(PetscInt), &o_nnz );CHKERRQ(ierr);
  for( Ii = Istart, jj = 0 ; Ii < Iend; Ii++, jj++ ){
    ierr = MatGetRow(Gmat,Ii,&ncols,PETSC_NULL,PETSC_NULL);CHKERRQ(ierr);
    d_nnz[jj] = ncols;
    o_nnz[jj] = ncols;
    ierr = MatRestoreRow(Gmat,Ii,&ncols,PETSC_NULL,PETSC_NULL);CHKERRQ(ierr);
    if( symm ) {
      ierr = MatGetRow(matTrans,Ii,&ncols,PETSC_NULL,PETSC_NULL);CHKERRQ(ierr);
      d_nnz[jj] += ncols;
      o_nnz[jj] += ncols;
      ierr = MatRestoreRow(matTrans,Ii,&ncols,PETSC_NULL,PETSC_NULL);CHKERRQ(ierr);
    }
    if( d_nnz[jj] > nloc ) d_nnz[jj] = nloc;
    if( o_nnz[jj] > (MM-nloc) ) o_nnz[jj] = MM - nloc;
  }
  ierr = MatCreateAIJ( wcomm, nloc, nloc, MM, MM, 0, d_nnz, 0, o_nnz, &tGmat );CHKERRQ(ierr);
  ierr = PetscFree( d_nnz );CHKERRQ(ierr);
  ierr = PetscFree( o_nnz );CHKERRQ(ierr);
  if( symm ) {
    ierr = MatDestroy( &matTrans );CHKERRQ(ierr);
  }

  for( Ii = Istart, nnz0 = nnz1 = 0 ; Ii < Iend; Ii++ ){
    ierr = MatGetRow(Gmat,Ii,&ncols,&idx,&vals);CHKERRQ(ierr);
    for(jj=0;jj<ncols;jj++,nnz0++){
      PetscScalar sv = PetscAbs(PetscRealPart(vals[jj]));
      if( PetscRealPart(sv) > vfilter ) {
        nnz1++;
        if( symm ) {
          sv *= 0.5;
          ierr = MatSetValues(tGmat,1,&Ii,1,&idx[jj],&sv,ADD_VALUES);CHKERRQ(ierr);
          ierr = MatSetValues(tGmat,1,&idx[jj],1,&Ii,&sv,ADD_VALUES);CHKERRQ(ierr);
        }
        else {
          ierr = MatSetValues(tGmat,1,&Ii,1,&idx[jj],&sv,ADD_VALUES);CHKERRQ(ierr);
        }
      }
    }
    ierr = MatRestoreRow(Gmat,Ii,&ncols,&idx,&vals);CHKERRQ(ierr);
  }
  ierr = MatAssemblyBegin(tGmat,MAT_FINAL_ASSEMBLY);CHKERRQ(ierr);
  ierr = MatAssemblyEnd(tGmat,MAT_FINAL_ASSEMBLY);CHKERRQ(ierr);

#if defined PETSC_GAMG_USE_LOG
  ierr = PetscLogEventEnd(petsc_gamg_setup_events[GRAPH],0,0,0,0);CHKERRQ(ierr);
#endif

  if( verbose ) {
    if( verbose == 1 ) {
      ierr = PetscPrintf(wcomm,"\t[%d]%s %g%% nnz after filtering, with threshold %g, %g nnz ave. (N=%d)\n",mype,__FUNCT__,
                         100.*(double)nnz1/(double)nnz0,vfilter,(double)nnz0/(double)nloc,MM);CHKERRQ(ierr);
    }
    else {
      PetscInt nnz[2] = {nnz0,nnz1}, out[2];
      ierr = MPI_Allreduce( nnz, out, 2, MPIU_INT, MPI_SUM, wcomm );CHKERRQ(ierr);
      ierr = PetscPrintf(wcomm,"\t[%d]%s %g%% nnz after filtering, with threshold %g, %g nnz ave. (N=%d)\n",mype,__FUNCT__,
                         100.*(double)out[1]/(double)out[0],vfilter,(double)out[0]/(double)MM,MM);CHKERRQ(ierr);
    }
  }

  ierr = MatDestroy( &Gmat );CHKERRQ(ierr);
  *a_Gmat = tGmat;
  PetscFunctionReturn(0);
}
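/* Example (illustrative sketch, not part of the GAMG implementation): drop
   weak couplings from a strength graph G produced by PCGAMGCreateGraph();
   the 0.05 threshold is only a representative value. */
#if 0
{
  Mat            G; /* from PCGAMGCreateGraph() */
  PetscErrorCode ierr;
  ierr = PCGAMGFilterGraph( &G, 0.05, PETSC_TRUE, 0 );CHKERRQ(ierr);
  /* G now holds only entries whose scaled magnitude exceeds 0.05,
     symmetrized as (|G| + |G|^T)/2 because symm = PETSC_TRUE */
}
#endif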
/* -------------------------------------------------------------------------- */
/*
   PCGAMGGetDataWithGhosts - reaches into the MPIAIJ Mat internals, so the
   matrix must be of type MPIAIJ (more than one process)

 Input Parameter:
 . Gmat - MPIAIJ matrix used for the scatters
 . data_sz - number of data terms per node (# cols in output)
 . data_in[nloc*data_sz] - column-oriented data
 Output Parameter:
 . a_stride - number of rows of output
 . a_data_out[stride*data_sz] - output data with ghosts
 */
#undef __FUNCT__
#define __FUNCT__ "PCGAMGGetDataWithGhosts"
PetscErrorCode PCGAMGGetDataWithGhosts( const Mat Gmat,
                                        const PetscInt data_sz,
                                        const PetscReal data_in[],
                                        PetscInt *a_stride,
                                        PetscReal **a_data_out
                                        )
{
  PetscErrorCode ierr;
  PetscMPIInt    mype,npe;
  MPI_Comm       wcomm = ((PetscObject)Gmat)->comm;
  Vec            tmp_crds;
  Mat_MPIAIJ     *mpimat = (Mat_MPIAIJ*)Gmat->data;
  PetscInt       nnodes,num_ghosts,dir,kk,jj,my0,Iend,nloc;
  PetscScalar    *data_arr;
  PetscReal      *datas;
  PetscBool      isMPIAIJ;

  PetscFunctionBegin;
  ierr = PetscObjectTypeCompare( (PetscObject)Gmat, MATMPIAIJ, &isMPIAIJ );CHKERRQ(ierr);
  if( !isMPIAIJ ) SETERRQ(wcomm,PETSC_ERR_SUP,"Require MPIAIJ matrix");
  ierr = MPI_Comm_rank(wcomm,&mype);CHKERRQ(ierr);
  ierr = MPI_Comm_size(wcomm,&npe);CHKERRQ(ierr);
  ierr = MatGetOwnershipRange( Gmat, &my0, &Iend );CHKERRQ(ierr);
  nloc = Iend - my0;
  ierr = VecGetLocalSize( mpimat->lvec, &num_ghosts );CHKERRQ(ierr);
  nnodes = num_ghosts + nloc;
  *a_stride = nnodes;
  ierr = MatGetVecs( Gmat, &tmp_crds, 0 );CHKERRQ(ierr);

  ierr = PetscMalloc( data_sz*nnodes*sizeof(PetscReal), &datas );CHKERRQ(ierr);
  for(dir=0; dir<data_sz; dir++) {
    /* set local, and global */
    for(kk=0; kk<nloc; kk++) {
      PetscInt    gid = my0 + kk;
      PetscScalar crd = (PetscScalar)data_in[dir*nloc + kk]; /* col oriented */
      datas[dir*nnodes + kk] = PetscRealPart(crd);
      ierr = VecSetValues(tmp_crds, 1, &gid, &crd, INSERT_VALUES );CHKERRQ(ierr);
    }
    ierr = VecAssemblyBegin( tmp_crds );CHKERRQ(ierr);
    ierr = VecAssemblyEnd( tmp_crds );CHKERRQ(ierr);
    /* scatter to get ghost values */
    ierr = VecScatterBegin(mpimat->Mvctx,tmp_crds,mpimat->lvec,INSERT_VALUES,SCATTER_FORWARD);CHKERRQ(ierr);
    ierr = VecScatterEnd(mpimat->Mvctx,tmp_crds,mpimat->lvec,INSERT_VALUES,SCATTER_FORWARD);CHKERRQ(ierr);
    ierr = VecGetArray( mpimat->lvec, &data_arr );CHKERRQ(ierr);
    for(kk=nloc,jj=0;jj<num_ghosts;kk++,jj++){
      datas[dir*nnodes + kk] = PetscRealPart(data_arr[jj]);
    }
    ierr = VecRestoreArray( mpimat->lvec, &data_arr );CHKERRQ(ierr);
  }
  ierr = VecDestroy( &tmp_crds );CHKERRQ(ierr);

  *a_data_out = datas;
  PetscFunctionReturn(0);
}
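/* Example (illustrative sketch, not part of the GAMG implementation): gather
   2D nodal coordinates, stored column-major as all x then all y, including
   ghost nodes; 'coords' is a hypothetical caller-owned array of length
   2*nloc, and Gmat is the (MPIAIJ) graph. */
#if 0
{
  PetscReal      *coords, *ghosted;
  PetscInt       stride;
  PetscErrorCode ierr;
  ierr = PCGAMGGetDataWithGhosts( Gmat, 2, coords, &stride, &ghosted );CHKERRQ(ierr);
  /* ghosted[dir*stride + kk] = coordinate 'dir' of node kk; kk >= nloc are ghosts */
  ierr = PetscFree( ghosted );CHKERRQ(ierr);
}
#endif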
/* hash table stuff - simple, not dynamic, key >= 0, hash table
 *
 * GAMGTableCreate
 */
/* avoid overflow */
#define GAMG_HASH(key) ((7*key)%a_tab->size)
#undef __FUNCT__
#define __FUNCT__ "GAMGTableCreate"
PetscErrorCode GAMGTableCreate( PetscInt a_size, GAMGHashTable *a_tab )
{
  PetscErrorCode ierr;
  PetscInt       kk;
  a_tab->size = a_size;
  ierr = PetscMalloc(a_size*sizeof(PetscInt), &a_tab->table );CHKERRQ(ierr);
  ierr = PetscMalloc(a_size*sizeof(PetscInt), &a_tab->data );CHKERRQ(ierr);
  for(kk=0;kk<a_size;kk++) a_tab->table[kk] = -1; /* -1 marks an empty slot */
  return 0;
}

#undef __FUNCT__
#define __FUNCT__ "GAMGTableDestroy"
PetscErrorCode GAMGTableDestroy( GAMGHashTable *a_tab )
{
  PetscErrorCode ierr;
  ierr = PetscFree( a_tab->table );CHKERRQ(ierr);
  ierr = PetscFree( a_tab->data );CHKERRQ(ierr);
  return 0;
}
#undef __FUNCT__
#define __FUNCT__ "GAMGTableAdd"
PetscErrorCode GAMGTableAdd( GAMGHashTable *a_tab, PetscInt a_key, PetscInt a_data )
{
  PetscInt kk,idx;
  if(a_key<0) SETERRQ1(PETSC_COMM_SELF,PETSC_ERR_USER,"Negative key %d.",a_key);
  /* linear probe with wraparound */
  for( kk = 0, idx = GAMG_HASH(a_key) ; kk < a_tab->size ; kk++, idx = (idx==(a_tab->size-1)) ? 0 : idx + 1 ){
    if( a_tab->table[idx] == a_key ) {
      /* exists */
      assert(0); /* not used this way now */
      a_tab->data[idx] = a_data;
      break;
    }
    else if( a_tab->table[idx] == -1 ) {
      /* add */
      a_tab->table[idx] = a_key;
      a_tab->data[idx] = a_data;
      break;
    }
  }
  if(kk==a_tab->size) SETERRQ1(PETSC_COMM_SELF,PETSC_ERR_USER,"Table size %d too small.",a_tab->size);
  return 0;
}
#undef __FUNCT__
#define __FUNCT__ "GAMGTableFind"
PetscErrorCode GAMGTableFind( GAMGHashTable *a_tab, PetscInt a_key, PetscInt *a_data )
{
  PetscInt kk,idx;
  if(a_key<0) SETERRQ1(PETSC_COMM_SELF,PETSC_ERR_USER,"Negative key %d.",a_key);
  for( kk = 0, idx = GAMG_HASH(a_key) ; kk < a_tab->size ; kk++, idx = (idx==(a_tab->size-1)) ? 0 : idx + 1 ){
    if( a_tab->table[idx] == a_key ) {
      *a_data = a_tab->data[idx];
      break;
    }
    else if( a_tab->table[idx] == -1 ) {
      /* not here */
      *a_data = -1;
      break;
    }
  }
  if(kk==a_tab->size) SETERRQ1(PETSC_COMM_SELF,PETSC_ERR_USER,"Table size %d too small.",a_tab->size);
  return 0;
}
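/* Example (illustrative sketch, not part of the GAMG implementation): map
   non-negative global ids to local indices; 'nnodes' is a hypothetical
   count of expected insertions.  The table uses linear probing and cannot
   grow, so size it generously. */
#if 0
{
  GAMGHashTable  gid_lid;
  PetscInt       lid;
  PetscErrorCode ierr;
  ierr = GAMGTableCreate( 2*nnodes + 1, &gid_lid );CHKERRQ(ierr);
  ierr = GAMGTableAdd( &gid_lid, 1000007, 0 );CHKERRQ(ierr);     /* key -> data */
  ierr = GAMGTableFind( &gid_lid, 1000007, &lid );CHKERRQ(ierr); /* lid == 0  */
  ierr = GAMGTableFind( &gid_lid, 42, &lid );CHKERRQ(ierr);      /* lid == -1 (absent) */
  ierr = GAMGTableDestroy( &gid_lid );CHKERRQ(ierr);
}
#endif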