Actual source code: ex76.c

petsc-master 2020-10-23

#include <petscksp.h>

static char help[] = "Solves a linear system using PCHPDDM.\n\n";
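
/* A sketch of a typical invocation, picking one concrete instance of the "geneo"
   TEST stanza at the bottom of this file, and assuming the GENEO data files are
   available under ${DATAFILESPATH}:

     mpiexec -n 4 ./ex76 -ksp_converged_reason -pc_type hpddm \
       -pc_hpddm_levels_1_sub_pc_type cholesky -pc_hpddm_levels_1_eps_nev 5 \
       -pc_hpddm_levels_1_st_pc_type cholesky -pc_hpddm_coarse_pc_type redundant \
       -load_dir ${DATAFILESPATH}/matrices/hpddm/GENEO */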

int main(int argc,char **args)
{
  Vec            x,b;        /* computed solution and RHS */
  Mat            A,aux,X,B;  /* linear system matrix, local auxiliary matrix, and dense blocks of solutions and right-hand sides */
  KSP            ksp;        /* linear solver context */
  PC             pc;
  IS             is,sizes;
  const PetscInt *idx;
  PetscMPIInt    rank,size;
  PetscInt       m,N = 1;
  const char     *deft = MATAIJ;
  PetscViewer    viewer;
  char           dir[PETSC_MAX_PATH_LEN],name[PETSC_MAX_PATH_LEN],type[256];
  PetscBool      flg;
  PetscErrorCode ierr;

  ierr = PetscInitialize(&argc,&args,NULL,help);if (ierr) return ierr;
  MPI_Comm_size(PETSC_COMM_WORLD,&size);
  if (size != 4) SETERRQ(PETSC_COMM_WORLD,PETSC_ERR_USER,"This example requires 4 processes");
  PetscOptionsGetInt(NULL,NULL,"-rhs",&N,NULL); /* number of right-hand sides */
  MPI_Comm_rank(PETSC_COMM_WORLD,&rank);
  MatCreate(PETSC_COMM_WORLD,&A);  /* distributed matrix of the global linear system */
  MatCreate(PETSC_COMM_SELF,&aux); /* sequential auxiliary matrix, one per process */
  ISCreate(PETSC_COMM_SELF,&is);
  PetscStrcpy(dir,".");
  PetscOptionsGetString(NULL,NULL,"-load_dir",dir,sizeof(dir),NULL);
  /* loading matrices */
  PetscSNPrintf(name,sizeof(name),"%s/sizes_%d_%d.dat",dir,rank,size);
  PetscViewerBinaryOpen(PETSC_COMM_SELF,name,FILE_MODE_READ,&viewer);
  ISCreate(PETSC_COMM_SELF,&sizes);
  ISLoad(sizes,viewer);
  ISGetIndices(sizes,&idx);
  MatSetSizes(A,idx[0],idx[1],idx[2],idx[3]); /* idx[0]/idx[1] hold the local row/column sizes, idx[2]/idx[3] the global ones */
  ISRestoreIndices(sizes,&idx);
  ISDestroy(&sizes);
  PetscViewerDestroy(&viewer);
  MatSetUp(A);
  PetscSNPrintf(name,sizeof(name),"%s/A.dat",dir);
  PetscViewerBinaryOpen(PETSC_COMM_WORLD,name,FILE_MODE_READ,&viewer);
  MatLoad(A,viewer);
  PetscViewerDestroy(&viewer);
  PetscSNPrintf(name,sizeof(name),"%s/is_%d_%d.dat",dir,rank,size);
  PetscViewerBinaryOpen(PETSC_COMM_SELF,name,FILE_MODE_READ,&viewer);
  ISLoad(is,viewer);
  PetscViewerDestroy(&viewer);
  PetscSNPrintf(name,sizeof(name),"%s/Neumann_%d_%d.dat",dir,rank,size);
  PetscViewerBinaryOpen(PETSC_COMM_SELF,name,FILE_MODE_READ,&viewer);
  MatSetBlockSizesFromMats(aux,A,A);
  MatLoad(aux,viewer);
  PetscViewerDestroy(&viewer);
  MatSetOption(A,MAT_SYMMETRIC,PETSC_TRUE);
  MatSetOption(aux,MAT_SYMMETRIC,PETSC_TRUE);
  /* ready for testing */
  PetscOptionsBegin(PETSC_COMM_WORLD,"","","");
  PetscOptionsFList("-mat_type","Matrix type","MatSetType",MatList,deft,type,256,&flg);
  PetscOptionsEnd();
  if (flg) { /* convert the system and auxiliary matrices to the requested format */
    MatConvert(A,type,MAT_INPLACE_MATRIX,&A);
    MatConvert(aux,type,MAT_INPLACE_MATRIX,&aux);
  }
  KSPCreate(PETSC_COMM_WORLD,&ksp);
  KSPSetOperators(ksp,A,A);
  KSPGetPC(ksp,&pc);
  PCSetType(pc,PCHPDDM);
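  /* aux is the unassembled local operator on the overlapping subdomain indexed by
     is; PCHPDDM uses it to set up the concurrent generalized eigenproblems from
     which its coarse space is built (cf. the GenEO framework, hence the name of
     the data files loaded above) */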
#if defined(PETSC_HAVE_HPDDM)
  PCHPDDMSetAuxiliaryMat(pc,is,aux,NULL,NULL);
  PCHPDDMHasNeumannMat(pc,PETSC_FALSE); /* PETSC_TRUE is fine as well, just testing */
#endif
  /* the PC keeps its own references to is and aux, so the local handles can be destroyed */
  ISDestroy(&is);
  MatDestroy(&aux);
  KSPSetFromOptions(ksp);
  MatCreateVecs(A,&x,&b);
  VecSet(b,1.0);
  KSPSolve(ksp,b,x);
  VecGetLocalSize(x,&m); /* local size reused below to create the dense blocks */
  VecDestroy(&x);
  VecDestroy(&b);
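  /* with -rhs N and N > 1, solve the same system with a block of N random
     right-hand sides through KSPMatSolve() to exercise the multiple
     right-hand side code path */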
  if (N > 1) {
    KSPType type;
    PetscOptionsClearValue(NULL,"-ksp_converged_reason");
    KSPSetFromOptions(ksp);
    MatCreateDense(PETSC_COMM_WORLD,m,PETSC_DECIDE,PETSC_DECIDE,N,NULL,&B);
    MatCreateDense(PETSC_COMM_WORLD,m,PETSC_DECIDE,PETSC_DECIDE,N,NULL,&X);
    MatSetRandom(B,NULL);
    /* this is algorithmically optimal in the sense that blocks of vectors are coarsened or interpolated using matrix-matrix operations */
    /* PCHPDDM, however, heavily relies on the MPI[S]BAIJ formats, for which there is no efficient MatProduct implementation */
    KSPMatSolve(ksp,B,X);
    KSPGetType(ksp,&type);
    PetscStrcmp(type,KSPHPDDM,&flg);
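    /* when the KSP is of type KSPHPDDM, redo the solve with a block size of 1,
       i.e., one column at a time, and check that the result matches the blocked
       solve up to a tolerance near machine precision */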
#if defined(PETSC_HAVE_HPDDM)
    if (flg) {
      PetscReal    norm;
      KSPHPDDMType type;
      KSPHPDDMGetType(ksp,&type);
      if (type == KSP_HPDDM_TYPE_PREONLY || type == KSP_HPDDM_TYPE_CG || type == KSP_HPDDM_TYPE_GMRES || type == KSP_HPDDM_TYPE_GCRODR) {
        Mat C;
        MatDuplicate(X,MAT_DO_NOT_COPY_VALUES,&C);
        KSPSetMatSolveBlockSize(ksp,1);
        KSPMatSolve(ksp,B,C);
        MatAYPX(C,-1.0,X,SAME_NONZERO_PATTERN);
        MatNorm(C,NORM_INFINITY,&norm);
        MatDestroy(&C);
        if (norm > 100*PETSC_MACHINE_EPSILON) SETERRQ2(PetscObjectComm((PetscObject)pc),PETSC_ERR_PLIB,"KSPMatSolve() and KSPSolve() difference has nonzero norm %g with pseudo-block KSPHPDDMType %s",(double)norm,KSPHPDDMTypes[type]);
      }
    }
#endif
    MatDestroy(&X);
    MatDestroy(&B);
  }
  KSPDestroy(&ksp);
  MatDestroy(&A);
  ierr = PetscFinalize();
  return ierr;
}

/*TEST

   test:
      requires: hpddm slepc datafilespath double !complex !define(PETSC_USE_64BIT_INDICES)
      nsize: 4
      args: -ksp_rtol 1e-3 -ksp_converged_reason -pc_type {{bjacobi hpddm}shared output} -pc_hpddm_coarse_sub_pc_type lu -sub_pc_type lu -options_left no -load_dir ${DATAFILESPATH}/matrices/hpddm/GENEO

   test:
      requires: hpddm slepc datafilespath double !complex !define(PETSC_USE_64BIT_INDICES)
      suffix: geneo
      nsize: 4
      args: -ksp_converged_reason -pc_type hpddm -pc_hpddm_levels_1_sub_pc_type cholesky -pc_hpddm_levels_1_eps_nev {{5 15}separate output} -pc_hpddm_levels_1_st_pc_type cholesky -pc_hpddm_coarse_p {{1 2}shared output} -pc_hpddm_coarse_pc_type redundant -mat_type {{aij baij sbaij}shared output} -load_dir ${DATAFILESPATH}/matrices/hpddm/GENEO

   test:
      requires: hpddm slepc datafilespath double !complex !define(PETSC_USE_64BIT_INDICES)
      suffix: fgmres_geneo_20_p_2
      nsize: 4
      args: -ksp_converged_reason -pc_type hpddm -pc_hpddm_levels_1_sub_pc_type lu -pc_hpddm_levels_1_eps_nev 20 -pc_hpddm_coarse_p 2 -pc_hpddm_coarse_pc_type redundant -ksp_type fgmres -pc_hpddm_coarse_mat_type {{baij sbaij}shared output} -pc_hpddm_log_separate {{false true}shared output} -load_dir ${DATAFILESPATH}/matrices/hpddm/GENEO

   test:
      requires: hpddm slepc datafilespath double !complex !define(PETSC_USE_64BIT_INDICES)
      suffix: fgmres_geneo_20_p_2_geneo
      output_file: output/ex76_fgmres_geneo_20_p_2.out
      nsize: 4
      args: -ksp_converged_reason -pc_type hpddm -pc_hpddm_levels_1_sub_pc_type cholesky -pc_hpddm_levels_1_eps_nev 20 -pc_hpddm_levels_2_p 2 -pc_hpddm_levels_2_mat_type {{baij sbaij}shared output} -pc_hpddm_levels_2_eps_nev {{5 20}shared output} -pc_hpddm_levels_2_sub_pc_type cholesky -pc_hpddm_levels_2_ksp_type gmres -ksp_type fgmres -pc_hpddm_coarse_mat_type {{baij sbaij}shared output} -mat_type {{aij sbaij}shared output} -load_dir ${DATAFILESPATH}/matrices/hpddm/GENEO

   # PCHPDDM + KSPHPDDM test to exercise multilevel + multiple RHS in one go
   test:
      requires: hpddm slepc datafilespath double !complex !define(PETSC_USE_64BIT_INDICES)
      suffix: fgmres_geneo_20_p_2_geneo_rhs
      output_file: output/ex76_fgmres_geneo_20_p_2.out
      nsize: 4
      args: -ksp_converged_reason -pc_type hpddm -pc_hpddm_levels_1_sub_pc_type cholesky -pc_hpddm_levels_1_eps_nev 20 -pc_hpddm_levels_2_p 2 -pc_hpddm_levels_2_mat_type baij -pc_hpddm_levels_2_eps_nev 5 -pc_hpddm_levels_2_sub_pc_type cholesky -pc_hpddm_levels_2_ksp_max_it 10 -pc_hpddm_levels_2_ksp_type hpddm -pc_hpddm_levels_2_ksp_hpddm_type gmres -ksp_type hpddm -ksp_hpddm_variant flexible -pc_hpddm_coarse_mat_type baij -mat_type aij -load_dir ${DATAFILESPATH}/matrices/hpddm/GENEO -rhs 4

TEST*/