Actual source code: rosenbrock1.c

petsc-master 2017-11-16
/* Program usage: mpiexec -n 1 rosenbrock1 [-help] [all TAO options] */

/*  Include "petsctao.h" so we can use TAO solvers.  */
#include <petsctao.h>

static  char help[] = "This example demonstrates use of the TAO package to \n\
solve an unconstrained minimization problem on a single processor.  We \n\
minimize the extended Rosenbrock function: \n\
   sum_{i=0}^{n/2-1} ( alpha*(x_{2i+1}-x_{2i}^2)^2 + (1-x_{2i})^2 ) \n\
or the chained Rosenbrock function:\n\
   sum_{i=0}^{n-1} alpha*(x_{i+1} - x_i^2)^2 + (1 - x_i)^2\n";
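/*
   A typical invocation might look like the following; -n, -alpha, and
   -chained are options read by this example, while -tao_monitor and
   -tao_type are general TAO run-time options:

       mpiexec -n 1 ./rosenbrock1 -n 10 -alpha 99.0 -tao_monitor -tao_type lmvm
*/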

/*T
   Concepts: TAO^Solving an unconstrained minimization problem
   Routines: TaoCreate();
   Routines: TaoSetType(); TaoSetObjectiveAndGradientRoutine();
   Routines: TaoSetHessianRoutine();
   Routines: TaoSetInitialVector();
   Routines: TaoSetFromOptions();
   Routines: TaoSolve();
   Routines: TaoDestroy();
   Processors: 1
T*/


/*
   User-defined application context - contains data needed by the
   application-provided call-back routines that evaluate the function,
   gradient, and Hessian.
*/
typedef struct {
  PetscInt  n;          /* dimension */
  PetscReal alpha;      /* condition parameter */
  PetscBool chained;
} AppCtx;

/* -------------- User-defined routines ---------- */
PetscErrorCode FormFunctionGradient(Tao,Vec,PetscReal*,Vec,void*);
PetscErrorCode FormHessian(Tao,Vec,Mat,Mat,void*);

int main(int argc,char **argv)
{
  PetscErrorCode     ierr;                  /* used to check for functions returning nonzeros */
  PetscReal          zero=0.0;
  Vec                x;                     /* solution vector */
  Mat                H;
  Tao                tao;                   /* Tao solver context */
  PetscBool          flg;
  PetscMPIInt        size,rank;             /* number of processes running */
  AppCtx             user;                  /* user-defined application context */

  /* Initialize TAO and PETSc */
  ierr = PetscInitialize(&argc,&argv,(char*)0,help);if (ierr) return ierr;
  ierr = MPI_Comm_size(PETSC_COMM_WORLD,&size);CHKERRQ(ierr);
  ierr = MPI_Comm_rank(PETSC_COMM_WORLD,&rank);CHKERRQ(ierr);
  if (size > 1) SETERRQ(PETSC_COMM_SELF,1,"Incorrect number of processors");

  /* Initialize problem parameters */
  user.n = 2; user.alpha = 99.0; user.chained = PETSC_FALSE;
  /* Check for command line arguments to override defaults */
  ierr = PetscOptionsGetInt(NULL,NULL,"-n",&user.n,&flg);CHKERRQ(ierr);
  ierr = PetscOptionsGetReal(NULL,NULL,"-alpha",&user.alpha,&flg);CHKERRQ(ierr);
  ierr = PetscOptionsGetBool(NULL,NULL,"-chained",&user.chained,&flg);CHKERRQ(ierr);

  /* Allocate vectors for the solution and gradient */
  ierr = VecCreateSeq(PETSC_COMM_SELF,user.n,&x);CHKERRQ(ierr);
  ierr = MatCreateSeqBAIJ(PETSC_COMM_SELF,2,user.n,user.n,1,NULL,&H);CHKERRQ(ierr);
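  /* Sizing note: the Hessian of the extended Rosenbrock function couples only
     x_{2i} and x_{2i+1}, so it is block diagonal with 2x2 blocks; a SeqBAIJ
     matrix with block size 2 and one nonzero block per block row should be
     sufficient.  (The chained variant also couples x_i with x_{i+1} across
     block boundaries, so it introduces additional off-diagonal blocks.) */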

  /* The TAO code begins here */

  /* Create TAO solver with desired solution method */
  ierr = TaoCreate(PETSC_COMM_SELF,&tao);CHKERRQ(ierr);
  ierr = TaoSetType(tao,TAOLMVM);CHKERRQ(ierr);

  /* Set solution vec and an initial guess */
  ierr = VecSet(x, zero);CHKERRQ(ierr);
  ierr = TaoSetInitialVector(tao,x);CHKERRQ(ierr);

  /* Set routines for function, gradient, Hessian evaluation */
  ierr = TaoSetObjectiveAndGradientRoutine(tao,FormFunctionGradient,&user);CHKERRQ(ierr);
  ierr = TaoSetHessianRoutine(tao,H,H,FormHessian,&user);CHKERRQ(ierr);
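  /* Note: the default LMVM method is a quasi-Newton method and does not
     require the Hessian callback; Newton-type methods selected at run time
     (e.g. -tao_type nls) are expected to use FormHessian through the
     routine set above. */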

  /* Check for TAO command line options */
  ierr = TaoSetFromOptions(tao);CHKERRQ(ierr);

  /* SOLVE THE APPLICATION */
  ierr = TaoSolve(tao);CHKERRQ(ierr);

  ierr = TaoDestroy(&tao);CHKERRQ(ierr);
  ierr = VecDestroy(&x);CHKERRQ(ierr);
  ierr = MatDestroy(&H);CHKERRQ(ierr);

  ierr = PetscFinalize();
  return ierr;
}

/* -------------------------------------------------------------------- */
/*
    FormFunctionGradient - Evaluates the function, f(X), and gradient, G(X).

    Input Parameters:
.   tao  - the Tao context
.   X    - input vector
.   ptr  - optional user-defined context, as set by TaoSetObjectiveAndGradientRoutine()

    Output Parameters:
.   G - vector containing the newly evaluated gradient
.   f - function value

    Note:
    Some optimization methods ask for the function and the gradient evaluation
    at the same time.  Evaluating both at once may be more efficient than
    evaluating each separately.
*/
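/*
   For reference, the analytic gradient of the extended Rosenbrock function
   (the non-chained case) that the loop below should reproduce is, with
   t1 = x_{2i+1} - x_{2i}^2 and t2 = 1 - x_{2i}:

      df/dx_{2i}   = -4*alpha*t1*x_{2i} - 2*t2
      df/dx_{2i+1} =  2*alpha*t1
*/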
PetscErrorCode FormFunctionGradient(Tao tao,Vec X,PetscReal *f, Vec G,void *ptr)
{
  AppCtx            *user = (AppCtx *) ptr;
  PetscInt          i,nn=user->n/2;
  PetscErrorCode    ierr;
  PetscReal         ff=0,t1,t2,alpha=user->alpha;
  PetscScalar       *g;
  const PetscScalar *x;

  /* Get pointers to vector data */
  ierr = VecGetArrayRead(X,&x);CHKERRQ(ierr);
  ierr = VecGetArray(G,&g);CHKERRQ(ierr);

  /* Compute G(X) */
  if (user->chained) {
    g[0] = 0;
    for (i=0; i<user->n-1; i++) {
      t1 = x[i+1] - x[i]*x[i];
      ff += PetscSqr(1 - x[i]) + alpha*t1*t1;
      g[i] += -2*(1 - x[i]) + 2*alpha*t1*(-2*x[i]);
      g[i+1] = 2*alpha*t1;
    }
  } else {
    for (i=0; i<nn; i++){
      t1 = x[2*i+1]-x[2*i]*x[2*i]; t2 = 1-x[2*i];
      ff += alpha*t1*t1 + t2*t2;
      g[2*i] = -4*alpha*t1*x[2*i]-2.0*t2;
      g[2*i+1] = 2*alpha*t1;
    }
  }

  /* Restore vectors */
  ierr = VecRestoreArrayRead(X,&x);CHKERRQ(ierr);
  ierr = VecRestoreArray(G,&g);CHKERRQ(ierr);
  *f   = ff;

  ierr = PetscLogFlops(nn*15);CHKERRQ(ierr);
  return 0;
}

/* ------------------------------------------------------------------- */
/*
   FormHessian - Evaluates Hessian matrix.

   Input Parameters:
.  tao   - the Tao context
.  x     - input vector
.  ptr   - optional user-defined context, as set by TaoSetHessianRoutine()

   Output Parameters:
.  H     - Hessian matrix

   Note:  Providing the Hessian may not be necessary.  Only some solvers
   require this matrix.
*/
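/*
   For reference, the analytic Hessian of the extended Rosenbrock function
   (the non-chained case) consists of independent 2x2 diagonal blocks; the
   entries the loop below should reproduce are:

      d2f/dx_{2i}^2         = -4*alpha*(x_{2i+1} - 3*x_{2i}^2) + 2
      d2f/dx_{2i} dx_{2i+1} = -4*alpha*x_{2i}
      d2f/dx_{2i+1}^2       =  2*alpha
*/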
PetscErrorCode FormHessian(Tao tao,Vec X,Mat H, Mat Hpre, void *ptr)
{
  AppCtx            *user = (AppCtx*)ptr;
  PetscErrorCode    ierr;
  PetscInt          i, ind[2];
  PetscReal         alpha=user->alpha;
  PetscReal         v[2][2];
  const PetscScalar *x;
  PetscBool         assembled;

  /* Zero existing matrix entries */
  ierr = MatAssembled(H,&assembled);CHKERRQ(ierr);
  if (assembled) {ierr = MatZeroEntries(H);CHKERRQ(ierr);}

  /* Get a pointer to vector data */
  ierr = VecGetArrayRead(X,&x);CHKERRQ(ierr);

  /* Compute H(X) entries */
  if (user->chained) {
    ierr = MatZeroEntries(H);CHKERRQ(ierr);
    for (i=0; i<user->n-1; i++) {
      PetscScalar t1 = x[i+1] - x[i]*x[i];
      v[0][0] = 2 + 2*alpha*(t1*(-2) + 4*x[i]*x[i]);   /* d2f/dx_i^2 = 2 - 4*alpha*t1 + 8*alpha*x_i^2 */
      v[0][1] = 2*alpha*(-2*x[i]);
      v[1][0] = 2*alpha*(-2*x[i]);
      v[1][1] = 2*alpha*t1;
      ind[0] = i; ind[1] = i+1;
      ierr = MatSetValues(H,2,ind,2,ind,v[0],ADD_VALUES);CHKERRQ(ierr);
    }
  } else {
    for (i=0; i<user->n/2; i++){
      v[1][1] = 2*alpha;
      v[0][0] = -4*alpha*(x[2*i+1]-3*x[2*i]*x[2*i]) + 2;
      v[1][0] = v[0][1] = -4.0*alpha*x[2*i];
      ind[0] = 2*i; ind[1] = 2*i+1;
      ierr = MatSetValues(H,2,ind,2,ind,v[0],INSERT_VALUES);CHKERRQ(ierr);
    }
  }
  ierr = VecRestoreArrayRead(X,&x);CHKERRQ(ierr);

  /* Assemble matrix */
  ierr = MatAssemblyBegin(H,MAT_FINAL_ASSEMBLY);CHKERRQ(ierr);
  ierr = MatAssemblyEnd(H,MAT_FINAL_ASSEMBLY);CHKERRQ(ierr);
  ierr = PetscLogFlops(9.0*user->n/2.0);CHKERRQ(ierr);
  return 0;
}