Actual source code: taosolver_fg.c

tao-2.1-p0 2012-07-24
  1: #include "tao-private/taosolver_impl.h" /*I "taosolver.h" I*/

  5: /*@
  6:   TaoSetInitialVector - Sets the initial guess for the solve

  8:   Logically collective on TaoSolver
  9:   
 10:   Input Parameters:
 11: + tao - the TaoSolver context
 12: - x0  - the initial guess 
 13:  
 14:   Level: beginner
 15: .seealso: TaoCreate(), TaoSolve()
 16: @*/

 18: PetscErrorCode TaoSetInitialVector(TaoSolver tao, Vec x0) {

 23:     if (x0) {
 25:         PetscObjectReference((PetscObject)x0);
 26:     }
 27:     if (tao->solution) {
 28:         VecDestroy(&tao->solution); 
 29:     }
 30:     tao->solution = x0;
 31:     return(0);
 32: }
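
/* Editor's example (a sketch, not part of the original taosolver_fg.c): a driver
   typically builds a starting point and hands it to the solver before calling
   TaoSolve().  `tao` and `x0` are assumed to have been created elsewhere, e.g.
   with TaoCreate() and a VecCreate variant.  Because TaoSetInitialVector() takes
   its own reference (see above), the caller may later destroy its own handle. */
static PetscErrorCode ExampleSetStartingPoint(TaoSolver tao, Vec x0)
{
    VecSet(x0,1.0);               /* e.g. start from the all-ones vector */
    TaoSetInitialVector(tao,x0);  /* the solver keeps its own reference to x0 */
    return(0);
}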

 36: /*@
 37:   TaoComputeGradient - Computes the gradient of the objective function

 39:   Collective on TaoSolver

 41:   Input Parameters:
 42: + tao - the TaoSolver context
 43: - X - input vector

 45:   Output Parameter:
 46: . G - gradient vector  

 48:   Notes: TaoComputeGradient() is typically used within minimization implementations,
 49:   so most users would not generally call this routine themselves.

 51:   Level: advanced

 53: .seealso: TaoComputeObjective(), TaoComputeObjectiveAndGradient(), TaoSetGradientRoutine()
 54: @*/
 55: PetscErrorCode TaoComputeGradient(TaoSolver tao, Vec X, Vec G) 
 56: {
 58:     PetscReal dummy;
 65:     if (tao->ops->computegradient) {
 66:         PetscLogEventBegin(TaoSolver_GradientEval,tao,X,G,PETSC_NULL); 
 67:         PetscStackPush("TaoSolver user gradient evaluation routine");
 68:         CHKMEMQ;
 69:         (*tao->ops->computegradient)(tao,X,G,tao->user_gradP); 
 70:         CHKMEMQ;
 71:         PetscStackPop;
 72:         PetscLogEventEnd(TaoSolver_GradientEval,tao,X,G,PETSC_NULL); 
 73:         tao->ngrads++;
 74:     } else if (tao->ops->computeobjectiveandgradient) {
 75:         PetscLogEventBegin(TaoSolver_ObjGradientEval,tao,X,G,PETSC_NULL); 
 76:         PetscStackPush("Tao user objective/gradient evaluation routine");
 77:         CHKMEMQ;
 78:         (*tao->ops->computeobjectiveandgradient)(tao,X,&dummy,G,tao->user_objgradP); 
 79:         CHKMEMQ;
 80:         PetscStackPop;
 81:         PetscLogEventEnd(TaoSolver_ObjGradientEval,tao,X,G,PETSC_NULL); 
 82:         tao->nfuncgrads++;
 83:     }  else {
 84:         SETERRQ(PETSC_COMM_SELF,PETSC_ERR_ARG_WRONGSTATE,"TaoSetGradientRoutine() has not been called");
 85:     }
 86:     return(0);
 87: }
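
/* Editor's example (a sketch, not part of the original taosolver_fg.c): a solver
   implementation might evaluate the gradient at the current iterate and test its
   norm against a tolerance.  X and G are assumed to be conforming vectors owned
   by the solver, and `gtol` is a hypothetical tolerance. */
static PetscErrorCode ExampleCheckGradientNorm(TaoSolver tao, Vec X, Vec G, PetscReal gtol, PetscBool *converged)
{
    PetscReal gnorm;
    TaoComputeGradient(tao,X,G);   /* dispatches to the user-supplied routine */
    VecNorm(G,NORM_2,&gnorm);
    *converged = (gnorm < gtol) ? PETSC_TRUE : PETSC_FALSE;
    return(0);
}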


 92: /*@
 93:   TaoComputeObjective - Computes the objective function value at a given point

 95:   Collective on TaoSolver

 97:   Input Parameters:
 98: + tao - the TaoSolver context
 99: - X - input vector

101:   Output Parameter:
102: . f - Objective value at X

104:   Notes: TaoComputeObjective() is typically used within minimization implementations,
105:   so most users would not generally call this routine themselves.

107:   Level: advanced

109: .seealso: TaoComputeGradient(), TaoComputeObjectiveAndGradient(), TaoSetObjectiveRoutine()
110: @*/
111: PetscErrorCode TaoComputeObjective(TaoSolver tao, Vec X, PetscReal *f) 
112: {
114:     Vec temp;
119:     if (tao->ops->computeobjective) {
120:         PetscLogEventBegin(TaoSolver_ObjectiveEval,tao,X,PETSC_NULL,PETSC_NULL); 
121:         PetscStackPush("TaoSolver user objective evaluation routine");
122:         CHKMEMQ;
123:         (*tao->ops->computeobjective)(tao,X,f,tao->user_objP); 
124:         CHKMEMQ;
125:         PetscStackPop;
126:         PetscLogEventEnd(TaoSolver_ObjectiveEval,tao,X,PETSC_NULL,PETSC_NULL); 
127:         tao->nfuncs++;
128:     } else if (tao->ops->computeobjectiveandgradient) {
129:         PetscInfo(tao,"Duplicating variable vector in order to call func/grad routine"); 
130:         VecDuplicate(X,&temp); 
131:         PetscLogEventBegin(TaoSolver_ObjGradientEval,tao,X,PETSC_NULL,PETSC_NULL); 
132:         PetscStackPush("TaoSolver user objective/gradient evaluation routine");
133:         CHKMEMQ;
134:         (*tao->ops->computeobjectiveandgradient)(tao,X,f,temp,tao->user_objgradP); 
135:         CHKMEMQ;
136:         PetscStackPop;
137:         PetscLogEventEnd(TaoSolver_ObjGradientEval,tao,X,PETSC_NULL,PETSC_NULL); 
138:         VecDestroy(&temp); 
139:         tao->nfuncgrads++;

141:     }  else {
142:         SETERRQ(PETSC_COMM_SELF,PETSC_ERR_ARG_WRONGSTATE,"TaoSetObjectiveRoutine() has not been called");
143:     }
144:     PetscInfo1(tao,"TAO Function evaluation: %14.12e\n",*f);    
145:     return(0);
146: }
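
/* Editor's example (a sketch, not part of the original taosolver_fg.c): a
   backtracking line search might evaluate the objective at a trial point
   X + step*D.  `D` (search direction) and `Xtrial` (a work vector conforming
   to X) are hypothetical names used only for this illustration. */
static PetscErrorCode ExampleTrialPointObjective(TaoSolver tao, Vec X, Vec D, PetscReal step, Vec Xtrial, PetscReal *ftrial)
{
    VecWAXPY(Xtrial,step,D,X);              /* Xtrial = X + step*D */
    TaoComputeObjective(tao,Xtrial,ftrial); /* uses the objective or combined routine */
    return(0);
}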

150: /*@
151:   TaoComputeObjectiveAndGradient - Computes the objective function value and its gradient at a given point

153:   Collective on TaoSolver

155:   Input Parameters:
156: + tao - the TaoSolver context
157: - X - input vector

159:   Output Parameters:
160: + f - Objective value at X
161: - g - Gradient vector at X

163:   Notes: TaoComputeObjectiveAndGradient() is typically used within minimization implementations,
164:   so most users would not generally call this routine themselves.

166:   Level: advanced

168: .seealso: TaoComputeGradient(), TaoComputeObjective(), TaoSetObjectiveAndGradientRoutine()
169: @*/
170: PetscErrorCode TaoComputeObjectiveAndGradient(TaoSolver tao, Vec X, PetscReal *f, Vec G)
171: {
179:   if (tao->ops->computeobjectiveandgradient) {
180:       PetscLogEventBegin(TaoSolver_ObjGradientEval,tao,X,G,PETSC_NULL); 
181:       PetscStackPush("TaoSolver user objective/gradient evaluation routine");
182:       CHKMEMQ;
183:       (*tao->ops->computeobjectiveandgradient)(tao,X,f,G,tao->user_objgradP); 
184:       if (tao->ops->computegradient == TaoDefaultComputeGradient) {
185:         /* Overwrite gradient with finite difference gradient */
186:         TaoDefaultComputeGradient(tao,X,G,tao->user_objgradP); 
187:       }
188:       CHKMEMQ;
189:       PetscStackPop;
190:       PetscLogEventEnd(TaoSolver_ObjGradientEval,tao,X,G,PETSC_NULL); 
191:       tao->nfuncgrads++;
192:   } else if (tao->ops->computeobjective && tao->ops->computegradient) {
193:       PetscLogEventBegin(TaoSolver_ObjectiveEval,tao,X,PETSC_NULL,PETSC_NULL); 
194:       PetscStackPush("TaoSolver user objective evaluation routine");
195:       CHKMEMQ;
196:       (*tao->ops->computeobjective)(tao,X,f,tao->user_objP); 
197:       CHKMEMQ;
198:       PetscStackPop;
199:       PetscLogEventEnd(TaoSolver_ObjectiveEval,tao,X,PETSC_NULL,PETSC_NULL); 
200:       tao->nfuncs++;
201:       
202:       PetscLogEventBegin(TaoSolver_GradientEval,tao,X,G,PETSC_NULL); 
203:       PetscStackPush("TaoSolver user gradient evaluation routine");
204:       CHKMEMQ;
205:       (*tao->ops->computegradient)(tao,X,G,tao->user_gradP); 
206:       CHKMEMQ;
207:       PetscStackPop;
208:       PetscLogEventEnd(TaoSolver_GradientEval,tao,X,G,PETSC_NULL); 
209:       tao->ngrads++;
210:   } else {
211:       SETERRQ(PETSC_COMM_SELF,PETSC_ERR_ARG_WRONGSTATE,"TaoSetObjectiveRoutine() or TaoSetGradientRoutine() not set");
212:   }
213:   PetscInfo1(tao,"TAO Function evaluation: %14.12e\n",*f);
214:   return(0);
215: } 
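
/* Editor's example (a sketch, not part of the original taosolver_fg.c): a
   steepest-descent style iteration evaluates f and g together, then moves
   along the negative gradient.  `alpha` is a hypothetical fixed step length. */
static PetscErrorCode ExampleSteepestDescentStep(TaoSolver tao, Vec X, Vec G, PetscReal alpha, PetscReal *f)
{
    TaoComputeObjectiveAndGradient(tao,X,f,G); /* one user call when a combined
                                                  routine is set, two otherwise */
    VecAXPY(X,-alpha,G);                       /* X <- X - alpha*G */
    return(0);
}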

219: /*@C
220:   TaoSetObjectiveRoutine - Sets the function evaluation routine for minimization

222:   Logically collective on TaoSolver

224:   Input Parameters:
225: + tao - the TaoSolver context
226: . func - the objective function
227: - ctx - [optional] user-defined context for private data for the function evaluation
228:         routine (may be PETSC_NULL)

230:   Calling sequence of func:
231: $      func (TaoSolver tao, Vec x, PetscReal *f, void *ctx);

233: + x - input vector
234: . f - function value
235: - ctx - [optional] user-defined function context

237:   Level: beginner

239: .seealso: TaoSetGradientRoutine(), TaoSetHessianRoutine(), TaoSetObjectiveAndGradientRoutine()
240: @*/
241: PetscErrorCode TaoSetObjectiveRoutine(TaoSolver tao, PetscErrorCode (*func)(TaoSolver, Vec, PetscReal*,void*),void *ctx) 
242: {
245:     tao->user_objP = ctx;
246:     tao->ops->computeobjective = func;
247:     return(0);
248: }
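
/* Editor's example (a sketch, not part of the original taosolver_fg.c): a user
   objective callback for f(x) = 1/2 ||x||^2, matching the calling sequence
   documented above.  The context pointer is unused here but could carry
   application data. */
static PetscErrorCode ExampleObjective(TaoSolver tao, Vec X, PetscReal *f, void *ctx)
{
    PetscReal nrm;
    VecNorm(X,NORM_2,&nrm);
    *f = 0.5*nrm*nrm;
    return(0);
}
/* Registration, typically done in the driver before TaoSolve():
       TaoSetObjectiveRoutine(tao,ExampleObjective,PETSC_NULL);               */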

252: /*@C
253:   TaoSetSeparableObjectiveRoutine - Sets the function evaluation routine for least-squares applications

255:   Logically collective on TaoSolver

257:   Input Parameters:
258: + tao - the TaoSolver context
     . sepobj - vector used to hold the separable objective (residual) values at each evaluation
259: . func - the objective function evaluation routine
260: - ctx - [optional] user-defined context for private data for the function evaluation
261:         routine (may be PETSC_NULL)

263:   Calling sequence of func:
264: $      func (TaoSolver tao, Vec x, Vec f, void *ctx);

266: + x - input vector
267: . f - function value vector 
268: - ctx - [optional] user-defined function context

270:   Level: beginner

272: .seealso: TaoSetObjectiveRoutine(), TaoSetJacobianRoutine()
273: @*/
274: PetscErrorCode TaoSetSeparableObjectiveRoutine(TaoSolver tao, Vec sepobj, PetscErrorCode (*func)(TaoSolver, Vec, Vec, void*),void *ctx)
275: {
279:     tao->user_sepobjP = ctx;
280:     tao->sep_objective = sepobj;
281:     tao->ops->computeseparableobjective = func;
282:     return(0);
283: }
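
/* Editor's example (a sketch, not part of the original taosolver_fg.c): a
   least-squares residual callback F(x) = x - d, with the data vector passed
   through a user context.  `ExampleLSQCtx`, `R` (a residual vector conforming
   to F), and `user` are hypothetical names used only for this illustration. */
typedef struct { Vec d; } ExampleLSQCtx;

static PetscErrorCode ExampleResiduals(TaoSolver tao, Vec X, Vec F, void *ctx)
{
    ExampleLSQCtx *user = (ExampleLSQCtx*)ctx;
    VecWAXPY(F,-1.0,user->d,X);   /* F = X - d */
    return(0);
}
/* Registration, typically done in the driver:
       TaoSetSeparableObjectiveRoutine(tao,R,ExampleResiduals,(void*)&user);  */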

287: /*@
288:   TaoComputeSeparableObjective - Computes a separable objective function vector at a given point (for least-squares applications)

290:   Collective on TaoSolver

292:   Input Parameters:
293: + tao - the TaoSolver context
294: - X - input vector

296:   Output Parameter:
297: . f - Objective vector at X

299:   Notes: TaoComputeSeparableObjective() is typically used within minimization implementations,
300:   so most users would not generally call this routine themselves.

302:   Level: advanced

304: .seealso: TaoSetSeparableObjectiveRoutine()
305: @*/
306: PetscErrorCode TaoComputeSeparableObjective(TaoSolver tao, Vec X, Vec F) 
307: {
315:     if (tao->ops->computeseparableobjective) {
316:         PetscLogEventBegin(TaoSolver_ObjectiveEval,tao,X,PETSC_NULL,PETSC_NULL); 
317:         PetscStackPush("TaoSolver user separable objective evaluation routine");
318:         CHKMEMQ;
319:         (*tao->ops->computeseparableobjective)(tao,X,F,tao->user_sepobjP); 
320:         CHKMEMQ;
321:         PetscStackPop;
322:         PetscLogEventEnd(TaoSolver_ObjectiveEval,tao,X,PETSC_NULL,PETSC_NULL); 
323:         tao->nfuncs++;
324:     } else {
325:         SETERRQ(PETSC_COMM_SELF,PETSC_ERR_ARG_WRONGSTATE,"TaoSetSeparableObjectiveRoutine() has not been called");
326:     }
327:     PetscInfo(tao,"TAO separable function evaluation.\n"); 
328:     return(0);
329: }
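
/* Editor's example (a sketch, not part of the original taosolver_fg.c): a
   least-squares method evaluates the residual vector F(x) and forms the
   implied scalar objective 1/2 ||F(x)||^2 from it. */
static PetscErrorCode ExampleLeastSquaresValue(TaoSolver tao, Vec X, Vec F, PetscReal *f)
{
    PetscReal rnorm;
    TaoComputeSeparableObjective(tao,X,F);  /* fills the residual vector F */
    VecNorm(F,NORM_2,&rnorm);
    *f = 0.5*rnorm*rnorm;
    return(0);
}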

333: /*@C
334:   TaoSetGradientRoutine - Sets the gradient evaluation routine for minimization

336:   Logically collective on TaoSolver

338:   Input Parameters:
339: + tao - the TaoSolver context
340: . func - the gradient function
341: - ctx - [optional] user-defined context for private data for the gradient evaluation
342:         routine (may be PETSC_NULL)

344:   Calling sequence of func:
345: $      func (TaoSolver tao, Vec x, Vec g, void *ctx);

347: + x - input vector
348: . g - gradient value (output)
349: - ctx - [optional] user-defined function context

351:   Level: beginner

353: .seealso: TaoSetObjectiveRoutine(), TaoSetHessianRoutine(), TaoSetObjectiveAndGradientRoutine()
354: @*/
355: PetscErrorCode TaoSetGradientRoutine(TaoSolver tao,  PetscErrorCode (*func)(TaoSolver, Vec, Vec, void*),void *ctx) 
356: {
359:     tao->user_gradP = ctx;
360:     tao->ops->computegradient = func;
361:     return(0);
362: }
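
/* Editor's example (a sketch, not part of the original taosolver_fg.c): the
   gradient callback matching the f(x) = 1/2 ||x||^2 objective sketched above,
   namely g(x) = x. */
static PetscErrorCode ExampleGradient(TaoSolver tao, Vec X, Vec G, void *ctx)
{
    VecCopy(X,G);   /* g = x */
    return(0);
}
/* Registration, typically done in the driver:
       TaoSetGradientRoutine(tao,ExampleGradient,PETSC_NULL);                 */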


367: /*@C
368:   TaoSetObjectiveAndGradientRoutine - Sets a combined objective function and gradient evaluation routine for minimization

370:   Logically collective on TaoSolver

372:   Input Parameters:
373: + tao - the TaoSolver context
374: . func - the objective and gradient evaluation routine
375: - ctx - [optional] user-defined context for private data for the objective/gradient evaluation
376:         routine (may be PETSC_NULL)

378:   Calling sequence of func:
379: $      func (TaoSolver tao, Vec x, PetscReal *f, Vec g, void *ctx);

381: + x - input vector
     . f - objective function value (output)
382: . g - gradient value (output)
383: - ctx - [optional] user-defined function context

385:   Level: beginner

387: .seealso: TaoSetObjectiveRoutine(), TaoSetGradientRoutine(), TaoSetHessianRoutine()
388: @*/
389: PetscErrorCode TaoSetObjectiveAndGradientRoutine(TaoSolver tao, PetscErrorCode (*func)(TaoSolver, Vec, PetscReal *, Vec, void*), void *ctx)
390: {
393:     tao->user_objgradP = ctx;
394:     tao->ops->computeobjectiveandgradient = func;
395:     return(0);
396: }
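
/* Editor's example (a sketch, not part of the original taosolver_fg.c): a
   combined callback producing f(x) = 1/2 ||x||^2 and g(x) = x in a single
   pass, which lets TaoComputeObjectiveAndGradient() avoid two user calls. */
static PetscErrorCode ExampleObjGrad(TaoSolver tao, Vec X, PetscReal *f, Vec G, void *ctx)
{
    PetscScalar xtx;
    VecDot(X,X,&xtx);                /* x'x */
    *f = 0.5*PetscRealPart(xtx);
    VecCopy(X,G);                    /* g = x */
    return(0);
}
/* Registration, typically done in the driver:
       TaoSetObjectiveAndGradientRoutine(tao,ExampleObjGrad,PETSC_NULL);      */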
397:   
400: /*@
401:   TaoIsObjectiveDefined -- Checks to see if the user has
402:   declared an objective-only routine.  Useful for determining when
403:   it is appropriate to call TaoComputeObjective() or 
404:   TaoComputeObjectiveAndGradient()

406:   Not Collective

408:   Input Parameter:
409: . tao - the TaoSolver context

       Output Parameter:
410: . flg - PETSC_TRUE if objective function routine is set by user, 
411:         PETSC_FALSE otherwise

412:   Level: developer

414: .seealso: TaoSetObjectiveRoutine(), TaoIsGradientDefined(), TaoIsObjectiveAndGradientDefined()
415: @*/
416: PetscErrorCode TaoIsObjectiveDefined(TaoSolver tao, PetscBool *flg)
417: {
420:   if (tao->ops->computeobjective == 0) 
421:     *flg = PETSC_FALSE;
422:   else
423:     *flg = PETSC_TRUE;
424:   return(0);
425: }

429: /*@
430:   TaoIsGradientDefined -- Checks to see if the user has
431:   declared a gradient routine.  Useful for determining when
432:   it is appropriate to call TaoComputeGradient() or 
433:   TaoComputeObjectiveAndGradient()

435:   Not Collective

437:   Input Parameter:
438: . tao - the TaoSolver context

       Output Parameter:
439: . flg - PETSC_TRUE if gradient routine is set by user, PETSC_FALSE otherwise

440:   Level: developer

442: .seealso: TaoSetGradientRoutine(), TaoIsObjectiveDefined(), TaoIsObjectiveAndGradientDefined()
443: @*/
444: PetscErrorCode TaoIsGradientDefined(TaoSolver tao, PetscBool *flg)
445: {
448:   if (tao->ops->computegradient == 0) 
449:     *flg = PETSC_FALSE;
450:   else
451:     *flg = PETSC_TRUE;
452:   return(0);
453: }


458: /*@
459:   TaoIsObjectiveAndGradientDefined -- Checks to see if the user has
460:   declared a joint objective/gradient routine.  Useful for determining when
461:   it is appropriate to call TaoComputeObjective() or 
462:   TaoComputeObjectiveAndGradient()

464:   Not Collective

466:   Input Parameter:
467: . tao - the TaoSolver context

       Output Parameter:
468: . flg - PETSC_TRUE if objective/gradient routine is set by user, PETSC_FALSE otherwise

469:   Level: developer

471: .seealso: TaoSetObjectiveAndGradientRoutine(), TaoIsObjectiveDefined(), TaoIsGradientDefined()
472: @*/
473: PetscErrorCode TaoIsObjectiveAndGradientDefined(TaoSolver tao, PetscBool *flg)
474: {
477:   if (tao->ops->computeobjectiveandgradient == 0) 
478:     *flg = PETSC_FALSE;
479:   else
480:     *flg = PETSC_TRUE;
481:   return(0);
482: }
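
/* Editor's example (a sketch, not part of the original taosolver_fg.c): the
   three query routines above let a solver implementation pick the cheapest
   evaluation path that the user's registered callbacks allow. */
static PetscErrorCode ExampleChooseEvaluationPath(TaoSolver tao, Vec X, Vec G, PetscReal *f)
{
    PetscBool hasobj, hasgrad, hasobjgrad;
    TaoIsObjectiveDefined(tao,&hasobj);
    TaoIsGradientDefined(tao,&hasgrad);
    TaoIsObjectiveAndGradientDefined(tao,&hasobjgrad);
    if (hasobjgrad || (hasobj && hasgrad)) {
        TaoComputeObjectiveAndGradient(tao,X,f,G);  /* f and g together */
    } else if (hasobj) {
        TaoComputeObjective(tao,X,f);               /* objective value only */
    }
    return(0);
}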