Actual source code: tron.c

  1: #include <../src/tao/bound/impls/tron/tron.h>
  2: #include <../src/tao/matrix/submatfree.h>

  4: /* TRON Routines */
  5: static PetscErrorCode TronGradientProjections(Tao, TAO_TRON *);
  6: /*------------------------------------------------------------*/
  7: static PetscErrorCode TaoDestroy_TRON(Tao tao)
  8: {
  9:   TAO_TRON *tron = (TAO_TRON *)tao->data;

 11:   PetscFunctionBegin;
 12:   PetscCall(VecDestroy(&tron->X_New));
 13:   PetscCall(VecDestroy(&tron->G_New));
 14:   PetscCall(VecDestroy(&tron->Work));
 15:   PetscCall(VecDestroy(&tron->DXFree));
 16:   PetscCall(VecDestroy(&tron->R));
 17:   PetscCall(VecDestroy(&tron->diag));
 18:   PetscCall(VecScatterDestroy(&tron->scatter));
 19:   PetscCall(ISDestroy(&tron->Free_Local));
 20:   PetscCall(MatDestroy(&tron->H_sub));
 21:   PetscCall(MatDestroy(&tron->Hpre_sub));
 22:   PetscCall(KSPDestroy(&tao->ksp));
 23:   PetscCall(PetscFree(tao->data));
 24:   PetscFunctionReturn(PETSC_SUCCESS);
 25: }

 27: /*------------------------------------------------------------*/
 28: static PetscErrorCode TaoSetFromOptions_TRON(Tao tao, PetscOptionItems *PetscOptionsObject)
 29: {
 30:   TAO_TRON *tron = (TAO_TRON *)tao->data;
 31:   PetscBool flg;

 33:   PetscFunctionBegin;
 34:   PetscOptionsHeadBegin(PetscOptionsObject, "Newton Trust Region Method for bound-constrained optimization");
 35:   PetscCall(PetscOptionsInt("-tao_tron_maxgpits", "maximum number of gradient projections per TRON iterate", "TaoSetMaxGPIts", tron->maxgpits, &tron->maxgpits, &flg));
 36:   PetscOptionsHeadEnd();
 37:   PetscCall(KSPSetFromOptions(tao->ksp));
 38:   PetscFunctionReturn(PETSC_SUCCESS);
 39: }

 41: /*------------------------------------------------------------*/
 42: static PetscErrorCode TaoView_TRON(Tao tao, PetscViewer viewer)
 43: {
 44:   TAO_TRON *tron = (TAO_TRON *)tao->data;
 45:   PetscBool isascii;

 47:   PetscFunctionBegin;
 48:   PetscCall(PetscObjectTypeCompare((PetscObject)viewer, PETSCVIEWERASCII, &isascii));
 49:   if (isascii) {
 50:     PetscCall(PetscViewerASCIIPrintf(viewer, "Total PG its: %" PetscInt_FMT ", ", tron->total_gp_its));
 51:     PetscCall(PetscViewerASCIIPrintf(viewer, "PG tolerance: %g\n", (double)tron->pg_ftol));
 52:   }
 53:   PetscFunctionReturn(PETSC_SUCCESS);
 54: }

 56: /* ---------------------------------------------------------- */
 57: static PetscErrorCode TaoSetup_TRON(Tao tao)
 58: {
 59:   TAO_TRON *tron = (TAO_TRON *)tao->data;

 61:   PetscFunctionBegin;
 62:   /* Allocate some arrays */
 63:   PetscCall(VecDuplicate(tao->solution, &tron->diag));
 64:   PetscCall(VecDuplicate(tao->solution, &tron->X_New));
 65:   PetscCall(VecDuplicate(tao->solution, &tron->G_New));
 66:   PetscCall(VecDuplicate(tao->solution, &tron->Work));
 67:   PetscCall(VecDuplicate(tao->solution, &tao->gradient));
 68:   PetscCall(VecDuplicate(tao->solution, &tao->stepdirection));
 69:   PetscFunctionReturn(PETSC_SUCCESS);
 70: }

 72: static PetscErrorCode TaoSolve_TRON(Tao tao)
 73: {
 74:   TAO_TRON                    *tron = (TAO_TRON *)tao->data;
 75:   PetscInt                     its;
 76:   TaoLineSearchConvergedReason ls_reason = TAOLINESEARCH_CONTINUE_ITERATING;
 77:   PetscReal                    prered, actred, delta, f, f_new, rhok, gdx, xdiff, stepsize;

 79:   PetscFunctionBegin;
 80:   tron->pgstepsize = 1.0;
 81:   tao->trust       = tao->trust0;
 82:   /* Compute the variable bounds and pass them on to the line search */
 83:   PetscCall(TaoComputeVariableBounds(tao));
 84:   PetscCall(TaoLineSearchSetVariableBounds(tao->linesearch, tao->XL, tao->XU));

 86:   /* Project the initial point onto the feasible region */
 87:   PetscCall(VecMedian(tao->XL, tao->solution, tao->XU, tao->solution));

 89:   /* Compute the objective function and gradient */
 90:   PetscCall(TaoComputeObjectiveAndGradient(tao, tao->solution, &tron->f, tao->gradient));
 91:   PetscCall(VecNorm(tao->gradient, NORM_2, &tron->gnorm));
 92:   PetscCheck(!PetscIsInfOrNanReal(tron->f) && !PetscIsInfOrNanReal(tron->gnorm), PetscObjectComm((PetscObject)tao), PETSC_ERR_USER, "User provided compute function generated Inf or NaN");

 94:   /* Project the gradient and calculate the norm */
 95:   PetscCall(VecBoundGradientProjection(tao->gradient, tao->solution, tao->XL, tao->XU, tao->gradient));
 96:   PetscCall(VecNorm(tao->gradient, NORM_2, &tron->gnorm));

 98:   /* Initialize trust region radius */
 99:   tao->trust = tao->trust0;
100:   if (tao->trust <= 0) tao->trust = PetscMax(tron->gnorm * tron->gnorm, 1.0);

102:   /* Initialize step sizes for the line searches */
103:   tron->pgstepsize = 1.0;
104:   tron->stepsize   = tao->trust;

106:   tao->reason = TAO_CONTINUE_ITERATING;
107:   PetscCall(TaoLogConvergenceHistory(tao, tron->f, tron->gnorm, 0.0, tao->ksp_its));
108:   PetscCall(TaoMonitor(tao, tao->niter, tron->f, tron->gnorm, 0.0, tron->stepsize));
109:   PetscUseTypeMethod(tao, convergencetest, tao->cnvP);
110:   while (tao->reason == TAO_CONTINUE_ITERATING) {
111:     /* Call general purpose update function */
112:     PetscTryTypeMethod(tao, update, tao->niter, tao->user_update);

114:     /* Perform projected gradient iterations */
115:     PetscCall(TronGradientProjections(tao, tron));

117:     PetscCall(VecBoundGradientProjection(tao->gradient, tao->solution, tao->XL, tao->XU, tao->gradient));
118:     PetscCall(VecNorm(tao->gradient, NORM_2, &tron->gnorm));

120:     tao->ksp_its      = 0;
121:     f                 = tron->f;
122:     delta             = tao->trust;
123:     tron->n_free_last = tron->n_free;
124:     PetscCall(TaoComputeHessian(tao, tao->solution, tao->hessian, tao->hessian_pre));

 126:     /* Generate an index set (IS) of the free variables, i.e. those whose bound constraints are inactive */
127:     PetscCall(ISDestroy(&tron->Free_Local));
128:     PetscCall(VecWhichInactive(tao->XL, tao->solution, tao->gradient, tao->XU, PETSC_TRUE, &tron->Free_Local));
129:     PetscCall(ISGetSize(tron->Free_Local, &tron->n_free));

131:     /* If no free variables */
132:     if (tron->n_free == 0) {
133:       PetscCall(VecNorm(tao->gradient, NORM_2, &tron->gnorm));
134:       PetscCall(TaoLogConvergenceHistory(tao, tron->f, tron->gnorm, 0.0, tao->ksp_its));
135:       PetscCall(TaoMonitor(tao, tao->niter, tron->f, tron->gnorm, 0.0, delta));
136:       PetscUseTypeMethod(tao, convergencetest, tao->cnvP);
137:       if (!tao->reason) tao->reason = TAO_CONVERGED_STEPTOL;
138:       break;
139:     }
 140:     /* Use Free_Local to extract the free-variable parts of the gradient, Hessian, and step direction (as subvectors/submatrices or by masking, depending on -tao_subset_type) */
141:     PetscCall(TaoVecGetSubVec(tao->gradient, tron->Free_Local, tao->subset_type, 0.0, &tron->R));
142:     PetscCall(TaoVecGetSubVec(tao->gradient, tron->Free_Local, tao->subset_type, 0.0, &tron->DXFree));
143:     PetscCall(VecSet(tron->DXFree, 0.0));
144:     PetscCall(VecScale(tron->R, -1.0));
145:     PetscCall(TaoMatGetSubMat(tao->hessian, tron->Free_Local, tron->diag, tao->subset_type, &tron->H_sub));
146:     if (tao->hessian == tao->hessian_pre) {
147:       PetscCall(MatDestroy(&tron->Hpre_sub));
148:       PetscCall(PetscObjectReference((PetscObject)tron->H_sub));
149:       tron->Hpre_sub = tron->H_sub;
150:     } else {
151:       PetscCall(TaoMatGetSubMat(tao->hessian_pre, tron->Free_Local, tron->diag, tao->subset_type, &tron->Hpre_sub));
152:     }
153:     PetscCall(KSPReset(tao->ksp));
154:     PetscCall(KSPSetOperators(tao->ksp, tron->H_sub, tron->Hpre_sub));
155:     while (1) {
156:       /* Approximately solve the reduced linear system */
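           /* KSPCGSetRadius() limits the norm of the step returned by the trust-region Krylov
              solvers (KSPSTCG is the default set in TaoCreate_TRON) to the current radius delta */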
157:       PetscCall(KSPCGSetRadius(tao->ksp, delta));

159:       PetscCall(KSPSolve(tao->ksp, tron->R, tron->DXFree));
160:       PetscCall(KSPGetIterationNumber(tao->ksp, &its));
161:       tao->ksp_its += its;
162:       tao->ksp_tot_its += its;
163:       PetscCall(VecSet(tao->stepdirection, 0.0));

 165:       /* Scatter the reduced step DXFree back into the full step direction vector */
166:       PetscCall(VecISAXPY(tao->stepdirection, tron->Free_Local, 1.0, tron->DXFree));

168:       PetscCall(VecDot(tao->gradient, tao->stepdirection, &gdx));
169:       PetscCall(VecCopy(tao->solution, tron->X_New));
170:       PetscCall(VecCopy(tao->gradient, tron->G_New));

172:       stepsize = 1.0;
173:       f_new    = f;

175:       PetscCall(TaoLineSearchSetInitialStepLength(tao->linesearch, 1.0));
176:       PetscCall(TaoLineSearchApply(tao->linesearch, tron->X_New, &f_new, tron->G_New, tao->stepdirection, &stepsize, &ls_reason));
177:       PetscCall(TaoAddLineSearchCounts(tao));

179:       PetscCall(MatMult(tao->hessian, tao->stepdirection, tron->Work));
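           /* Work now holds H*d; the VecAYPX() below forms Work = g + 0.5*H*d, so that
              prered = d'*(g + 0.5*H*d) = g'*d + 0.5*d'*H*d is the change predicted by the
              quadratic model, compared against the actual change actred = f_new - f */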
180:       PetscCall(VecAYPX(tron->Work, 0.5, tao->gradient));
181:       PetscCall(VecDot(tao->stepdirection, tron->Work, &prered));
182:       actred = f_new - f;
183:       if ((PetscAbsScalar(actred) <= 1e-6) && (PetscAbsScalar(prered) <= 1e-6)) {
184:         rhok = 1.0;
185:       } else if (actred < 0) {
186:         rhok = PetscAbs(-actred / prered);
187:       } else {
188:         rhok = 0.0;
189:       }

191:       /* Compare actual improvement to the quadratic model */
192:       if (rhok > tron->eta1) { /* Accept the point */
193:         /* d = x_new - x */
194:         PetscCall(VecCopy(tron->X_New, tao->stepdirection));
195:         PetscCall(VecAXPY(tao->stepdirection, -1.0, tao->solution));

197:         PetscCall(VecNorm(tao->stepdirection, NORM_2, &xdiff));
198:         xdiff *= stepsize;

200:         /* Adjust trust region size */
201:         if (rhok < tron->eta2) {
202:           delta = PetscMin(xdiff, delta) * tron->sigma1;
203:         } else if (rhok > tron->eta4) {
204:           delta = PetscMin(xdiff, delta) * tron->sigma3;
205:         } else if (rhok > tron->eta3) {
206:           delta = PetscMin(xdiff, delta) * tron->sigma2;
207:         }
208:         PetscCall(VecBoundGradientProjection(tron->G_New, tron->X_New, tao->XL, tao->XU, tao->gradient));
209:         PetscCall(ISDestroy(&tron->Free_Local));
210:         PetscCall(VecWhichInactive(tao->XL, tron->X_New, tao->gradient, tao->XU, PETSC_TRUE, &tron->Free_Local));
211:         f = f_new;
212:         PetscCall(VecNorm(tao->gradient, NORM_2, &tron->gnorm));
213:         PetscCall(VecCopy(tron->X_New, tao->solution));
214:         PetscCall(VecCopy(tron->G_New, tao->gradient));
215:         break;
216:       } else if (delta <= 1e-30) {
217:         break;
218:       } else {
219:         delta /= 4.0;
220:       }
 221:     } /* end of inner trust-region loop */

223:     tron->f      = f;
224:     tron->actred = actred;
225:     tao->trust   = delta;
226:     tao->niter++;
227:     PetscCall(TaoLogConvergenceHistory(tao, tron->f, tron->gnorm, 0.0, tao->ksp_its));
228:     PetscCall(TaoMonitor(tao, tao->niter, tron->f, tron->gnorm, 0.0, stepsize));
229:     PetscUseTypeMethod(tao, convergencetest, tao->cnvP);
230:   } /* END MAIN LOOP  */
231:   PetscFunctionReturn(PETSC_SUCCESS);
232: }

234: static PetscErrorCode TronGradientProjections(Tao tao, TAO_TRON *tron)
235: {
236:   PetscInt                     i;
237:   TaoLineSearchConvergedReason ls_reason;
238:   PetscReal                    actred = -1.0, actred_max = 0.0;
239:   PetscReal                    f_new;
240:   /*
241:      The gradient and function value passed into and out of this
242:      routine should be current and correct.

 244:      The free, active, and binding variables should already have been identified.
245:   */

247:   PetscFunctionBegin;
248:   for (i = 0; i < tron->maxgpits; ++i) {
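         /* Stop the projected-gradient sweep once the most recent decrease (-actred) is no more
            than the fraction pg_ftol of the largest decrease obtained so far (actred_max) */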
249:     if (-actred <= (tron->pg_ftol) * actred_max) break;

251:     ++tron->gp_iterates;
252:     ++tron->total_gp_its;
253:     f_new = tron->f;

255:     PetscCall(VecCopy(tao->gradient, tao->stepdirection));
256:     PetscCall(VecScale(tao->stepdirection, -1.0));
257:     PetscCall(TaoLineSearchSetInitialStepLength(tao->linesearch, tron->pgstepsize));
258:     PetscCall(TaoLineSearchApply(tao->linesearch, tao->solution, &f_new, tao->gradient, tao->stepdirection, &tron->pgstepsize, &ls_reason));
259:     PetscCall(TaoAddLineSearchCounts(tao));

261:     PetscCall(VecBoundGradientProjection(tao->gradient, tao->solution, tao->XL, tao->XU, tao->gradient));
262:     PetscCall(VecNorm(tao->gradient, NORM_2, &tron->gnorm));

264:     /* Update the iterate */
265:     actred     = f_new - tron->f;
266:     actred_max = PetscMax(actred_max, -(f_new - tron->f));
267:     tron->f    = f_new;
268:   }
269:   PetscFunctionReturn(PETSC_SUCCESS);
270: }

272: static PetscErrorCode TaoComputeDual_TRON(Tao tao, Vec DXL, Vec DXU)
273: {
274:   TAO_TRON *tron = (TAO_TRON *)tao->data;

276:   PetscFunctionBegin;
280:   PetscCheck(tron->Work && tao->gradient, PETSC_COMM_SELF, PETSC_ERR_ORDER, "Dual variables don't exist yet or no longer exist.");
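       /* With PG = proj(g) the projected gradient (stored in Work below), the dual estimates are
          DXL = max(PG - g, 0) for the lower bounds and DXU = min(g - PG, 0) for the upper bounds */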

282:   PetscCall(VecBoundGradientProjection(tao->gradient, tao->solution, tao->XL, tao->XU, tron->Work));
283:   PetscCall(VecCopy(tron->Work, DXL));
284:   PetscCall(VecAXPY(DXL, -1.0, tao->gradient));
285:   PetscCall(VecSet(DXU, 0.0));
286:   PetscCall(VecPointwiseMax(DXL, DXL, DXU));

288:   PetscCall(VecCopy(tao->gradient, DXU));
289:   PetscCall(VecAXPY(DXU, -1.0, tron->Work));
290:   PetscCall(VecSet(tron->Work, 0.0));
291:   PetscCall(VecPointwiseMin(DXU, tron->Work, DXU));
292:   PetscFunctionReturn(PETSC_SUCCESS);
293: }

295: /*------------------------------------------------------------*/
296: /*MC
297:   TAOTRON - The TRON algorithm is an active-set Newton trust region method
298:   for bound-constrained minimization.

300:   Options Database Keys:
301: + -tao_tron_maxgpits - maximum number of gradient projections per TRON iterate
 302: - -tao_subset_type - "subvec", "mask", or "matrix-free"; strategy for handling the active set

304:   Level: beginner
305: M*/
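
/*
   A minimal usage sketch (an editorial illustration, not part of tron.c): how an application
   would typically select and drive this solver.  FormFunctionGradient, FormHessian, and the
   application context `user' are hypothetical user-side names, and x, xl, xu, H are assumed to
   be a previously created solution vector, bound vectors, and Hessian matrix; only the Tao
   calls themselves are the actual PETSc API.

      Tao tao;
      PetscCall(TaoCreate(PETSC_COMM_WORLD, &tao));
      PetscCall(TaoSetType(tao, TAOTRON));
      PetscCall(TaoSetSolution(tao, x));
      PetscCall(TaoSetVariableBounds(tao, xl, xu));
      PetscCall(TaoSetObjectiveAndGradient(tao, NULL, FormFunctionGradient, &user));
      PetscCall(TaoSetHessian(tao, H, H, FormHessian, &user));
      PetscCall(TaoSetFromOptions(tao));
      PetscCall(TaoSolve(tao));
      PetscCall(TaoDestroy(&tao));

   TaoSetFromOptions() picks up the options listed above, e.g. -tao_tron_maxgpits and
   -tao_subset_type, from the options database.
*/
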
306: PETSC_EXTERN PetscErrorCode TaoCreate_TRON(Tao tao)
307: {
308:   TAO_TRON   *tron;
309:   const char *morethuente_type = TAOLINESEARCHMT;

311:   PetscFunctionBegin;
312:   tao->ops->setup          = TaoSetup_TRON;
313:   tao->ops->solve          = TaoSolve_TRON;
314:   tao->ops->view           = TaoView_TRON;
315:   tao->ops->setfromoptions = TaoSetFromOptions_TRON;
316:   tao->ops->destroy        = TaoDestroy_TRON;
317:   tao->ops->computedual    = TaoComputeDual_TRON;

319:   PetscCall(PetscNew(&tron));
320:   tao->data = (void *)tron;

322:   /* Override default settings (unless already changed) */
323:   if (!tao->max_it_changed) tao->max_it = 50;
324:   if (!tao->trust0_changed) tao->trust0 = 1.0;
325:   if (!tao->steptol_changed) tao->steptol = 0.0;

327:   /* Initialize pointers and variables */
328:   tron->n        = 0;
329:   tron->maxgpits = 3;
330:   tron->pg_ftol  = 0.001;

332:   tron->eta1 = 1.0e-4;
333:   tron->eta2 = 0.25;
334:   tron->eta3 = 0.50;
335:   tron->eta4 = 0.90;

337:   tron->sigma1 = 0.5;
338:   tron->sigma2 = 2.0;
339:   tron->sigma3 = 4.0;

341:   tron->gp_iterates  = 0; /* Cumulative number */
342:   tron->total_gp_its = 0;
343:   tron->n_free       = 0;

345:   tron->DXFree     = NULL;
346:   tron->R          = NULL;
347:   tron->X_New      = NULL;
348:   tron->G_New      = NULL;
349:   tron->Work       = NULL;
350:   tron->Free_Local = NULL;
351:   tron->H_sub      = NULL;
352:   tron->Hpre_sub   = NULL;
353:   tao->subset_type = TAO_SUBSET_SUBVEC;

355:   PetscCall(TaoLineSearchCreate(((PetscObject)tao)->comm, &tao->linesearch));
356:   PetscCall(PetscObjectIncrementTabLevel((PetscObject)tao->linesearch, (PetscObject)tao, 1));
357:   PetscCall(TaoLineSearchSetType(tao->linesearch, morethuente_type));
358:   PetscCall(TaoLineSearchUseTaoRoutines(tao->linesearch, tao));
359:   PetscCall(TaoLineSearchSetOptionsPrefix(tao->linesearch, tao->hdr.prefix));

361:   PetscCall(KSPCreate(((PetscObject)tao)->comm, &tao->ksp));
362:   PetscCall(PetscObjectIncrementTabLevel((PetscObject)tao->ksp, (PetscObject)tao, 1));
363:   PetscCall(KSPSetOptionsPrefix(tao->ksp, tao->hdr.prefix));
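       /* Default to the Steihaug-Toint trust-region CG so that the radius set with KSPCGSetRadius()
          in TaoSolve_TRON() takes effect; the KSP type can still be changed from the options database */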
364:   PetscCall(KSPSetType(tao->ksp, KSPSTCG));
365:   PetscFunctionReturn(PETSC_SUCCESS);
366: }