Actual source code: morethuente.c
#include <petsc/private/taolinesearchimpl.h>
#include <../src/tao/linesearch/impls/morethuente/morethuente.h>

/*
   This algorithm is taken from More' and Thuente, "Line search algorithms
   with guaranteed sufficient decrease", Argonne National Laboratory,
   Technical Report MCS-P330-1092.
*/
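
/*
   On success the returned step length is intended to satisfy both the sufficient decrease (Armijo)
   condition and the curvature condition,

     f(x + step*s)  <= f(x) + ftol * step * g(x)^T s
     |g(x + step*s)^T s| <= gtol * |g(x)^T s|

   where ftol and gtol are the tolerances stored in the TaoLineSearch context.
*/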

static PetscErrorCode Tao_mcstep(TaoLineSearch ls, PetscReal *stx, PetscReal *fx, PetscReal *dx, PetscReal *sty, PetscReal *fy, PetscReal *dy, PetscReal *stp, PetscReal *fp, PetscReal *dp);

static PetscErrorCode TaoLineSearchDestroy_MT(TaoLineSearch ls)
{
  TaoLineSearch_MT *mt = (TaoLineSearch_MT *)(ls->data);

  PetscFunctionBegin;
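  /* The solution vector is only referenced by this line search, never owned, so drop the reference rather than destroying it */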
  PetscCall(PetscObjectDereference((PetscObject)mt->x));
  PetscCall(VecDestroy(&mt->work));
  PetscCall(PetscFree(ls->data));
  PetscFunctionReturn(PETSC_SUCCESS);
}

static PetscErrorCode TaoLineSearchSetFromOptions_MT(TaoLineSearch ls, PetscOptionItems *PetscOptionsObject)
{
  PetscFunctionBegin;
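  /* No More'-Thuente-specific options are registered here; the common line-search options (tolerances, step bounds, maximum function evaluations) are handled by TaoLineSearchSetFromOptions() */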
  PetscFunctionReturn(PETSC_SUCCESS);
}

static PetscErrorCode TaoLineSearchMonitor_MT(TaoLineSearch ls)
{
  TaoLineSearch_MT *mt = (TaoLineSearch_MT *)ls->data;

  PetscFunctionBegin;
  PetscCall(PetscViewerASCIIPrintf(ls->viewer, "stx: %g, fx: %g, dgx: %g\n", (double)mt->stx, (double)mt->fx, (double)mt->dgx));
  PetscCall(PetscViewerASCIIPrintf(ls->viewer, "sty: %g, fy: %g, dgy: %g\n", (double)mt->sty, (double)mt->fy, (double)mt->dgy));
  PetscFunctionReturn(PETSC_SUCCESS);
}

static PetscErrorCode TaoLineSearchApply_MT(TaoLineSearch ls, Vec x, PetscReal *f, Vec g, Vec s)
{
  TaoLineSearch_MT *mt = (TaoLineSearch_MT *)(ls->data);
  PetscReal         xtrapf = 4.0;
  PetscReal         finit, width, width1, dginit, fm, fxm, fym, dgm, dgxm, dgym;
  PetscReal         dgx, dgy, dg, dg2, fx, fy, stx, sty, dgtest;
  PetscReal         ftest1 = 0.0, ftest2 = 0.0;
  PetscInt          i, stage1, n1, n2, nn1, nn2;
  PetscReal         bstepmin1, bstepmin2, bstepmax, ostepmin, ostepmax;
  PetscBool         g_computed = PETSC_FALSE; /* to prevent extra gradient computation */

  PetscFunctionBegin;
  ls->reason = TAOLINESEARCH_CONTINUE_ITERATING;
  PetscCall(TaoLineSearchMonitor(ls, 0, *f, 0.0));
  /* Check work vector */
  if (!mt->work) {
    PetscCall(VecDuplicate(x, &mt->work));
    mt->x = x;
    PetscCall(PetscObjectReference((PetscObject)mt->x));
  } else if (x != mt->x) {
    PetscCall(VecDestroy(&mt->work));
    PetscCall(VecDuplicate(x, &mt->work));
    PetscCall(PetscObjectDereference((PetscObject)mt->x));
    mt->x = x;
    PetscCall(PetscObjectReference((PetscObject)mt->x));
  }

  ostepmax = ls->stepmax;
  ostepmin = ls->stepmin;

  if (ls->bounded) {
    /* Compute step length needed to make all variables equal a bound */
    /* Compute the smallest steplength that will make one nonbinding variable
       equal the bound */
    PetscCall(VecGetLocalSize(ls->upper, &n1));
    PetscCall(VecGetLocalSize(mt->x, &n2));
    PetscCall(VecGetSize(ls->upper, &nn1));
    PetscCall(VecGetSize(mt->x, &nn2));
    PetscCheck(n1 == n2 && nn1 == nn2, PETSC_COMM_SELF, PETSC_ERR_ARG_SIZ, "Variable vector not compatible with bounds vector");
    PetscCall(VecScale(s, -1.0));
    PetscCall(VecBoundGradientProjection(s, x, ls->lower, ls->upper, s));
    PetscCall(VecScale(s, -1.0));
    PetscCall(VecStepBoundInfo(x, s, ls->lower, ls->upper, &bstepmin1, &bstepmin2, &bstepmax));
    ls->stepmax = PetscMin(bstepmax, ls->stepmax);
  }
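
  /* dginit = g(x)^T s is the directional derivative along s at step zero; it must be finite and strictly negative for s to be a descent direction */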
  PetscCall(VecDot(g, s, &dginit));
  if (PetscIsInfOrNanReal(dginit)) {
    PetscCall(PetscInfo(ls, "Initial Line Search step * g is Inf or NaN (%g)\n", (double)dginit));
    ls->reason = TAOLINESEARCH_FAILED_INFORNAN;
    PetscFunctionReturn(PETSC_SUCCESS);
  }
  if (dginit >= 0.0) {
    PetscCall(PetscInfo(ls, "Initial Line Search step * g is not a descent direction (%g)\n", (double)dginit));
    ls->reason = TAOLINESEARCH_FAILED_ASCENT;
    PetscFunctionReturn(PETSC_SUCCESS);
  }

  /* Initialization */
  mt->bracket = 0;
  stage1      = 1;
  finit       = *f;
  dgtest      = ls->ftol * dginit;
  width       = ls->stepmax - ls->stepmin;
  width1      = width * 2.0;
  PetscCall(VecCopy(x, mt->work));
  /* Variable dictionary:
     stx, fx, dgx - the step, function, and derivative at the best step
     sty, fy, dgy - the step, function, and derivative at the other endpoint
                    of the interval of uncertainty
     step, f, dg - the step, function, and derivative at the current step */

  stx = 0.0;
  fx  = finit;
  dgx = dginit;
  sty = 0.0;
  fy  = finit;
  dgy = dginit;

  ls->step = ls->initstep;
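
  /* Main loop: each pass costs one objective (and possibly gradient) evaluation; stx always holds the best step found so far */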
  for (i = 0; i < ls->max_funcs; i++) {
    /* Set min and max steps to correspond to the interval of uncertainty */
    if (mt->bracket) {
      ls->stepmin = PetscMin(stx, sty);
      ls->stepmax = PetscMax(stx, sty);
    } else {
      ls->stepmin = stx;
      ls->stepmax = ls->step + xtrapf * (ls->step - stx);
    }

    /* Force the step to be within the bounds */
    ls->step = PetscMax(ls->step, ls->stepmin);
    ls->step = PetscMin(ls->step, ls->stepmax);

    /* If an unusual termination is to occur, then let step be the lowest
       point obtained thus far */
    if (stx != 0 && ((mt->bracket && (ls->step <= ls->stepmin || ls->step >= ls->stepmax)) || (mt->bracket && (ls->stepmax - ls->stepmin <= ls->rtol * ls->stepmax)) || (ls->nfeval + ls->nfgeval >= ls->max_funcs - 1) || mt->infoc == 0))
      ls->step = stx;

    PetscCall(VecWAXPY(mt->work, ls->step, s, x)); /* W = X + step*S */
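
    /* For bounded problems, project the trial point back onto the box [lower, upper] (componentwise median) before evaluating */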
    if (ls->bounded) PetscCall(VecMedian(ls->lower, mt->work, ls->upper, mt->work));
    if (ls->usegts) {
      PetscCall(TaoLineSearchComputeObjectiveAndGTS(ls, mt->work, f, &dg));
      g_computed = PETSC_FALSE;
    } else {
      PetscCall(TaoLineSearchComputeObjectiveAndGradient(ls, mt->work, f, g));
      g_computed = PETSC_TRUE;
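      /* With bounds the trial point may have been projected, so estimate the directional derivative from the actual move: dg = g^T (work - x) / step */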
      if (ls->bounded) {
        PetscCall(VecDot(g, x, &dg));
        PetscCall(VecDot(g, mt->work, &dg2));
        dg = (dg2 - dg) / ls->step;
      } else {
        PetscCall(VecDot(g, s, &dg));
      }
    }

    /* update bracketing parameters in the MT context for printouts in monitor */
    mt->stx = stx;
    mt->fx  = fx;
    mt->dgx = dgx;
    mt->sty = sty;
    mt->fy  = fy;
    mt->dgy = dgy;
    PetscCall(TaoLineSearchMonitor(ls, i + 1, *f, ls->step));

    if (i == 0) ls->f_fullstep = *f;

    if (PetscIsInfOrNanReal(*f) || PetscIsInfOrNanReal(dg)) {
      /* User provided compute function generated Not-a-Number, assume
         domain violation and set function value and directional
         derivative to infinity. */
      *f = PETSC_INFINITY;
      dg = PETSC_INFINITY;
    }
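
    /* ftest1 is the sufficient decrease (Armijo) threshold finit + step*ftol*dginit; ftest2 relaxes it by an
       extra factor of ftol and is used only for the Armijo-type check in the bounded case below */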
    ftest1 = finit + ls->step * dgtest;
    if (ls->bounded) ftest2 = finit + ls->step * dgtest * ls->ftol;

    /* Convergence testing */
    if ((*f - ftest1 <= PETSC_SMALL * PetscAbsReal(finit)) && (PetscAbsReal(dg) + ls->gtol * dginit <= 0.0)) {
      PetscCall(PetscInfo(ls, "Line search success: Sufficient decrease and directional deriv conditions hold\n"));
      ls->reason = TAOLINESEARCH_SUCCESS;
      break;
    }

    /* Check Armijo if beyond the first breakpoint */
    if (ls->bounded && *f <= ftest2 && ls->step >= bstepmin2) {
      PetscCall(PetscInfo(ls, "Line search success: Sufficient decrease.\n"));
      ls->reason = TAOLINESEARCH_SUCCESS;
      break;
    }

    /* Checks for bad cases */
    if ((mt->bracket && (ls->step <= ls->stepmin || ls->step >= ls->stepmax)) || !mt->infoc) {
      PetscCall(PetscInfo(ls, "Rounding errors may prevent further progress. May not be a step satisfying\nsufficient decrease and curvature conditions. Tolerances may be too small.\n"));
      ls->reason = TAOLINESEARCH_HALTED_OTHER;
      break;
    }
    if (ls->step == ls->stepmax && *f <= ftest1 && dg <= dgtest) {
      PetscCall(PetscInfo(ls, "Step is at the upper bound, stepmax (%g)\n", (double)ls->stepmax));
      ls->reason = TAOLINESEARCH_HALTED_UPPERBOUND;
      break;
    }
    if (ls->step == ls->stepmin && *f >= ftest1 && dg >= dgtest) {
      PetscCall(PetscInfo(ls, "Step is at the lower bound, stepmin (%g)\n", (double)ls->stepmin));
      ls->reason = TAOLINESEARCH_HALTED_LOWERBOUND;
      break;
    }
    if (mt->bracket && (ls->stepmax - ls->stepmin <= ls->rtol * ls->stepmax)) {
      PetscCall(PetscInfo(ls, "Relative width of interval of uncertainty is at most rtol (%g)\n", (double)ls->rtol));
      ls->reason = TAOLINESEARCH_HALTED_RTOL;
      break;
    }

    /* In the first stage, we seek a step for which the modified function
       has a nonpositive value and nonnegative derivative */
    if (stage1 && *f <= ftest1 && dg >= dginit * PetscMin(ls->ftol, ls->gtol)) stage1 = 0;

    /* A modified function is used to predict the step only if we
       have not obtained a step for which the modified function has a
       nonpositive function value and nonnegative derivative, and if a
       lower function value has been obtained but the decrease is not
       sufficient */

    if (stage1 && *f <= fx && *f > ftest1) {
      fm   = *f - ls->step * dgtest; /* Define modified function */
      fxm  = fx - stx * dgtest;      /* and derivatives */
      fym  = fy - sty * dgtest;
      dgm  = dg - dgtest;
      dgxm = dgx - dgtest;
      dgym = dgy - dgtest;

      /* if (dgxm * (ls->step - stx) >= 0.0) */
      /* Update the interval of uncertainty and compute the new step */
      PetscCall(Tao_mcstep(ls, &stx, &fxm, &dgxm, &sty, &fym, &dgym, &ls->step, &fm, &dgm));

      fx  = fxm + stx * dgtest; /* Reset the function and */
      fy  = fym + sty * dgtest; /* gradient values */
      dgx = dgxm + dgtest;
      dgy = dgym + dgtest;
    } else {
      /* Update the interval of uncertainty and compute the new step */
      PetscCall(Tao_mcstep(ls, &stx, &fx, &dgx, &sty, &fy, &dgy, &ls->step, f, &dg));
    }

    /* Force a sufficient decrease in the width of the interval of uncertainty:
       if the interval has not shrunk enough over the last two trials, fall back to bisection */
    if (mt->bracket) {
      if (PetscAbsReal(sty - stx) >= 0.66 * width1) ls->step = stx + 0.5 * (sty - stx);
      width1 = width;
      width  = PetscAbsReal(sty - stx);
    }
  }
  if (ls->nfeval + ls->nfgeval > ls->max_funcs) {
    PetscCall(PetscInfo(ls, "Number of line search function evals (%" PetscInt_FMT ") > maximum (%" PetscInt_FMT ")\n", ls->nfeval + ls->nfgeval, ls->max_funcs));
    ls->reason = TAOLINESEARCH_HALTED_MAXFCN;
  }
  ls->stepmax = ostepmax;
  ls->stepmin = ostepmin;

  /* Finish computations */
  PetscCall(PetscInfo(ls, "%" PetscInt_FMT " function evals in line search, step = %g\n", ls->nfeval + ls->nfgeval, (double)ls->step));

  /* Set new solution vector and compute gradient if needed */
  PetscCall(VecCopy(mt->work, x));
  if (!g_computed) PetscCall(TaoLineSearchComputeGradient(ls, mt->work, g));
  PetscFunctionReturn(PETSC_SUCCESS);
}

/*MC
   TAOLINESEARCHMT - Line-search type with cubic interpolation that satisfies both the sufficient decrease and
   curvature conditions. This method can take step lengths greater than 1.

   The More'-Thuente line search can be selected with "-tao_ls_type more-thuente".

   References:
.  * - Jorge J. More' and David J. Thuente, "Line Search Algorithms with Guaranteed Sufficient Decrease",
      ACM Trans. Math. Software 20, no. 3 (1994): 286-307.

   Level: developer

.seealso: `TaoLineSearchCreate()`, `TaoLineSearchSetType()`, `TaoLineSearchApply()`

.keywords: Tao, linesearch
M*/
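
/*
   Example (illustrative sketch, not part of this file): an application that already has a configured
   Tao solver, here called tao, can select this line search programmatically with

     TaoLineSearch ls;
     PetscCall(TaoGetLineSearch(tao, &ls));
     PetscCall(TaoLineSearchSetType(ls, TAOLINESEARCHMT));

   or simply pass -tao_ls_type more-thuente on the command line.
*/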
PETSC_EXTERN PetscErrorCode TaoLineSearchCreate_MT(TaoLineSearch ls)
{
  TaoLineSearch_MT *ctx;

  PetscFunctionBegin;
  PetscCall(PetscNew(&ctx));
  ctx->bracket            = 0;
  ctx->infoc              = 1;
  ls->data                = (void *)ctx;
  ls->initstep            = 1.0;
  ls->ops->setup          = NULL;
  ls->ops->reset          = NULL;
  ls->ops->apply          = TaoLineSearchApply_MT;
  ls->ops->destroy        = TaoLineSearchDestroy_MT;
  ls->ops->setfromoptions = TaoLineSearchSetFromOptions_MT;
  ls->ops->monitor        = TaoLineSearchMonitor_MT;
  PetscFunctionReturn(PETSC_SUCCESS);
}

/*
   The subroutine mcstep is taken from the work of Jorge Nocedal.
   It is a variant of More' and Thuente's routine.

   subroutine mcstep

   The purpose of mcstep is to compute a safeguarded step for
   a line search and to update an interval of uncertainty for
   a minimizer of the function.

   The parameter stx contains the step with the least function
   value. The parameter stp contains the current step. It is
   assumed that the derivative at stx is negative in the
   direction of the step. If bracket is set true then a
   minimizer has been bracketed in an interval of uncertainty
   with endpoints stx and sty.

   The subroutine statement is

     subroutine mcstep(stx,fx,dx,sty,fy,dy,stp,fp,dp,bracket,
                       stpmin,stpmax,info)

   where

   stx, fx, and dx are variables which specify the step,
   the function, and the derivative at the best step obtained
   so far. The derivative must be negative in the direction
   of the step, that is, dx and stp-stx must have opposite
   signs. On output these parameters are updated appropriately.

   sty, fy, and dy are variables which specify the step,
   the function, and the derivative at the other endpoint of
   the interval of uncertainty. On output these parameters are
   updated appropriately.

   stp, fp, and dp are variables which specify the step,
   the function, and the derivative at the current step.
   If bracket is set true then on input stp must be
   between stx and sty. On output stp is set to the new step.

   bracket is a logical variable which specifies if a minimizer
   has been bracketed. If the minimizer has not been bracketed
   then on input bracket must be set false. If the minimizer
   is bracketed then on output bracket is set true.

   stpmin and stpmax are input variables which specify lower
   and upper bounds for the step.

   info is an integer output variable set as follows:
   if info = 1,2,3,4, then the step has been computed
   according to one of the four cases below. Otherwise
   info = 0, and this indicates improper input parameters.

   Subprograms called

     fortran-supplied ... abs,max,min,sqrt

   Argonne National Laboratory. MINPACK project. June 1983.
   Jorge J. More', David J. Thuente
*/

static PetscErrorCode Tao_mcstep(TaoLineSearch ls, PetscReal *stx, PetscReal *fx, PetscReal *dx, PetscReal *sty, PetscReal *fy, PetscReal *dy, PetscReal *stp, PetscReal *fp, PetscReal *dp)
{
  TaoLineSearch_MT *mtP = (TaoLineSearch_MT *)ls->data;
  PetscReal         gamma1, p, q, r, s, sgnd, stpc, stpf, stpq, theta;
  PetscInt          bound;

  PetscFunctionBegin;
  /* Check the input parameters for errors */
  mtP->infoc = 0;
  PetscCheck(!mtP->bracket || (*stp > PetscMin(*stx, *sty) && *stp < PetscMax(*stx, *sty)), PETSC_COMM_SELF, PETSC_ERR_ARG_OUTOFRANGE, "bad stp in bracket");
  PetscCheck(*dx * (*stp - *stx) < 0.0, PETSC_COMM_SELF, PETSC_ERR_ARG_OUTOFRANGE, "dx * (stp-stx) >= 0.0");
  PetscCheck(ls->stepmax >= ls->stepmin, PETSC_COMM_SELF, PETSC_ERR_ARG_OUTOFRANGE, "stepmax < stepmin");

  /* Determine if the derivatives have opposite sign */
  sgnd = *dp * (*dx / PetscAbsReal(*dx));

  if (*fp > *fx) {
    /* Case 1: a higher function value.
       The minimum is bracketed. If the cubic step is closer
       to stx than the quadratic step, the cubic step is taken,
       else the average of the cubic and quadratic steps is taken. */

    mtP->infoc = 1;
    bound      = 1;
    theta      = 3 * (*fx - *fp) / (*stp - *stx) + *dx + *dp;
    s          = PetscMax(PetscAbsReal(theta), PetscAbsReal(*dx));
    s          = PetscMax(s, PetscAbsReal(*dp));
    gamma1     = s * PetscSqrtScalar(PetscPowScalar(theta / s, 2.0) - (*dx / s) * (*dp / s));
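    /* gamma1 = sqrt(theta^2 - dx*dp); the terms are scaled by s = max(|theta|, |dx|, |dp|) before squaring to avoid overflow/underflow */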
    if (*stp < *stx) gamma1 = -gamma1;
    /* Can p be 0? Check */
    p    = (gamma1 - *dx) + theta;
    q    = ((gamma1 - *dx) + gamma1) + *dp;
    r    = p / q;
    stpc = *stx + r * (*stp - *stx);
    stpq = *stx + ((*dx / ((*fx - *fp) / (*stp - *stx) + *dx)) * 0.5) * (*stp - *stx);

    if (PetscAbsReal(stpc - *stx) < PetscAbsReal(stpq - *stx)) stpf = stpc;
    else stpf = stpc + 0.5 * (stpq - stpc);
    mtP->bracket = 1;
  } else if (sgnd < 0.0) {
    /* Case 2: A lower function value and derivatives of
       opposite sign. The minimum is bracketed. If the cubic
       step is closer to stx than the quadratic (secant) step,
       the cubic step is taken, else the quadratic step is taken. */

    mtP->infoc = 2;
    bound      = 0;
    theta      = 3 * (*fx - *fp) / (*stp - *stx) + *dx + *dp;
    s          = PetscMax(PetscAbsReal(theta), PetscAbsReal(*dx));
    s          = PetscMax(s, PetscAbsReal(*dp));
    gamma1     = s * PetscSqrtScalar(PetscPowScalar(theta / s, 2.0) - (*dx / s) * (*dp / s));
    if (*stp > *stx) gamma1 = -gamma1;
    p    = (gamma1 - *dp) + theta;
    q    = ((gamma1 - *dp) + gamma1) + *dx;
    r    = p / q;
    stpc = *stp + r * (*stx - *stp);
    stpq = *stp + (*dp / (*dp - *dx)) * (*stx - *stp);

    if (PetscAbsReal(stpc - *stp) > PetscAbsReal(stpq - *stp)) stpf = stpc;
    else stpf = stpq;
    mtP->bracket = 1;
  } else if (PetscAbsReal(*dp) < PetscAbsReal(*dx)) {
    /* Case 3: A lower function value, derivatives of the
       same sign, and the magnitude of the derivative decreases.
       The cubic step is only used if the cubic tends to infinity
       in the direction of the step or if the minimum of the cubic
       is beyond stp. Otherwise the cubic step is defined to be
       either stepmin or stepmax. The quadratic (secant) step is also
       computed and if the minimum is bracketed then the step
       closest to stx is taken, else the step farthest away is taken. */

    mtP->infoc = 3;
    bound      = 1;
    theta      = 3 * (*fx - *fp) / (*stp - *stx) + *dx + *dp;
    s          = PetscMax(PetscAbsReal(theta), PetscAbsReal(*dx));
    s          = PetscMax(s, PetscAbsReal(*dp));

    /* The case gamma1 = 0 only arises if the cubic does not tend
       to infinity in the direction of the step. */
    gamma1 = s * PetscSqrtScalar(PetscMax(0.0, PetscPowScalar(theta / s, 2.0) - (*dx / s) * (*dp / s)));
    if (*stp > *stx) gamma1 = -gamma1;
    p = (gamma1 - *dp) + theta;
    q = (gamma1 + (*dx - *dp)) + gamma1;
    r = p / q;
    if (r < 0.0 && gamma1 != 0.0) stpc = *stp + r * (*stx - *stp);
    else if (*stp > *stx) stpc = ls->stepmax;
    else stpc = ls->stepmin;
    stpq = *stp + (*dp / (*dp - *dx)) * (*stx - *stp);

    if (mtP->bracket) {
      if (PetscAbsReal(*stp - stpc) < PetscAbsReal(*stp - stpq)) stpf = stpc;
      else stpf = stpq;
    } else {
      if (PetscAbsReal(*stp - stpc) > PetscAbsReal(*stp - stpq)) stpf = stpc;
      else stpf = stpq;
    }
  } else {
    /* Case 4: A lower function value, derivatives of the
       same sign, and the magnitude of the derivative does
       not decrease. If the minimum is not bracketed, the step
       is either stpmin or stpmax, else the cubic step is taken. */

    mtP->infoc = 4;
    bound      = 0;
    if (mtP->bracket) {
      theta  = 3 * (*fp - *fy) / (*sty - *stp) + *dy + *dp;
      s      = PetscMax(PetscAbsReal(theta), PetscAbsReal(*dy));
      s      = PetscMax(s, PetscAbsReal(*dp));
      gamma1 = s * PetscSqrtScalar(PetscPowScalar(theta / s, 2.0) - (*dy / s) * (*dp / s));
      if (*stp > *sty) gamma1 = -gamma1;
      p    = (gamma1 - *dp) + theta;
      q    = ((gamma1 - *dp) + gamma1) + *dy;
      r    = p / q;
      stpc = *stp + r * (*sty - *stp);
      stpf = stpc;
    } else if (*stp > *stx) {
      stpf = ls->stepmax;
    } else {
      stpf = ls->stepmin;
    }
  }

  /* Update the interval of uncertainty. This update does not
     depend on the new step or the case analysis above. */

  if (*fp > *fx) {
    *sty = *stp;
    *fy  = *fp;
    *dy  = *dp;
  } else {
    if (sgnd < 0.0) {
      *sty = *stx;
      *fy  = *fx;
      *dy  = *dx;
    }
    *stx = *stp;
    *fx  = *fp;
    *dx  = *dp;
  }

  /* Compute the new step and safeguard it. */
  stpf = PetscMin(ls->stepmax, stpf);
  stpf = PetscMax(ls->stepmin, stpf);
  *stp = stpf;
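  /* If the minimizer is bracketed and the bound flag was set (cases 1 and 3), keep the new step within 2/3 of the interval from stx toward sty */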
  if (mtP->bracket && bound) {
    if (*sty > *stx) *stp = PetscMin(*stx + 0.66 * (*sty - *stx), *stp);
    else *stp = PetscMax(*stx + 0.66 * (*sty - *stx), *stp);
  }
  PetscFunctionReturn(PETSC_SUCCESS);
}