#include <petsctaolinesearch.h>
#include <../src/tao/bound/impls/bncg/bncg.h>

#define CG_FletcherReeves       0
#define CG_PolakRibiere         1
#define CG_PolakRibierePlus     2
#define CG_HestenesStiefel      3
#define CG_DaiYuan              4
#define CG_Types                5

static const char *CG_Table[64] = {"fr", "pr", "prp", "hs", "dy"};

#define CG_AS_NONE       0
#define CG_AS_BERTSEKAS  1
#define CG_AS_SIZE       2

static const char *CG_AS_TYPE[64] = {"none", "bertsekas"};

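/* Toggles recycling: when enabled, a new TaoSolve() call re-uses the objective value,
   gradient, and CG history left over from the end of the previous solve instead of
   recomputing them at the initial point */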
PetscErrorCode TaoBNCGSetRecycleFlag(Tao tao, PetscBool recycle)
{
  TAO_BNCG                     *cg = (TAO_BNCG*)tao->data;

  PetscFunctionBegin;
  cg->recycle = recycle;
  PetscFunctionReturn(0);
}

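/* Estimates the active and inactive index sets at the current iterate. The previous
   inactive set is stashed in inactive_old so that newly freed variables can be reset
   later in the solve. CG_AS_NONE classifies variables by directly comparing the iterate
   and gradient against the bounds, while CG_AS_BERTSEKAS estimates the active bounds
   from a trial gradient descent step */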
PetscErrorCode TaoBNCGEstimateActiveSet(Tao tao, PetscInt asType)
{
  PetscErrorCode               ierr;
  TAO_BNCG                     *cg = (TAO_BNCG *)tao->data;

  PetscFunctionBegin;
  ierr = ISDestroy(&cg->inactive_old);CHKERRQ(ierr);
  if (cg->inactive_idx) {
    ierr = ISDuplicate(cg->inactive_idx, &cg->inactive_old);CHKERRQ(ierr);
    ierr = ISCopy(cg->inactive_idx, cg->inactive_old);CHKERRQ(ierr);
  }
  switch (asType) {
  case CG_AS_NONE:
    ierr = ISDestroy(&cg->inactive_idx);CHKERRQ(ierr);
    ierr = VecWhichInactive(tao->XL, tao->solution, cg->unprojected_gradient, tao->XU, PETSC_TRUE, &cg->inactive_idx);CHKERRQ(ierr);
    ierr = ISDestroy(&cg->active_idx);CHKERRQ(ierr);
    ierr = ISComplementVec(cg->inactive_idx, tao->solution, &cg->active_idx);CHKERRQ(ierr);
    break;

  case CG_AS_BERTSEKAS:
    /* Use gradient descent to estimate the active set */
    ierr = VecCopy(cg->unprojected_gradient, cg->W);CHKERRQ(ierr);
    ierr = VecScale(cg->W, -1.0);CHKERRQ(ierr);
    ierr = TaoEstimateActiveBounds(tao->solution, tao->XL, tao->XU, cg->unprojected_gradient, cg->W, cg->work, cg->as_step, &cg->as_tol, &cg->active_lower, &cg->active_upper, &cg->active_fixed, &cg->active_idx, &cg->inactive_idx);CHKERRQ(ierr);
    break;

  default:
    break;
  }
  PetscFunctionReturn(0);
}

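/* Restricts a computed step direction to the feasible region: with CG_AS_NONE the
   components belonging to the active set are zeroed out, while with CG_AS_BERTSEKAS
   the step is adjusted so that the updated iterate honors the bounds */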
PetscErrorCode TaoBNCGBoundStep(Tao tao, Vec step)
{
  PetscErrorCode               ierr;
  TAO_BNCG                     *cg = (TAO_BNCG *)tao->data;

  PetscFunctionBegin;
  switch (cg->as_type) {
  case CG_AS_NONE:
    if (cg->active_idx) {
      ierr = VecISSet(step, cg->active_idx, 0.0);CHKERRQ(ierr);
    }
    break;

  case CG_AS_BERTSEKAS:
    ierr = TaoBoundStep(tao->solution, tao->XL, tao->XU, cg->active_lower, cg->active_upper, cg->active_fixed, 1.0, step);CHKERRQ(ierr);
    break;

  default:
    break;
  }
  PetscFunctionReturn(0);
}

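/* Main solver loop: project the iterate onto the bounds, estimate the active set,
   build a (possibly restarted) CG direction on the inactive variables, safeguard it
   into a descent direction, and globalize with a bounded line search */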
static PetscErrorCode TaoSolve_BNCG(Tao tao)
{
  TAO_BNCG                     *cg = (TAO_BNCG*)tao->data;
  PetscErrorCode               ierr;
  TaoLineSearchConvergedReason ls_status = TAOLINESEARCH_CONTINUE_ITERATING;
  PetscReal                    step=1.0,gnorm,gnorm2,gd,ginner,beta,dnorm,resnorm;
  PetscReal                    gd_old,gnorm2_old,f_old;
  PetscBool                    cg_restart;
  PetscInt                     nDiff;

  PetscFunctionBegin;
  /* Compute the variable bounds and make them available to the line search */
  ierr = TaoComputeVariableBounds(tao);CHKERRQ(ierr);
  ierr = TaoLineSearchSetVariableBounds(tao->linesearch,tao->XL,tao->XU);CHKERRQ(ierr);

  /* Project the initial point onto the feasible region */
  ierr = TaoBoundSolution(tao->XL,tao->XU,tao->solution, &nDiff);CHKERRQ(ierr);

  if (!cg->recycle) {
    /* Solver is not recycling, so compute the objective function value and gradient at the initial point */
    ierr = TaoComputeObjectiveAndGradient(tao, tao->solution, &cg->f, cg->unprojected_gradient);CHKERRQ(ierr);
  } else {
    /* We are recycling, so we have to compute ||g_old||^2 for use in the CG step calculation */
    ierr = VecDot(cg->G_old, cg->G_old, &gnorm2_old);CHKERRQ(ierr);
  }
  ierr = VecNorm(cg->unprojected_gradient,NORM_2,&gnorm);CHKERRQ(ierr);
  if (PetscIsInfOrNanReal(cg->f) || PetscIsInfOrNanReal(gnorm)) SETERRQ(PETSC_COMM_SELF,PETSC_ERR_USER,"User provided compute function generated Inf or NaN");
  /* Estimate the active set and compute the projected gradient */
  ierr = VecCopy(cg->unprojected_gradient, cg->W);CHKERRQ(ierr);
  ierr = VecScale(cg->W, -1.0);CHKERRQ(ierr);
  ierr = TaoBNCGEstimateActiveSet(tao, cg->as_type);CHKERRQ(ierr);

  /* Project the gradient and calculate the norm */
  ierr = VecCopy(cg->unprojected_gradient, tao->gradient);CHKERRQ(ierr);
  ierr = VecISSet(tao->gradient, cg->active_idx, 0.0);CHKERRQ(ierr);
  ierr = VecNorm(tao->gradient,NORM_2,&gnorm);CHKERRQ(ierr);
  gnorm2 = gnorm*gnorm;

  /* Convergence check */
  tao->niter = 0;
  tao->reason = TAO_CONTINUE_ITERATING;
  ierr = VecFischer(tao->solution, cg->unprojected_gradient, tao->XL, tao->XU, cg->W);CHKERRQ(ierr);
  ierr = VecNorm(cg->W, NORM_2, &resnorm);CHKERRQ(ierr);
  ierr = TaoLogConvergenceHistory(tao, cg->f, resnorm, 0.0, tao->ksp_its);CHKERRQ(ierr);
  ierr = TaoMonitor(tao, tao->niter, cg->f, resnorm, 0.0, step);CHKERRQ(ierr);
  ierr = (*tao->ops->convergencetest)(tao,tao->cnvP);CHKERRQ(ierr);
  if (tao->reason != TAO_CONTINUE_ITERATING) PetscFunctionReturn(0);

  /* Start optimization iterations */
  cg->ls_fails = cg->broken_ortho = cg->descent_error = 0;
  cg->resets = -1; /* starts at -1 so the forced restart on the first iteration of a fresh solve is not counted */
  while (tao->reason == TAO_CONTINUE_ITERATING) {
    /* Check restart conditions for using steepest descent */
    ++tao->niter;
    cg_restart = PETSC_FALSE;
    ierr = VecDot(tao->gradient, cg->G_old, &ginner);CHKERRQ(ierr);
    ierr = VecNorm(tao->stepdirection, NORM_2, &dnorm);CHKERRQ(ierr);
    if ((tao->niter == 1 && !cg->recycle) || dnorm == 0.0) {
      /* 1) First iteration of a fresh solve, or a zero previous step, so there is no valid CG history to build on */
      cg_restart = PETSC_TRUE;
    } else if (PetscAbsScalar(ginner) >= cg->eta * gnorm2) {
      /* 2) Gradients are far from orthogonal */
      cg_restart = PETSC_TRUE;
      ++cg->broken_ortho;
    }

    /* Compute CG step */
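    /* A restart (beta = 0) falls back on steepest descent. Otherwise, writing g for the
       current projected gradient, g_old for the previous one, and d for the previous
       step direction, the cases below compute:
         Fletcher-Reeves:      beta = (g.g) / (g_old.g_old)
         Polak-Ribiere:        beta = g.(g - g_old) / (g_old.g_old)
         Polak-Ribiere-Plus:   beta = max(g.(g - g_old) / (g_old.g_old), 0)
         Hestenes-Stiefel:     beta = g.(g - g_old) / d.(g - g_old)
         Dai-Yuan:             beta = (g.g) / d.(g - g_old)
       using gnorm2 = g.g, ginner = g.g_old, gd = g.d, and gd_old = g_old.d */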
    if (cg_restart) {
      beta = 0.0;
      ++cg->resets;
    } else {
      switch (cg->cg_type) {
      case CG_FletcherReeves:
        beta = gnorm2 / gnorm2_old;
        break;

      case CG_PolakRibiere:
        beta = (gnorm2 - ginner) / gnorm2_old;
        break;

      case CG_PolakRibierePlus:
        beta = PetscMax((gnorm2-ginner)/gnorm2_old, 0.0);
        break;

      case CG_HestenesStiefel:
        ierr = VecDot(tao->gradient, tao->stepdirection, &gd);CHKERRQ(ierr);
        ierr = VecDot(cg->G_old, tao->stepdirection, &gd_old);CHKERRQ(ierr);
        beta = (gnorm2 - ginner) / (gd - gd_old);
        break;

      case CG_DaiYuan:
        ierr = VecDot(tao->gradient, tao->stepdirection, &gd);CHKERRQ(ierr);
        ierr = VecDot(cg->G_old, tao->stepdirection, &gd_old);CHKERRQ(ierr);
        beta = gnorm2 / (gd - gd_old);
        break;

      default:
        beta = 0.0;
        break;
      }
    }

    /* Compute the direction d = -g + beta*d */
    ierr = VecAXPBY(tao->stepdirection, -1.0, beta, tao->gradient);CHKERRQ(ierr);
    ierr = TaoBNCGBoundStep(tao, tao->stepdirection);CHKERRQ(ierr);
    if (cg->inactive_old) {
      /* Identify the variables that were active at the previous iterate but became inactive this iteration */
      ierr = ISDestroy(&cg->new_inactives);CHKERRQ(ierr);
      ierr = ISDifference(cg->inactive_idx, cg->inactive_old, &cg->new_inactives);CHKERRQ(ierr);
      /* Selectively reset the CG step for those freshly inactive variables to the gradient descent direction */
      ierr = VecGetSubVector(tao->stepdirection, cg->new_inactives, &cg->inactive_step);CHKERRQ(ierr);
      ierr = VecGetSubVector(tao->gradient, cg->new_inactives, &cg->inactive_grad);CHKERRQ(ierr);
      ierr = VecCopy(cg->inactive_grad, cg->inactive_step);CHKERRQ(ierr);
      ierr = VecScale(cg->inactive_step, -1.0);CHKERRQ(ierr);
      ierr = VecRestoreSubVector(tao->stepdirection, cg->new_inactives, &cg->inactive_step);CHKERRQ(ierr);
      ierr = VecRestoreSubVector(tao->gradient, cg->new_inactives, &cg->inactive_grad);CHKERRQ(ierr);
    }

    /* Verify that this is a descent direction: require g.d <= -rho*||d||^pow, otherwise discard the CG direction */
    ierr = VecDot(tao->gradient, tao->stepdirection, &gd);CHKERRQ(ierr);
    ierr = VecNorm(tao->stepdirection, NORM_2, &dnorm);CHKERRQ(ierr);
    if (gd > -cg->rho*PetscPowReal(dnorm, cg->pow)) {
      /* Not a descent direction, so we reset back to projected gradient descent */
      ierr = VecAXPBY(tao->stepdirection, -1.0, 0.0, tao->gradient);CHKERRQ(ierr);
      ++cg->resets;
      ++cg->descent_error;
    }

    /* Store solution and gradient info before it changes */
    ierr = VecCopy(tao->solution, cg->X_old);CHKERRQ(ierr);
    ierr = VecCopy(tao->gradient, cg->G_old);CHKERRQ(ierr);
    ierr = VecCopy(cg->unprojected_gradient, cg->unprojected_gradient_old);CHKERRQ(ierr);
    gnorm2_old = gnorm2;
    f_old = cg->f;

    /* Perform bounded line search */
    step = 1.0;
    ierr = TaoLineSearchApply(tao->linesearch, tao->solution, &cg->f, cg->unprojected_gradient, tao->stepdirection, &step, &ls_status);CHKERRQ(ierr);
    ierr = TaoAddLineSearchCounts(tao);CHKERRQ(ierr);

    /* Check for line search failure */
    if (ls_status != TAOLINESEARCH_SUCCESS && ls_status != TAOLINESEARCH_SUCCESS_USER) {
      ++cg->ls_fails;
      /* Restore previous point */
      gnorm2 = gnorm2_old;
      cg->f = f_old;
      ierr = VecCopy(cg->X_old, tao->solution);CHKERRQ(ierr);
      ierr = VecCopy(cg->G_old, tao->gradient);CHKERRQ(ierr);
      ierr = VecCopy(cg->unprojected_gradient_old, cg->unprojected_gradient);CHKERRQ(ierr);

      /* Fall back on the gradient descent step */
      ierr = VecCopy(tao->gradient, tao->stepdirection);CHKERRQ(ierr);
      ierr = VecScale(tao->stepdirection, -1.0);CHKERRQ(ierr);
      ierr = TaoBNCGBoundStep(tao, tao->stepdirection);CHKERRQ(ierr);

      step = 1.0;
      ierr = TaoLineSearchApply(tao->linesearch, tao->solution, &cg->f, cg->unprojected_gradient, tao->stepdirection, &step, &ls_status);CHKERRQ(ierr);
      ierr = TaoAddLineSearchCounts(tao);CHKERRQ(ierr);

      if (ls_status != TAOLINESEARCH_SUCCESS && ls_status != TAOLINESEARCH_SUCCESS_USER) {
        ++cg->ls_fails;
        /* Restore previous point */
        gnorm2 = gnorm2_old;
        cg->f = f_old;
        ierr = VecCopy(cg->X_old, tao->solution);CHKERRQ(ierr);
        ierr = VecCopy(cg->G_old, tao->gradient);CHKERRQ(ierr);
        ierr = VecCopy(cg->unprojected_gradient_old, cg->unprojected_gradient);CHKERRQ(ierr);

        /* Nothing left to do but fail out of the optimization */
        step = 0.0;
        tao->reason = TAO_DIVERGED_LS_FAILURE;
      }
    }

    if (tao->reason != TAO_DIVERGED_LS_FAILURE) {
      /* Estimate the active set at the new solution */
      ierr = TaoBNCGEstimateActiveSet(tao, cg->as_type);CHKERRQ(ierr);

      /* Compute the projected gradient and its norm */
      ierr = VecCopy(cg->unprojected_gradient, tao->gradient);CHKERRQ(ierr);
      ierr = VecISSet(tao->gradient, cg->active_idx, 0.0);CHKERRQ(ierr);
      ierr = VecNorm(tao->gradient,NORM_2,&gnorm);CHKERRQ(ierr);
      gnorm2 = gnorm*gnorm;
    }

    /* Convergence test */
    ierr = VecFischer(tao->solution, cg->unprojected_gradient, tao->XL, tao->XU, cg->W);CHKERRQ(ierr);
    ierr = VecNorm(cg->W, NORM_2, &resnorm);CHKERRQ(ierr);
    ierr = TaoLogConvergenceHistory(tao, cg->f, resnorm, 0.0, tao->ksp_its);CHKERRQ(ierr);
    ierr = TaoMonitor(tao, tao->niter, cg->f, resnorm, 0.0, step);CHKERRQ(ierr);
    ierr = (*tao->ops->convergencetest)(tao,tao->cnvP);CHKERRQ(ierr);
  }
  PetscFunctionReturn(0);
}

static PetscErrorCode TaoSetUp_BNCG(Tao tao)
{
  TAO_BNCG       *cg = (TAO_BNCG*)tao->data;
  PetscErrorCode ierr;

  PetscFunctionBegin;
  if (!tao->gradient) {
    ierr = VecDuplicate(tao->solution,&tao->gradient);CHKERRQ(ierr);
  }
  if (!tao->stepdirection) {
    ierr = VecDuplicate(tao->solution,&tao->stepdirection);CHKERRQ(ierr);
  }
  if (!cg->W) {
    ierr = VecDuplicate(tao->solution,&cg->W);CHKERRQ(ierr);
  }
  if (!cg->work) {
    ierr = VecDuplicate(tao->solution,&cg->work);CHKERRQ(ierr);
  }
  if (!cg->X_old) {
    ierr = VecDuplicate(tao->solution,&cg->X_old);CHKERRQ(ierr);
  }
  if (!cg->G_old) {
    ierr = VecDuplicate(tao->gradient,&cg->G_old);CHKERRQ(ierr);
  }
  if (!cg->unprojected_gradient) {
    ierr = VecDuplicate(tao->gradient,&cg->unprojected_gradient);CHKERRQ(ierr);
  }
  if (!cg->unprojected_gradient_old) {
    ierr = VecDuplicate(tao->gradient,&cg->unprojected_gradient_old);CHKERRQ(ierr);
  }
  PetscFunctionReturn(0);
}

static PetscErrorCode TaoDestroy_BNCG(Tao tao)
{
  TAO_BNCG       *cg = (TAO_BNCG*) tao->data;
  PetscErrorCode ierr;

  PetscFunctionBegin;
  if (tao->setupcalled) {
    ierr = VecDestroy(&cg->W);CHKERRQ(ierr);
    ierr = VecDestroy(&cg->work);CHKERRQ(ierr);
    ierr = VecDestroy(&cg->X_old);CHKERRQ(ierr);
    ierr = VecDestroy(&cg->G_old);CHKERRQ(ierr);
    ierr = VecDestroy(&cg->unprojected_gradient);CHKERRQ(ierr);
    ierr = VecDestroy(&cg->unprojected_gradient_old);CHKERRQ(ierr);
  }
  /* The index sets are created during the solve; release them here so they do not leak (ISDestroy is a no-op on NULL) */
  ierr = ISDestroy(&cg->active_lower);CHKERRQ(ierr);
  ierr = ISDestroy(&cg->active_upper);CHKERRQ(ierr);
  ierr = ISDestroy(&cg->active_fixed);CHKERRQ(ierr);
  ierr = ISDestroy(&cg->active_idx);CHKERRQ(ierr);
  ierr = ISDestroy(&cg->inactive_idx);CHKERRQ(ierr);
  ierr = ISDestroy(&cg->inactive_old);CHKERRQ(ierr);
  ierr = ISDestroy(&cg->new_inactives);CHKERRQ(ierr);
  ierr = PetscFree(tao->data);CHKERRQ(ierr);
  PetscFunctionReturn(0);
}

static PetscErrorCode TaoSetFromOptions_BNCG(PetscOptionItems *PetscOptionsObject,Tao tao)
{
  TAO_BNCG       *cg = (TAO_BNCG*)tao->data;
  PetscErrorCode ierr;

  PetscFunctionBegin;
  ierr = TaoLineSearchSetFromOptions(tao->linesearch);CHKERRQ(ierr);
  ierr = PetscOptionsHead(PetscOptionsObject,"Nonlinear Conjugate Gradient method for bound constrained optimization");CHKERRQ(ierr);
  ierr = PetscOptionsReal("-tao_bncg_eta","restart tolerance", "", cg->eta,&cg->eta,NULL);CHKERRQ(ierr);
  ierr = PetscOptionsReal("-tao_bncg_rho","descent direction tolerance", "", cg->rho,&cg->rho,NULL);CHKERRQ(ierr);
  ierr = PetscOptionsReal("-tao_bncg_pow","descent direction exponent", "", cg->pow,&cg->pow,NULL);CHKERRQ(ierr);
  ierr = PetscOptionsEList("-tao_bncg_type","cg formula", "", CG_Table, CG_Types, CG_Table[cg->cg_type], &cg->cg_type,NULL);CHKERRQ(ierr);
  ierr = PetscOptionsEList("-tao_bncg_as_type","active set estimation method", "", CG_AS_TYPE, CG_AS_SIZE, CG_AS_TYPE[cg->as_type], &cg->as_type,NULL);CHKERRQ(ierr);
  ierr = PetscOptionsReal("-tao_bncg_delta_min","minimum delta value", "", cg->delta_min,&cg->delta_min,NULL);CHKERRQ(ierr);
  ierr = PetscOptionsReal("-tao_bncg_delta_max","maximum delta value", "", cg->delta_max,&cg->delta_max,NULL);CHKERRQ(ierr);
  ierr = PetscOptionsBool("-tao_bncg_recycle","enable recycling the existing solution and gradient at the start of a new solve","",cg->recycle,&cg->recycle,NULL);CHKERRQ(ierr);
  ierr = PetscOptionsReal("-tao_bncg_as_tol", "initial tolerance used when estimating actively bounded variables","",cg->as_tol,&cg->as_tol,NULL);CHKERRQ(ierr);
  ierr = PetscOptionsReal("-tao_bncg_as_step", "step length used when estimating actively bounded variables","",cg->as_step,&cg->as_step,NULL);CHKERRQ(ierr);
  ierr = PetscOptionsTail();CHKERRQ(ierr);
  PetscFunctionReturn(0);
}

static PetscErrorCode TaoView_BNCG(Tao tao, PetscViewer viewer)
{
  PetscBool      isascii;
  TAO_BNCG       *cg = (TAO_BNCG*)tao->data;
  PetscErrorCode ierr;

  PetscFunctionBegin;
  ierr = PetscObjectTypeCompare((PetscObject)viewer, PETSCVIEWERASCII, &isascii);CHKERRQ(ierr);
  if (isascii) {
    ierr = PetscViewerASCIIPushTab(viewer);CHKERRQ(ierr);
    ierr = PetscViewerASCIIPrintf(viewer, "CG Type: %s\n", CG_Table[cg->cg_type]);CHKERRQ(ierr);
    ierr = PetscViewerASCIIPrintf(viewer, "Resets: %D\n", cg->resets);CHKERRQ(ierr);
    ierr = PetscViewerASCIIPrintf(viewer, "  Broken ortho: %D\n", cg->broken_ortho);CHKERRQ(ierr);
    ierr = PetscViewerASCIIPrintf(viewer, "  Not a descent dir.: %D\n", cg->descent_error);CHKERRQ(ierr);
    ierr = PetscViewerASCIIPrintf(viewer, "Line search fails: %D\n", cg->ls_fails);CHKERRQ(ierr);
    ierr = PetscViewerASCIIPopTab(viewer);CHKERRQ(ierr);
  }
  PetscFunctionReturn(0);
}

/*MC
     TAOBNCG -   Bound-constrained Nonlinear Conjugate Gradient method.

   Options Database Keys:
+      -tao_bncg_recycle - enable recycling the latest calculated gradient vector in subsequent TaoSolve() calls
.      -tao_bncg_eta <r> - restart tolerance
.      -tao_bncg_rho <r> - descent direction tolerance
.      -tao_bncg_pow <r> - descent direction exponent
.      -tao_bncg_type <taocg_type> - cg formula
.      -tao_bncg_as_type <none,bertsekas> - active set estimation method
.      -tao_bncg_as_tol <r> - tolerance used in Bertsekas active-set estimation
.      -tao_bncg_as_step <r> - trial step length used in Bertsekas active-set estimation
.      -tao_bncg_delta_min <r> - minimum delta value
-      -tao_bncg_delta_max <r> - maximum delta value

  Notes:
     CG formulas are:
         "fr" - Fletcher-Reeves
         "pr" - Polak-Ribiere
         "prp" - Polak-Ribiere-Plus
         "hs" - Hestenes-Stiefel
         "dy" - Dai-Yuan
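
     A minimal usage sketch (FormFunctionGradient, user, x, xl, and xu are placeholders
     for a user-supplied objective/gradient callback, its context, and the solution and
     bound vectors):
.vb
      TaoCreate(PETSC_COMM_WORLD, &tao);
      TaoSetType(tao, TAOBNCG);
      TaoSetInitialVector(tao, x);
      TaoSetVariableBounds(tao, xl, xu);
      TaoSetObjectiveAndGradientRoutine(tao, FormFunctionGradient, (void*)&user);
      TaoSetFromOptions(tao);
      TaoSolve(tao);
      TaoDestroy(&tao);
.ve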
  Level: beginner
M*/

PETSC_EXTERN PetscErrorCode TaoCreate_BNCG(Tao tao)
{
  TAO_BNCG       *cg;
  const char     *morethuente_type = TAOLINESEARCHMT;
  PetscErrorCode ierr;

  PetscFunctionBegin;
  tao->ops->setup = TaoSetUp_BNCG;
  tao->ops->solve = TaoSolve_BNCG;
  tao->ops->view = TaoView_BNCG;
  tao->ops->setfromoptions = TaoSetFromOptions_BNCG;
  tao->ops->destroy = TaoDestroy_BNCG;

  /* Override default settings (unless already changed) */
  if (!tao->max_it_changed) tao->max_it = 2000;
  if (!tao->max_funcs_changed) tao->max_funcs = 4000;

  /*  Note: nondefault values should be used for the nonlinear conjugate gradient  */
  /*  method.  In particular, gtol should be less than 0.5; the value used by  */
  /*  Nocedal and Wright is 0.10.  We use the default values for the  */
  /*  line search because they seem to work better. */
  ierr = TaoLineSearchCreate(((PetscObject)tao)->comm, &tao->linesearch);CHKERRQ(ierr);
  ierr = PetscObjectIncrementTabLevel((PetscObject)tao->linesearch, (PetscObject)tao, 1);CHKERRQ(ierr);
  ierr = TaoLineSearchSetType(tao->linesearch, morethuente_type);CHKERRQ(ierr);
  ierr = TaoLineSearchUseTaoRoutines(tao->linesearch, tao);CHKERRQ(ierr);
  ierr = TaoLineSearchSetOptionsPrefix(tao->linesearch,tao->hdr.prefix);CHKERRQ(ierr);

  ierr = PetscNewLog(tao,&cg);CHKERRQ(ierr);
  tao->data = (void*)cg;
  cg->rho = 1e-4;
  cg->pow = 2.1;
  cg->eta = 0.5;
  cg->delta_min = 1e-7;
  cg->delta_max = 100;
  cg->as_step = 0.001;
  cg->as_tol = 0.001;
  cg->as_type = CG_AS_BERTSEKAS;
  cg->cg_type = CG_DaiYuan;
  cg->recycle = PETSC_FALSE;
  PetscFunctionReturn(0);
}
437