xref: /petsc/src/tao/bound/impls/bnk/bnls.c (revision 080d2917cae42b0a43dd96fa397634a71dcc282e)
1 #include <../src/tao/bound/impls/bnk/bnk.h>
2 #include <petscksp.h>
3 
/*
 Implements Newton's Method with a line search approach for solving
 unconstrained minimization problems.  A Moré-Thuente line search
 is used to guarantee that the BFGS preconditioner remains positive
 definite.

 The method can shift the Hessian matrix.  The shifting procedure is
 adapted from the PATH algorithm for solving complementarity
 problems.

 The linear system solve should be done with a conjugate gradient
 method, although any method can be used.
*/
17 
18 static PetscErrorCode TaoSolve_BNLS(Tao tao)
19 {
20   PetscErrorCode               ierr;
21   TAO_BNK                      *bnk = (TAO_BNK *)tao->data;
22   TaoLineSearchConvergedReason ls_reason;
23 
24   PetscReal                    f_full, gdx;
25   PetscReal                    step = 1.0;
26   PetscReal                    delta;
27   PetscReal                    e_min;
28 
29   PetscBool                    trustAccept;
30   PetscInt                     stepType;
31   PetscInt                     bfgsUpdates = 0;
32   PetscInt                     needH = 1;
33 
34   PetscFunctionBegin;
35   if (tao->XL || tao->XU || tao->ops->computebounds) {
36     ierr = PetscPrintf(((PetscObject)tao)->comm,"WARNING: Variable bounds have been set but will be ignored by nls algorithm\n");CHKERRQ(ierr);
37   }
38 
39   /* Check convergence criteria */
40   ierr = TaoComputeObjectiveAndGradient(tao, tao->solution, &bnk->f, tao->gradient);CHKERRQ(ierr);
41   ierr = TaoGradientNorm(tao, tao->gradient,NORM_2,&bnk->gnorm);CHKERRQ(ierr);
42   if (PetscIsInfOrNanReal(bnk->f) || PetscIsInfOrNanReal(bnk->gnorm)) SETERRQ(PETSC_COMM_SELF,1, "User provided compute function generated Inf or NaN");
43 
44   tao->reason = TAO_CONTINUE_ITERATING;
45   ierr = TaoLogConvergenceHistory(tao,bnk->f,bnk->gnorm,0.0,tao->ksp_its);CHKERRQ(ierr);
46   ierr = TaoMonitor(tao,tao->niter,bnk->f,bnk->gnorm,0.0,step);CHKERRQ(ierr);
47   ierr = (*tao->ops->convergencetest)(tao,tao->cnvP);CHKERRQ(ierr);
48   if (tao->reason != TAO_CONTINUE_ITERATING) PetscFunctionReturn(0);
49 
50   /* Initialize the preconditioner and trust radius */
51   ierr = TaoBNKInitialize(tao);CHKERRQ(ierr);
52 
53   /* Have not converged; continue with Newton method */
54   while (tao->reason == TAO_CONTINUE_ITERATING) {
55     ++tao->niter;
56     tao->ksp_its=0;
57 
58     /* Use the common BNK kernel to compute the step */
59     ierr = TaoBNKComputeStep(tao, &stepType);CHKERRQ(ierr);
60 
61     /* Store current solution before it changes */
62     bnk->fold = bnk->f;
63     ierr = VecCopy(tao->solution, bnk->Xold);CHKERRQ(ierr);
64     ierr = VecCopy(tao->gradient, bnk->Gold);CHKERRQ(ierr);
65 
66     /* Perform the linesearch */
67     ierr = TaoLineSearchApply(tao->linesearch, tao->solution, &bnk->f, tao->gradient, tao->stepdirection, &step, &ls_reason);CHKERRQ(ierr);
68     ierr = TaoAddLineSearchCounts(tao);CHKERRQ(ierr);
69 
70     while (ls_reason != TAOLINESEARCH_SUCCESS && ls_reason != TAOLINESEARCH_SUCCESS_USER && stepType != BNK_GRADIENT) {
71       /* Linesearch failed, revert solution */
72       bnk->f = bnk->fold;
73       ierr = VecCopy(bnk->Xold, tao->solution);CHKERRQ(ierr);
74       ierr = VecCopy(bnk->Gold, tao->gradient);CHKERRQ(ierr);
75 
76       switch(stepType) {
77       case BNK_NEWTON:
78         /* Failed to obtain acceptable iterate with Newton 1step
79            Update the perturbation for next time */
80         if (bnk->pert <= 0.0) {
81           /* Initialize the perturbation */
82           bnk->pert = PetscMin(bnk->imax, PetscMax(bnk->imin, bnk->imfac * bnk->gnorm));
83           if (bnk->is_gltr) {
84             ierr = KSPCGGLTRGetMinEig(tao->ksp,&e_min);CHKERRQ(ierr);
85             bnk->pert = PetscMax(bnk->pert, -e_min);
86           }
87         } else {
88           /* Increase the perturbation */
89           bnk->pert = PetscMin(bnk->pmax, PetscMax(bnk->pgfac * bnk->pert, bnk->pmgfac * bnk->gnorm));
90         }
91 
92         if (BNK_PC_BFGS != bnk->pc_type) {
93           /* We don't have the bfgs matrix around and being updated
94              Must use gradient direction in this case */
95           ierr = VecCopy(tao->gradient, tao->stepdirection);CHKERRQ(ierr);
96           ++bnk->grad;
97           stepType = BNK_GRADIENT;
98         } else {
99           /* Attempt to use the BFGS direction */
100           ierr = MatLMVMSolve(bnk->M, tao->gradient, tao->stepdirection);CHKERRQ(ierr);
101           /* Check for success (descent direction) */
102           ierr = VecDot(tao->solution, tao->stepdirection, &gdx);CHKERRQ(ierr);
103           if ((gdx <= 0) || PetscIsInfOrNanReal(gdx)) {
104             /* BFGS direction is not descent or direction produced not a number
105                We can assert bfgsUpdates > 1 in this case
106                Use steepest descent direction (scaled) */
107 
108             if (bnk->f != 0.0) {
109               delta = 2.0 * PetscAbsScalar(bnk->f) / (bnk->gnorm*bnk->gnorm);
110             } else {
111               delta = 2.0 / (bnk->gnorm*bnk->gnorm);
112             }
113             ierr = MatLMVMSetDelta(bnk->M, delta);CHKERRQ(ierr);
114             ierr = MatLMVMReset(bnk->M);CHKERRQ(ierr);
115             ierr = MatLMVMUpdate(bnk->M, tao->solution, tao->gradient);CHKERRQ(ierr);
116             ierr = MatLMVMSolve(bnk->M, tao->gradient, tao->stepdirection);CHKERRQ(ierr);
117 
118             bfgsUpdates = 1;
119             ++bnk->sgrad;
120             stepType = BNK_SCALED_GRADIENT;
121           } else {
122             ierr = MatLMVMGetUpdates(bnk->M, &bfgsUpdates);CHKERRQ(ierr);
123             if (1 == bfgsUpdates) {
124               /* The first BFGS direction is always the scaled gradient */
125               ++bnk->sgrad;
126               stepType = BNK_SCALED_GRADIENT;
127             } else {
128               ++bnk->bfgs;
129               stepType = BNK_BFGS;
130             }
131           }
132         }
133         break;
134 
135       case BNK_BFGS:
136         /* Can only enter if pc_type == BNK_PC_BFGS
137            Failed to obtain acceptable iterate with BFGS step
138            Attempt to use the scaled gradient direction */
139 
140         if (bnk->f != 0.0) {
141           delta = 2.0 * PetscAbsScalar(bnk->f) / (bnk->gnorm*bnk->gnorm);
142         } else {
143           delta = 2.0 / (bnk->gnorm*bnk->gnorm);
144         }
145         ierr = MatLMVMSetDelta(bnk->M, delta);CHKERRQ(ierr);
146         ierr = MatLMVMReset(bnk->M);CHKERRQ(ierr);
147         ierr = MatLMVMUpdate(bnk->M, tao->solution, tao->gradient);CHKERRQ(ierr);
148         ierr = MatLMVMSolve(bnk->M, tao->gradient, tao->stepdirection);CHKERRQ(ierr);
149 
150         bfgsUpdates = 1;
151         ++bnk->sgrad;
152         stepType = BNK_SCALED_GRADIENT;
153         break;
154 
155       case BNK_SCALED_GRADIENT:
156         /* Can only enter if pc_type == BNK_PC_BFGS
157            The scaled gradient step did not produce a new iterate;
158            attemp to use the gradient direction.
159            Need to make sure we are not using a different diagonal scaling */
160 
161         ierr = MatLMVMSetScale(bnk->M,0);CHKERRQ(ierr);
162         ierr = MatLMVMSetDelta(bnk->M,1.0);CHKERRQ(ierr);
163         ierr = MatLMVMReset(bnk->M);CHKERRQ(ierr);
164         ierr = MatLMVMUpdate(bnk->M, tao->solution, tao->gradient);CHKERRQ(ierr);
165         ierr = MatLMVMSolve(bnk->M, tao->gradient, tao->stepdirection);CHKERRQ(ierr);
166 
167         bfgsUpdates = 1;
168         ++bnk->grad;
169         stepType = BNK_GRADIENT;
170         break;
171       }
172       ierr = VecScale(tao->stepdirection, -1.0);CHKERRQ(ierr);
173 
174       ierr = TaoLineSearchApply(tao->linesearch, tao->solution, &bnk->f, tao->gradient, tao->stepdirection, &step, &ls_reason);CHKERRQ(ierr);
175       ierr = TaoAddLineSearchCounts(tao);CHKERRQ(ierr);
176     }
177 
178     if (ls_reason != TAOLINESEARCH_SUCCESS && ls_reason != TAOLINESEARCH_SUCCESS_USER) {
179       /* Failed to find an improving point */
180       bnk->f = bnk->fold;
181       ierr = VecCopy(bnk->Xold, tao->solution);CHKERRQ(ierr);
182       ierr = VecCopy(bnk->Gold, tao->gradient);CHKERRQ(ierr);
183       step = 0.0;
184       tao->reason = TAO_DIVERGED_LS_FAILURE;
185       break;
186     }
187 
188     /* update trust radius */
189     ierr = TaoLineSearchGetFullStepObjective(tao->linesearch, &f_full);CHKERRQ(ierr);
190     ierr = TaoBNKUpdateTrustRadius(tao, bnk->fold, f_full, stepType, &trustAccept);CHKERRQ(ierr);
191 
192     /*  Check for termination */
193     ierr = TaoGradientNorm(tao, tao->gradient,NORM_2,&bnk->gnorm);CHKERRQ(ierr);
194     if (PetscIsInfOrNanReal(bnk->f) || PetscIsInfOrNanReal(bnk->gnorm)) SETERRQ(PETSC_COMM_SELF,1,"User provided compute function generated Not-a-Number");
195     needH = 1;
196     ierr = TaoLogConvergenceHistory(tao,bnk->f,bnk->gnorm,0.0,tao->ksp_its);CHKERRQ(ierr);
197     ierr = TaoMonitor(tao,tao->niter,bnk->f,bnk->gnorm,0.0,step);CHKERRQ(ierr);
198     ierr = (*tao->ops->convergencetest)(tao,tao->cnvP);CHKERRQ(ierr);
199   }
200   PetscFunctionReturn(0);
201 }
202 
203 PETSC_EXTERN PetscErrorCode TaoCreate_BNLS(Tao tao)
204 {
205   PetscErrorCode ierr;
206 
207   PetscFunctionBegin;
208   ierr = TaoCreate_BNK(tao);CHKERRQ(ierr);
209   tao->ops->solve=TaoSolve_BNLS;
210   PetscFunctionReturn(0);
211 }