#include <../src/tao/bound/impls/bnk/bnk.h>
#include <petscksp.h>

/*
 Implements Newton's method with a line search approach for
 solving bound-constrained minimization problems.

 ------------------------------------------------------------

 x_0 = VecMedian(x_0)
 f_0, g_0 = TaoComputeObjectiveAndGradient(x_0)
 pg_0 = project(g_0)
 check convergence at pg_0
 needH = TaoBNKInitialize(default:BNK_INIT_DIRECTION)
 niter = 0
 step_accepted = true

 while niter < max_it
    niter += 1

    if needH
      if max_cg_steps > 0
        x_k, g_k, pg_k = TaoSolve(BNCG)
      end

      H_k = TaoComputeHessian(x_k)
      if pc_type == BNK_PC_BFGS
        add correction to BFGS approx
        if scale_type == BNK_SCALE_AHESS
          D = VecMedian(1e-6, abs(diag(H_k)), 1e6)
          scale BFGS with VecReciprocal(D)
        end
      end
      needH = False
    end

    if pc_type == BNK_PC_BFGS
      B_k = BFGS
    else
      B_k = VecMedian(1e-6, abs(diag(H_k)), 1e6)
      B_k = VecReciprocal(B_k)
    end
    w = x_k - VecMedian(x_k - 0.001*B_k*g_k)
    eps = min(eps, norm2(w))
    determine the active and inactive index sets such that
      L = {i : (x_k)_i <= l_i + eps && (g_k)_i > 0}
      U = {i : (x_k)_i >= u_i - eps && (g_k)_i < 0}
      F = {i : l_i == (x_k)_i == u_i}
      A = {L + U + F}
      IA = {i : i not in A}

    generate the reduced system Hr_k dr_k = -gr_k for variables in IA
    if p > 0
      Hr_k += p*I
    end
    if pc_type == BNK_PC_BFGS && scale_type == BNK_SCALE_PHESS
      D = VecMedian(1e-6, abs(diag(Hr_k)), 1e6)
      scale BFGS with VecReciprocal(D)
    end
    solve Hr_k dr_k = -gr_k
    set d_k to (l - x) for variables in L, (u - x) for variables in U, and 0 for variables in F

    if dot(d_k, pg_k) >= 0 || norm(d_k) == NaN || norm(d_k) == Inf
      dr_k = -BFGS*gr_k for variables in IA
      if dot(d_k, pg_k) >= 0 || norm(d_k) == NaN || norm(d_k) == Inf
        reset the BFGS preconditioner
        calculate scale delta and apply it to BFGS
        dr_k = -BFGS*gr_k for variables in IA
        if dot(d_k, pg_k) >= 0 || norm(d_k) == NaN || norm(d_k) == Inf
          dr_k = -gr_k for variables in IA
        end
      end
    end

    x_{k+1}, f_{k+1}, g_{k+1}, ls_failed = TaoBNKPerformLineSearch()
    if ls_failed
      f_{k+1} = f_k
      x_{k+1} = x_k
      g_{k+1} = g_k
      pg_{k+1} = pg_k
      terminate
    else
      pg_{k+1} = project(g_{k+1})
      count the accepted step type (Newton, BFGS, scaled grad or grad)
    end

    check convergence at pg_{k+1}
 end
*/
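
/*
 For orientation, a minimal driver sketch (hypothetical, not part of this file) showing how an
 application would typically select and run this solver through the standard Tao interface.
 FormFunctionGradient, FormHessian, and the user context are placeholders supplied by the
 application; x, xl, xu, and H are assumed to be created and preallocated beforehand.

   Tao tao;
   PetscCall(TaoCreate(PETSC_COMM_WORLD, &tao));
   PetscCall(TaoSetType(tao, TAOBNLS));
   PetscCall(TaoSetSolution(tao, x));                                             (initial iterate)
   PetscCall(TaoSetVariableBounds(tao, xl, xu));                                  (bound vectors)
   PetscCall(TaoSetObjectiveAndGradient(tao, NULL, FormFunctionGradient, &user)); (f and g callback)
   PetscCall(TaoSetHessian(tao, H, H, FormHessian, &user));                       (Hessian callback)
   PetscCall(TaoSetFromOptions(tao));
   PetscCall(TaoSolve(tao));
   PetscCall(TaoDestroy(&tao));
*/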

PetscErrorCode TaoSolve_BNLS(Tao tao)
{
  TAO_BNK                      *bnk = (TAO_BNK *)tao->data;
  KSPConvergedReason           ksp_reason;
  TaoLineSearchConvergedReason ls_reason;
  PetscReal                    steplen = 1.0, resnorm;
  PetscBool                    cgTerminate, needH = PETSC_TRUE, stepAccepted, shift = PETSC_TRUE;
  PetscInt                     stepType;

  PetscFunctionBegin;
  /* Initialize the preconditioner, KSP solver and trust radius/line search */
  tao->reason = TAO_CONTINUE_ITERATING;
  PetscCall(TaoBNKInitialize(tao, bnk->init_type, &needH));
  if (tao->reason != TAO_CONTINUE_ITERATING) PetscFunctionReturn(0);

  /* Have not converged; continue with Newton method */
  while (tao->reason == TAO_CONTINUE_ITERATING) {
    /* Call the general-purpose update function */
    if (tao->ops->update) {
      PetscCall((*tao->ops->update)(tao, tao->niter, tao->user_update));
    }
    ++tao->niter;

    if (needH && bnk->inactive_idx) {
      /* Take BNCG steps (if enabled) to trade off Hessian evaluations for more gradient evaluations */
      PetscCall(TaoBNKTakeCGSteps(tao, &cgTerminate));
      if (cgTerminate) {
        tao->reason = bnk->bncg->reason;
        PetscFunctionReturn(0);
      }
      /* Compute the Hessian and update the BFGS preconditioner at the new iterate */
      PetscCall((*bnk->computehessian)(tao));
      needH = PETSC_FALSE;
    }

    /* Use the common BNK kernel to compute the safeguarded Newton step (for inactive variables only) */
    PetscCall((*bnk->computestep)(tao, shift, &ksp_reason, &stepType));
    PetscCall(TaoBNKSafeguardStep(tao, ksp_reason, &stepType));

    /* Store the current solution before it changes */
    bnk->fold = bnk->f;
    PetscCall(VecCopy(tao->solution, bnk->Xold));
    PetscCall(VecCopy(tao->gradient, bnk->Gold));
    PetscCall(VecCopy(bnk->unprojected_gradient, bnk->unprojected_gradient_old));

    /* Trigger the line search */
    PetscCall(TaoBNKPerformLineSearch(tao, &stepType, &steplen, &ls_reason));

    if (ls_reason != TAOLINESEARCH_SUCCESS && ls_reason != TAOLINESEARCH_SUCCESS_USER) {
      /* Failed to find an improving point */
      needH = PETSC_FALSE;
      bnk->f = bnk->fold;
      PetscCall(VecCopy(bnk->Xold, tao->solution));
      PetscCall(VecCopy(bnk->Gold, tao->gradient));
      PetscCall(VecCopy(bnk->unprojected_gradient_old, bnk->unprojected_gradient));
      steplen = 0.0;
      tao->reason = TAO_DIVERGED_LS_FAILURE;
    } else {
      /* New iterate, so the Hessian must be recomputed on the next pass */
      needH = PETSC_TRUE;
      /* Compute the projected gradient */
      PetscCall(TaoBNKEstimateActiveSet(tao, bnk->as_type));
      PetscCall(VecCopy(bnk->unprojected_gradient, tao->gradient));
      PetscCall(VecISSet(tao->gradient, bnk->active_idx, 0.0));
      PetscCall(TaoGradientNorm(tao, tao->gradient, NORM_2, &bnk->gnorm));
      /* Update the trust radius based on the step length */
      PetscCall(TaoBNKUpdateTrustRadius(tao, 0.0, 0.0, BNK_UPDATE_STEP, stepType, &stepAccepted));
      /* Count the accepted step type */
      PetscCall(TaoBNKAddStepCounts(tao, stepType));
      /* Activate BNCG recycling for the next iteration */
      PetscCall(TaoSetRecycleHistory(bnk->bncg, PETSC_TRUE));
    }

    /* Check for termination */
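    /* The Fischer-Burmeister residual combines the gradient with the distance to the bounds into a
       single vector that vanishes exactly at a first-order (KKT) point of the bound-constrained
       problem, so its norm is used as the convergence measure below */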
    PetscCall(VecFischer(tao->solution, bnk->unprojected_gradient, tao->XL, tao->XU, bnk->W));
    PetscCall(VecNorm(bnk->W, NORM_2, &resnorm));
    PetscCheck(!PetscIsInfOrNanReal(resnorm), PetscObjectComm((PetscObject)tao), PETSC_ERR_USER, "User provided compute function generated Inf or NaN");
    PetscCall(TaoLogConvergenceHistory(tao, bnk->f, resnorm, 0.0, tao->ksp_its));
    PetscCall(TaoMonitor(tao, tao->niter, bnk->f, resnorm, 0.0, steplen));
    PetscCall((*tao->ops->convergencetest)(tao, tao->cnvP));
  }
  PetscFunctionReturn(0);
}

/*------------------------------------------------------------*/
/*MC
  TAOBNLS - Bounded Newton Line Search for nonlinear minimization with bound constraints.

  Options Database Keys:
+ -tao_bnk_max_cg_its - maximum number of bounded conjugate-gradient iterations taken in each Newton loop
. -tao_bnk_init_type - trust radius initialization method ("constant", "direction", "interpolation")
. -tao_bnk_update_type - trust radius update method ("step", "direction", "interpolation")
- -tao_bnk_as_type - active-set estimation method ("none", "bertsekas")
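
  Note:
  A representative command-line invocation might look like the following, where the executable name
  and option values are illustrative rather than defaults:
.vb
  ./myprogram -tao_type bnls -tao_bnk_max_cg_its 3 -tao_bnk_as_type bertsekas -tao_monitor
.ve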

  Level: beginner
M*/
PETSC_EXTERN PetscErrorCode TaoCreate_BNLS(Tao tao)
{
  TAO_BNK *bnk;

  PetscFunctionBegin;
  PetscCall(TaoCreate_BNK(tao));
  tao->ops->solve = TaoSolve_BNLS;

  bnk = (TAO_BNK *)tao->data;
  bnk->init_type = BNK_INIT_DIRECTION;
  bnk->update_type = BNK_UPDATE_STEP; /* trust region updates based on line search step length */
  PetscFunctionReturn(0);
}