xref: /petsc/src/tao/bound/impls/bnk/bnls.c (revision df278d8f2b91157cf6d5980446bdf033f772efe7)
1 #include <../src/tao/bound/impls/bnk/bnk.h>
2 #include <petscksp.h>
3 
/*
 Implements Newton's method with a line-search globalization for solving
 bound-constrained minimization problems. A projected Moré-Thuente line
 search is used to guarantee that the BFGS preconditioner remains positive
 definite.

 The method can shift the Hessian matrix. The shifting procedure is
 adapted from the PATH algorithm for solving complementarity
 problems.

 The linear system solve should be done with a conjugate gradient
 method, although any method can be used.
*/
17 
18 static PetscErrorCode TaoSolve_BNLS(Tao tao)
19 {
20   PetscErrorCode               ierr;
21   TAO_BNK                      *bnk = (TAO_BNK *)tao->data;
22   TaoLineSearchConvergedReason ls_reason;
23 
24   PetscReal                    f_full, prered, actred;
25   PetscReal                    steplen = 1.0;
26 
27   PetscBool                    trustAccept;
28   PetscInt                     stepType;
29   PetscInt                     bfgsUpdates = 0;
30 
31   PetscFunctionBegin;
32   /*   Project the current point onto the feasible set */
33   ierr = TaoComputeVariableBounds(tao);CHKERRQ(ierr);
34   ierr = TaoLineSearchSetVariableBounds(tao->linesearch,tao->XL,tao->XU);CHKERRQ(ierr);
35 
36   /* Project the initial point onto the feasible region */
37   ierr = VecMedian(tao->XL,tao->solution,tao->XU,tao->solution);CHKERRQ(ierr);
38 
39   /* Check convergence criteria */
40   ierr = TaoComputeObjectiveAndGradient(tao, tao->solution, &bnk->f, bnk->unprojected_gradient);CHKERRQ(ierr);
41   ierr = VecBoundGradientProjection(bnk->unprojected_gradient,tao->solution,tao->XL,tao->XU,tao->gradient);CHKERRQ(ierr);
42   ierr = TaoGradientNorm(tao, tao->gradient,NORM_2,&bnk->gnorm);CHKERRQ(ierr);
43   if (PetscIsInfOrNanReal(bnk->f) || PetscIsInfOrNanReal(bnk->gnorm)) SETERRQ(PETSC_COMM_SELF,1, "User provided compute function generated Inf or NaN");
44 
45   tao->reason = TAO_CONTINUE_ITERATING;
46   ierr = TaoLogConvergenceHistory(tao,bnk->f,bnk->gnorm,0.0,tao->ksp_its);CHKERRQ(ierr);
47   ierr = TaoMonitor(tao,tao->niter,bnk->f,bnk->gnorm,0.0,steplen);CHKERRQ(ierr);
48   ierr = (*tao->ops->convergencetest)(tao,tao->cnvP);CHKERRQ(ierr);
49   if (tao->reason != TAO_CONTINUE_ITERATING) PetscFunctionReturn(0);
50 
51   /* Initialize the preconditioner and trust radius */
52   ierr = TaoBNKInitialize(tao);CHKERRQ(ierr);
53 
54   /* Have not converged; continue with Newton method */
55   while (tao->reason == TAO_CONTINUE_ITERATING) {
56     ++tao->niter;
57     tao->ksp_its=0;
58 
59     /* Compute the Hessian */
60     ierr = TaoComputeHessian(tao,tao->solution,tao->hessian,tao->hessian_pre);CHKERRQ(ierr);
61 
62     /* Update the BFGS preconditioner */
63     if (BNK_PC_BFGS == bnk->pc_type) {
64       if (BFGS_SCALE_PHESS == bnk->bfgs_scale_type) {
65         /* Obtain diagonal for the bfgs preconditioner  */
66         ierr = MatGetDiagonal(tao->hessian, bnk->Diag);CHKERRQ(ierr);
67         ierr = VecAbs(bnk->Diag);CHKERRQ(ierr);
68         ierr = VecReciprocal(bnk->Diag);CHKERRQ(ierr);
69         ierr = MatLMVMSetScale(bnk->M,bnk->Diag);CHKERRQ(ierr);
70       }
71       /* Update the limited memory preconditioner and get existing # of updates */
72       ierr = MatLMVMUpdate(bnk->M, tao->solution, bnk->unprojected_gradient);CHKERRQ(ierr);
73       ierr = MatLMVMGetUpdates(bnk->M, &bfgsUpdates);CHKERRQ(ierr);
74     }
75 
76     /* Use the common BNK kernel to compute the safeguarded Newton step */
77     ierr = TaoBNKComputeStep(tao, PETSC_TRUE, &stepType);CHKERRQ(ierr);
78 
79     /* Store current solution before it changes */
80     bnk->fold = bnk->f;
81     ierr = VecCopy(tao->solution, bnk->Xold);CHKERRQ(ierr);
82     ierr = VecCopy(tao->gradient, bnk->Gold);CHKERRQ(ierr);
83     ierr = VecCopy(bnk->unprojected_gradient, bnk->unprojected_gradient_old);CHKERRQ(ierr);
84 
85     /* Trigger the line search */
86     ierr = TaoBNKPerformLineSearch(tao, stepType, &steplen, &ls_reason);CHKERRQ(ierr);
87 
88     if (ls_reason != TAOLINESEARCH_SUCCESS && ls_reason != TAOLINESEARCH_SUCCESS_USER) {
89       /* Failed to find an improving point */
90       bnk->f = bnk->fold;
91       ierr = VecCopy(bnk->Xold, tao->solution);CHKERRQ(ierr);
92       ierr = VecCopy(bnk->Gold, tao->gradient);CHKERRQ(ierr);
93       ierr = VecCopy(bnk->unprojected_gradient_old, bnk->unprojected_gradient);CHKERRQ(ierr);
94       steplen = 0.0;
95       tao->reason = TAO_DIVERGED_LS_FAILURE;
96       break;
97     }
98 
99     /* update trust radius */
100     ierr = TaoLineSearchGetFullStepObjective(tao->linesearch, &f_full);CHKERRQ(ierr);
101     ierr = KSPCGGetObjFcn(tao->ksp, &prered);CHKERRQ(ierr);
102     prered = -prered;
103     actred = bnk->fold - f_full;
104     ierr = TaoBNKUpdateTrustRadius(tao, prered, actred, stepType, &trustAccept);CHKERRQ(ierr);
105 
106     /*  Check for termination */
107     ierr = TaoGradientNorm(tao, tao->gradient,NORM_2,&bnk->gnorm);CHKERRQ(ierr);
108     if (PetscIsInfOrNanReal(bnk->f) || PetscIsInfOrNanReal(bnk->gnorm)) SETERRQ(PETSC_COMM_SELF,1,"User provided compute function generated Not-a-Number");
109     ierr = TaoLogConvergenceHistory(tao,bnk->f,bnk->gnorm,0.0,tao->ksp_its);CHKERRQ(ierr);
110     ierr = TaoMonitor(tao,tao->niter,bnk->f,bnk->gnorm,0.0,steplen);CHKERRQ(ierr);
111     ierr = (*tao->ops->convergencetest)(tao,tao->cnvP);CHKERRQ(ierr);
112   }
113   PetscFunctionReturn(0);
114 }
115 
116 /*------------------------------------------------------------*/
117 
118 PETSC_EXTERN PetscErrorCode TaoCreate_BNLS(Tao tao)
119 {
120   TAO_BNK        *bnk;
121   PetscErrorCode ierr;
122 
123   PetscFunctionBegin;
124   ierr = TaoCreate_BNK(tao);CHKERRQ(ierr);
125   tao->ops->solve = TaoSolve_BNLS;
126 
127   bnk = (TAO_BNK *)tao->data;
128   bnk->update_type = BNK_UPDATE_STEP; /* trust region updates based on line search step length */
129   PetscFunctionReturn(0);
130 }