1 #include <petsctaolinesearch.h> 2 #include <../src/tao/bound/impls/bnk/bnk.h> 3 #include <petscksp.h> 4 5 static const char *BNK_INIT[64] = {"constant", "direction", "interpolation"}; 6 static const char *BNK_UPDATE[64] = {"step", "reduction", "interpolation"}; 7 static const char *BNK_AS[64] = {"none", "bertsekas"}; 8 9 /*------------------------------------------------------------*/ 10 11 /* Routine for initializing the KSP solver, the BFGS preconditioner, and the initial trust radius estimation */ 12 13 PetscErrorCode TaoBNKInitialize(Tao tao, PetscInt initType, PetscBool *needH) 14 { 15 PetscErrorCode ierr; 16 TAO_BNK *bnk = (TAO_BNK *)tao->data; 17 PC pc; 18 19 PetscReal f_min, ftrial, prered, actred, kappa, sigma, resnorm; 20 PetscReal tau, tau_1, tau_2, tau_max, tau_min, max_radius; 21 PetscBool is_bfgs, is_jacobi, is_symmetric, sym_set; 22 PetscInt n, N, nDiff; 23 PetscInt i_max = 5; 24 PetscInt j_max = 1; 25 PetscInt i, j; 26 27 PetscFunctionBegin; 28 /* Project the current point onto the feasible set */ 29 ierr = TaoComputeVariableBounds(tao);CHKERRQ(ierr); 30 ierr = TaoSetVariableBounds(bnk->bncg, tao->XL, tao->XU);CHKERRQ(ierr); 31 if (tao->bounded) { 32 ierr = TaoLineSearchSetVariableBounds(tao->linesearch,tao->XL,tao->XU);CHKERRQ(ierr); 33 } 34 35 /* Project the initial point onto the feasible region */ 36 ierr = TaoBoundSolution(tao->solution, tao->XL,tao->XU, 0.0, &nDiff, tao->solution);CHKERRQ(ierr); 37 38 /* Check convergence criteria */ 39 ierr = TaoComputeObjectiveAndGradient(tao, tao->solution, &bnk->f, bnk->unprojected_gradient);CHKERRQ(ierr); 40 ierr = TaoBNKEstimateActiveSet(tao, bnk->as_type);CHKERRQ(ierr); 41 ierr = VecCopy(bnk->unprojected_gradient, tao->gradient);CHKERRQ(ierr); 42 ierr = VecISSet(tao->gradient, bnk->active_idx, 0.0);CHKERRQ(ierr); 43 ierr = TaoGradientNorm(tao, tao->gradient, NORM_2, &bnk->gnorm);CHKERRQ(ierr); 44 45 /* Test the initial point for convergence */ 46 ierr = VecFischer(tao->solution, 
bnk->unprojected_gradient, tao->XL, tao->XU, bnk->W);CHKERRQ(ierr); 47 ierr = VecNorm(bnk->W, NORM_2, &resnorm);CHKERRQ(ierr); 48 if (PetscIsInfOrNanReal(bnk->f) || PetscIsInfOrNanReal(resnorm)) SETERRQ(PETSC_COMM_SELF,1, "User provided compute function generated Inf or NaN"); 49 ierr = TaoLogConvergenceHistory(tao,bnk->f,resnorm,0.0,tao->ksp_its);CHKERRQ(ierr); 50 ierr = TaoMonitor(tao,tao->niter,bnk->f,resnorm,0.0,1.0);CHKERRQ(ierr); 51 ierr = (*tao->ops->convergencetest)(tao,tao->cnvP);CHKERRQ(ierr); 52 if (tao->reason != TAO_CONTINUE_ITERATING) PetscFunctionReturn(0); 53 54 /* Reset KSP stopping reason counters */ 55 bnk->ksp_atol = 0; 56 bnk->ksp_rtol = 0; 57 bnk->ksp_dtol = 0; 58 bnk->ksp_ctol = 0; 59 bnk->ksp_negc = 0; 60 bnk->ksp_iter = 0; 61 bnk->ksp_othr = 0; 62 63 /* Reset accepted step type counters */ 64 bnk->tot_cg_its = 0; 65 bnk->newt = 0; 66 bnk->bfgs = 0; 67 bnk->sgrad = 0; 68 bnk->grad = 0; 69 70 /* Initialize the Hessian perturbation */ 71 bnk->pert = bnk->sval; 72 73 /* Reset initial steplength to zero (this helps BNCG reset its direction internally) */ 74 ierr = VecSet(tao->stepdirection, 0.0);CHKERRQ(ierr); 75 76 /* Allocate the vectors needed for the BFGS approximation */ 77 ierr = KSPGetPC(tao->ksp, &pc);CHKERRQ(ierr); 78 ierr = PetscObjectTypeCompare((PetscObject)pc, PCLMVM, &is_bfgs);CHKERRQ(ierr); 79 ierr = PetscObjectTypeCompare((PetscObject)pc, PCJACOBI, &is_jacobi);CHKERRQ(ierr); 80 if (is_bfgs) { 81 bnk->bfgs_pre = pc; 82 ierr = PCLMVMGetMatLMVM(bnk->bfgs_pre, &bnk->M);CHKERRQ(ierr); 83 ierr = VecGetLocalSize(tao->solution, &n);CHKERRQ(ierr); 84 ierr = VecGetSize(tao->solution, &N);CHKERRQ(ierr); 85 ierr = MatSetSizes(bnk->M, n, n, N, N);CHKERRQ(ierr); 86 ierr = MatLMVMAllocate(bnk->M, tao->solution, bnk->unprojected_gradient);CHKERRQ(ierr); 87 ierr = MatIsSymmetricKnown(bnk->M, &sym_set, &is_symmetric);CHKERRQ(ierr); 88 if (!sym_set || !is_symmetric) SETERRQ(PetscObjectComm((PetscObject)tao), PETSC_ERR_ARG_INCOMP, "LMVM matrix in 
the LMVM preconditioner must be symmetric."); 89 } else if (is_jacobi) { 90 ierr = PCJacobiSetUseAbs(pc,PETSC_TRUE);CHKERRQ(ierr); 91 } 92 93 /* Prepare the min/max vectors for safeguarding diagonal scales */ 94 ierr = VecSet(bnk->Diag_min, bnk->dmin);CHKERRQ(ierr); 95 ierr = VecSet(bnk->Diag_max, bnk->dmax);CHKERRQ(ierr); 96 97 /* Initialize trust-region radius. The initialization is only performed 98 when we are using Nash, Steihaug-Toint or the Generalized Lanczos method. */ 99 *needH = PETSC_TRUE; 100 if (bnk->is_nash || bnk->is_stcg || bnk->is_gltr) { 101 switch(initType) { 102 case BNK_INIT_CONSTANT: 103 /* Use the initial radius specified */ 104 tao->trust = tao->trust0; 105 break; 106 107 case BNK_INIT_INTERPOLATION: 108 /* Use interpolation based on the initial Hessian */ 109 max_radius = 0.0; 110 tao->trust = tao->trust0; 111 for (j = 0; j < j_max; ++j) { 112 f_min = bnk->f; 113 sigma = 0.0; 114 115 if (*needH) { 116 /* Compute the Hessian at the new step, and extract the inactive subsystem */ 117 ierr = bnk->computehessian(tao);CHKERRQ(ierr); 118 ierr = TaoBNKEstimateActiveSet(tao, BNK_AS_NONE);CHKERRQ(ierr); 119 ierr = MatDestroy(&bnk->H_inactive);CHKERRQ(ierr); 120 if (bnk->active_idx) { 121 ierr = MatCreateSubMatrix(tao->hessian, bnk->inactive_idx, bnk->inactive_idx, MAT_INITIAL_MATRIX, &bnk->H_inactive);CHKERRQ(ierr); 122 } else { 123 ierr = MatDuplicate(tao->hessian, MAT_COPY_VALUES, &bnk->H_inactive);CHKERRQ(ierr); 124 } 125 *needH = PETSC_FALSE; 126 } 127 128 for (i = 0; i < i_max; ++i) { 129 /* Take a steepest descent step and snap it to bounds */ 130 ierr = VecCopy(tao->solution, bnk->Xold);CHKERRQ(ierr); 131 ierr = VecAXPY(tao->solution, -tao->trust/bnk->gnorm, tao->gradient);CHKERRQ(ierr); 132 ierr = TaoBoundSolution(tao->solution, tao->XL,tao->XU, 0.0, &nDiff, tao->solution);CHKERRQ(ierr); 133 /* Compute the step we actually accepted */ 134 ierr = VecCopy(tao->solution, bnk->W);CHKERRQ(ierr); 135 ierr = VecAXPY(bnk->W, -1.0, 
bnk->Xold);CHKERRQ(ierr); 136 /* Compute the objective at the trial */ 137 ierr = TaoComputeObjective(tao, tao->solution, &ftrial);CHKERRQ(ierr); 138 if (PetscIsInfOrNanReal(bnk->f)) SETERRQ(PETSC_COMM_SELF,1, "User provided compute function generated Inf or NaN"); 139 ierr = VecCopy(bnk->Xold, tao->solution);CHKERRQ(ierr); 140 if (PetscIsInfOrNanReal(ftrial)) { 141 tau = bnk->gamma1_i; 142 } else { 143 if (ftrial < f_min) { 144 f_min = ftrial; 145 sigma = -tao->trust / bnk->gnorm; 146 } 147 148 /* Compute the predicted and actual reduction */ 149 if (bnk->active_idx) { 150 ierr = VecGetSubVector(bnk->W, bnk->inactive_idx, &bnk->X_inactive);CHKERRQ(ierr); 151 ierr = VecGetSubVector(bnk->Xwork, bnk->inactive_idx, &bnk->inactive_work);CHKERRQ(ierr); 152 } else { 153 bnk->X_inactive = bnk->W; 154 bnk->inactive_work = bnk->Xwork; 155 } 156 ierr = MatMult(bnk->H_inactive, bnk->X_inactive, bnk->inactive_work);CHKERRQ(ierr); 157 ierr = VecDot(bnk->X_inactive, bnk->inactive_work, &prered);CHKERRQ(ierr); 158 if (bnk->active_idx) { 159 ierr = VecRestoreSubVector(bnk->W, bnk->inactive_idx, &bnk->X_inactive);CHKERRQ(ierr); 160 ierr = VecRestoreSubVector(bnk->Xwork, bnk->inactive_idx, &bnk->inactive_work);CHKERRQ(ierr); 161 } 162 prered = tao->trust * (bnk->gnorm - 0.5 * tao->trust * prered / (bnk->gnorm * bnk->gnorm)); 163 actred = bnk->f - ftrial; 164 if ((PetscAbsScalar(actred) <= bnk->epsilon) && (PetscAbsScalar(prered) <= bnk->epsilon)) { 165 kappa = 1.0; 166 } else { 167 kappa = actred / prered; 168 } 169 170 tau_1 = bnk->theta_i * bnk->gnorm * tao->trust / (bnk->theta_i * bnk->gnorm * tao->trust + (1.0 - bnk->theta_i) * prered - actred); 171 tau_2 = bnk->theta_i * bnk->gnorm * tao->trust / (bnk->theta_i * bnk->gnorm * tao->trust - (1.0 + bnk->theta_i) * prered + actred); 172 tau_min = PetscMin(tau_1, tau_2); 173 tau_max = PetscMax(tau_1, tau_2); 174 175 if (PetscAbsScalar(kappa - 1.0) <= bnk->mu1_i) { 176 /* Great agreement */ 177 max_radius = PetscMax(max_radius, 
tao->trust); 178 179 if (tau_max < 1.0) { 180 tau = bnk->gamma3_i; 181 } else if (tau_max > bnk->gamma4_i) { 182 tau = bnk->gamma4_i; 183 } else { 184 tau = tau_max; 185 } 186 } else if (PetscAbsScalar(kappa - 1.0) <= bnk->mu2_i) { 187 /* Good agreement */ 188 max_radius = PetscMax(max_radius, tao->trust); 189 190 if (tau_max < bnk->gamma2_i) { 191 tau = bnk->gamma2_i; 192 } else if (tau_max > bnk->gamma3_i) { 193 tau = bnk->gamma3_i; 194 } else { 195 tau = tau_max; 196 } 197 } else { 198 /* Not good agreement */ 199 if (tau_min > 1.0) { 200 tau = bnk->gamma2_i; 201 } else if (tau_max < bnk->gamma1_i) { 202 tau = bnk->gamma1_i; 203 } else if ((tau_min < bnk->gamma1_i) && (tau_max >= 1.0)) { 204 tau = bnk->gamma1_i; 205 } else if ((tau_1 >= bnk->gamma1_i) && (tau_1 < 1.0) && ((tau_2 < bnk->gamma1_i) || (tau_2 >= 1.0))) { 206 tau = tau_1; 207 } else if ((tau_2 >= bnk->gamma1_i) && (tau_2 < 1.0) && ((tau_1 < bnk->gamma1_i) || (tau_2 >= 1.0))) { 208 tau = tau_2; 209 } else { 210 tau = tau_max; 211 } 212 } 213 } 214 tao->trust = tau * tao->trust; 215 } 216 217 if (f_min < bnk->f) { 218 /* We accidentally found a solution better than the initial, so accept it */ 219 bnk->f = f_min; 220 ierr = VecCopy(tao->solution, bnk->Xold);CHKERRQ(ierr); 221 ierr = VecAXPY(tao->solution,sigma,tao->gradient);CHKERRQ(ierr); 222 ierr = TaoBoundSolution(tao->solution, tao->XL,tao->XU, 0.0, &nDiff, tao->solution);CHKERRQ(ierr); 223 ierr = VecCopy(tao->solution, tao->stepdirection);CHKERRQ(ierr); 224 ierr = VecAXPY(tao->stepdirection, -1.0, bnk->Xold);CHKERRQ(ierr); 225 ierr = TaoComputeGradient(tao,tao->solution,bnk->unprojected_gradient);CHKERRQ(ierr); 226 ierr = TaoBNKEstimateActiveSet(tao, bnk->as_type);CHKERRQ(ierr); 227 ierr = VecCopy(bnk->unprojected_gradient, tao->gradient);CHKERRQ(ierr); 228 ierr = VecISSet(tao->gradient, bnk->active_idx, 0.0);CHKERRQ(ierr); 229 /* Compute gradient at the new iterate and flip switch to compute the Hessian later */ 230 ierr = TaoGradientNorm(tao, 
tao->gradient, NORM_2, &bnk->gnorm);CHKERRQ(ierr); 231 *needH = PETSC_TRUE; 232 /* Test the new step for convergence */ 233 ierr = VecFischer(tao->solution, bnk->unprojected_gradient, tao->XL, tao->XU, bnk->W);CHKERRQ(ierr); 234 ierr = VecNorm(bnk->W, NORM_2, &resnorm);CHKERRQ(ierr); 235 if (PetscIsInfOrNanReal(resnorm)) SETERRQ(PETSC_COMM_SELF,1, "User provided compute function generated Inf or NaN"); 236 ierr = TaoLogConvergenceHistory(tao,bnk->f,resnorm,0.0,tao->ksp_its);CHKERRQ(ierr); 237 ierr = TaoMonitor(tao,tao->niter,bnk->f,resnorm,0.0,1.0);CHKERRQ(ierr); 238 ierr = (*tao->ops->convergencetest)(tao,tao->cnvP);CHKERRQ(ierr); 239 if (tao->reason != TAO_CONTINUE_ITERATING) PetscFunctionReturn(0); 240 /* active BNCG recycling early because we have a stepdirection computed */ 241 ierr = TaoBNCGSetRecycleFlag(bnk->bncg, PETSC_TRUE);CHKERRQ(ierr); 242 } 243 } 244 tao->trust = PetscMax(tao->trust, max_radius); 245 246 /* Ensure that the trust radius is within the limits */ 247 tao->trust = PetscMax(tao->trust, bnk->min_radius); 248 tao->trust = PetscMin(tao->trust, bnk->max_radius); 249 break; 250 251 default: 252 /* Norm of the first direction will initialize radius */ 253 tao->trust = 0.0; 254 break; 255 } 256 } 257 PetscFunctionReturn(0); 258 } 259 260 /*------------------------------------------------------------*/ 261 262 /* Routine for computing the exact Hessian and preparing the preconditioner at the new iterate */ 263 264 PetscErrorCode TaoBNKComputeHessian(Tao tao) 265 { 266 PetscErrorCode ierr; 267 TAO_BNK *bnk = (TAO_BNK *)tao->data; 268 269 PetscFunctionBegin; 270 /* Compute the Hessian */ 271 ierr = TaoComputeHessian(tao,tao->solution,tao->hessian,tao->hessian_pre);CHKERRQ(ierr); 272 /* Add a correction to the BFGS preconditioner */ 273 if (bnk->M) { 274 ierr = MatLMVMUpdate(bnk->M, tao->solution, bnk->unprojected_gradient);CHKERRQ(ierr); 275 } 276 /* Prepare the reduced sub-matrices for the inactive set */ 277 if (bnk->Hpre_inactive) { 278 ierr = 
MatDestroy(&bnk->Hpre_inactive);CHKERRQ(ierr); 279 } 280 if (bnk->H_inactive) { 281 ierr = MatDestroy(&bnk->H_inactive);CHKERRQ(ierr); 282 } 283 if (bnk->active_idx) { 284 ierr = MatCreateSubMatrix(tao->hessian, bnk->inactive_idx, bnk->inactive_idx, MAT_INITIAL_MATRIX, &bnk->H_inactive);CHKERRQ(ierr); 285 if (tao->hessian == tao->hessian_pre) { 286 ierr = PetscObjectReference((PetscObject)bnk->H_inactive);CHKERRQ(ierr); 287 bnk->Hpre_inactive = bnk->H_inactive; 288 } else { 289 ierr = MatCreateSubMatrix(tao->hessian_pre, bnk->inactive_idx, bnk->inactive_idx, MAT_INITIAL_MATRIX, &bnk->Hpre_inactive);CHKERRQ(ierr); 290 } 291 if (bnk->bfgs_pre) { 292 ierr = PCLMVMSetIS(bnk->bfgs_pre, bnk->inactive_idx);CHKERRQ(ierr); 293 } 294 } else { 295 ierr = MatDuplicate(tao->hessian, MAT_COPY_VALUES, &bnk->H_inactive);CHKERRQ(ierr); 296 if (tao->hessian == tao->hessian_pre) { 297 ierr = PetscObjectReference((PetscObject)bnk->H_inactive);CHKERRQ(ierr); 298 bnk->Hpre_inactive = bnk->H_inactive; 299 } else { 300 ierr = MatDuplicate(tao->hessian_pre, MAT_COPY_VALUES, &bnk->Hpre_inactive);CHKERRQ(ierr); 301 } 302 if (bnk->bfgs_pre) { 303 ierr = PCLMVMClearIS(bnk->bfgs_pre);CHKERRQ(ierr); 304 } 305 } 306 PetscFunctionReturn(0); 307 } 308 309 /*------------------------------------------------------------*/ 310 311 /* Routine for estimating the active set */ 312 313 PetscErrorCode TaoBNKEstimateActiveSet(Tao tao, PetscInt asType) 314 { 315 PetscErrorCode ierr; 316 TAO_BNK *bnk = (TAO_BNK *)tao->data; 317 PetscBool hessComputed, diagExists; 318 319 PetscFunctionBegin; 320 switch (asType) { 321 case BNK_AS_NONE: 322 ierr = ISDestroy(&bnk->inactive_idx);CHKERRQ(ierr); 323 ierr = VecWhichInactive(tao->XL, tao->solution, bnk->unprojected_gradient, tao->XU, PETSC_TRUE, &bnk->inactive_idx);CHKERRQ(ierr); 324 ierr = ISDestroy(&bnk->active_idx);CHKERRQ(ierr); 325 ierr = ISComplementVec(bnk->inactive_idx, tao->solution, &bnk->active_idx);CHKERRQ(ierr); 326 break; 327 328 case BNK_AS_BERTSEKAS: 
329 /* Compute the trial step vector with which we will estimate the active set at the next iteration */ 330 if (bnk->M) { 331 /* If the BFGS preconditioner matrix is available, we will construct a trial step with it */ 332 ierr = MatSolve(bnk->M, bnk->unprojected_gradient, bnk->W);CHKERRQ(ierr); 333 } else { 334 if (tao->hessian) { 335 ierr = MatAssembled(tao->hessian, &hessComputed);CHKERRQ(ierr); 336 ierr = MatHasOperation(tao->hessian, MATOP_GET_DIAGONAL, &diagExists);CHKERRQ(ierr); 337 } else { 338 hessComputed = diagExists = PETSC_FALSE; 339 } 340 if (hessComputed && diagExists) { 341 /* BFGS preconditioner doesn't exist so let's invert the absolute diagonal of the Hessian instead onto the gradient */ 342 ierr = MatGetDiagonal(tao->hessian, bnk->Xwork);CHKERRQ(ierr); 343 ierr = VecAbs(bnk->Xwork);CHKERRQ(ierr); 344 ierr = VecMedian(bnk->Diag_min, bnk->Xwork, bnk->Diag_max, bnk->Xwork);CHKERRQ(ierr); 345 ierr = VecReciprocal(bnk->Xwork);CHKERRQ(ierr);CHKERRQ(ierr); 346 ierr = VecPointwiseMult(bnk->W, bnk->Xwork, bnk->unprojected_gradient);CHKERRQ(ierr); 347 } else { 348 /* If the Hessian or its diagonal does not exist, we will simply use gradient step */ 349 ierr = VecCopy(bnk->unprojected_gradient, bnk->W);CHKERRQ(ierr); 350 } 351 } 352 ierr = VecScale(bnk->W, -1.0);CHKERRQ(ierr); 353 ierr = TaoEstimateActiveBounds(tao->solution, tao->XL, tao->XU, bnk->unprojected_gradient, bnk->W, bnk->Xwork, bnk->as_step, &bnk->as_tol, 354 &bnk->active_lower, &bnk->active_upper, &bnk->active_fixed, &bnk->active_idx, &bnk->inactive_idx);CHKERRQ(ierr); 355 break; 356 357 default: 358 break; 359 } 360 PetscFunctionReturn(0); 361 } 362 363 /*------------------------------------------------------------*/ 364 365 /* Routine for bounding the step direction */ 366 367 PetscErrorCode TaoBNKBoundStep(Tao tao, PetscInt asType, Vec step) 368 { 369 PetscErrorCode ierr; 370 TAO_BNK *bnk = (TAO_BNK *)tao->data; 371 372 PetscFunctionBegin; 373 switch (asType) { 374 case BNK_AS_NONE: 375 
ierr = VecISSet(step, bnk->active_idx, 0.0);CHKERRQ(ierr); 376 break; 377 378 case BNK_AS_BERTSEKAS: 379 ierr = TaoBoundStep(tao->solution, tao->XL, tao->XU, bnk->active_lower, bnk->active_upper, bnk->active_fixed, 1.0, step);CHKERRQ(ierr); 380 break; 381 382 default: 383 break; 384 } 385 PetscFunctionReturn(0); 386 } 387 388 /*------------------------------------------------------------*/ 389 390 /* Routine for taking a finite number of BNCG iterations to 391 accelerate Newton convergence. 392 393 In practice, this approach simply trades off Hessian evaluations 394 for more gradient evaluations. 395 */ 396 397 PetscErrorCode TaoBNKTakeCGSteps(Tao tao, PetscBool *terminate) 398 { 399 TAO_BNK *bnk = (TAO_BNK *)tao->data; 400 PetscErrorCode ierr; 401 402 PetscFunctionBegin; 403 *terminate = PETSC_FALSE; 404 if (bnk->max_cg_its > 0) { 405 /* Copy the current function value (important vectors are already shared) */ 406 bnk->bncg_ctx->f = bnk->f; 407 /* Take some small finite number of BNCG iterations */ 408 ierr = TaoSolve(bnk->bncg);CHKERRQ(ierr); 409 /* Add the number of gradient and function evaluations to the total */ 410 tao->nfuncs += bnk->bncg->nfuncs; 411 tao->nfuncgrads += bnk->bncg->nfuncgrads; 412 tao->ngrads += bnk->bncg->ngrads; 413 tao->nhess += bnk->bncg->nhess; 414 bnk->tot_cg_its += bnk->bncg->niter; 415 /* Extract the BNCG function value out and save it into BNK */ 416 bnk->f = bnk->bncg_ctx->f; 417 if (bnk->bncg->reason == TAO_CONVERGED_GATOL || bnk->bncg->reason == TAO_CONVERGED_GRTOL || bnk->bncg->reason == TAO_CONVERGED_GTTOL || bnk->bncg->reason == TAO_CONVERGED_MINF) { 418 *terminate = PETSC_TRUE; 419 } else { 420 ierr = TaoBNKEstimateActiveSet(tao, bnk->as_type);CHKERRQ(ierr); 421 } 422 } 423 PetscFunctionReturn(0); 424 } 425 426 /*------------------------------------------------------------*/ 427 428 /* Routine for computing the Newton step. 
*/ 429 430 PetscErrorCode TaoBNKComputeStep(Tao tao, PetscBool shift, KSPConvergedReason *ksp_reason, PetscInt *step_type) 431 { 432 PetscErrorCode ierr; 433 TAO_BNK *bnk = (TAO_BNK *)tao->data; 434 PetscInt bfgsUpdates = 0; 435 PetscInt kspits; 436 PetscBool is_lmvm, is_virtual; 437 438 PetscFunctionBegin; 439 /* If there are no inactive variables left, save some computation and return an adjusted zero step 440 that has (l-x) and (u-x) for lower and upper bounded variables. */ 441 if (!bnk->inactive_idx) { 442 ierr = VecSet(tao->stepdirection, 0.0);CHKERRQ(ierr); 443 ierr = TaoBNKBoundStep(tao, bnk->as_type, tao->stepdirection);CHKERRQ(ierr); 444 PetscFunctionReturn(0); 445 } 446 447 /* Shift the reduced Hessian matrix */ 448 if ((shift) && (bnk->pert > 0)) { 449 ierr = PetscObjectTypeCompare((PetscObject)tao->hessian, MATLMVM, &is_lmvm);CHKERRQ(ierr); 450 if (is_lmvm) { 451 ierr = MatShift(tao->hessian, bnk->pert);CHKERRQ(ierr); 452 } else { 453 ierr = PetscObjectTypeCompare((PetscObject)bnk->H_inactive, MATSUBMATRIX, &is_virtual);CHKERRQ(ierr); 454 if (!is_virtual) { 455 ierr = MatShift(bnk->H_inactive, bnk->pert);CHKERRQ(ierr); 456 if (bnk->H_inactive != bnk->Hpre_inactive) { 457 ierr = MatShift(bnk->Hpre_inactive, bnk->pert);CHKERRQ(ierr); 458 } 459 } 460 } 461 } 462 463 /* Solve the Newton system of equations */ 464 tao->ksp_its = 0; 465 ierr = VecSet(tao->stepdirection, 0.0);CHKERRQ(ierr); 466 ierr = KSPReset(tao->ksp);CHKERRQ(ierr); 467 ierr = KSPSetOperators(tao->ksp,bnk->H_inactive,bnk->Hpre_inactive);CHKERRQ(ierr); 468 ierr = VecCopy(bnk->unprojected_gradient, bnk->Gwork);CHKERRQ(ierr); 469 if (bnk->active_idx) { 470 ierr = VecGetSubVector(bnk->Gwork, bnk->inactive_idx, &bnk->G_inactive);CHKERRQ(ierr); 471 ierr = VecGetSubVector(tao->stepdirection, bnk->inactive_idx, &bnk->X_inactive);CHKERRQ(ierr); 472 } else { 473 bnk->G_inactive = bnk->unprojected_gradient; 474 bnk->X_inactive = tao->stepdirection; 475 } 476 if (bnk->is_nash || bnk->is_stcg || 
bnk->is_gltr) { 477 ierr = KSPCGSetRadius(tao->ksp,tao->trust);CHKERRQ(ierr); 478 ierr = KSPSolve(tao->ksp, bnk->G_inactive, bnk->X_inactive);CHKERRQ(ierr); 479 ierr = KSPGetIterationNumber(tao->ksp,&kspits);CHKERRQ(ierr); 480 tao->ksp_its+=kspits; 481 tao->ksp_tot_its+=kspits; 482 ierr = KSPCGGetNormD(tao->ksp,&bnk->dnorm);CHKERRQ(ierr); 483 484 if (0.0 == tao->trust) { 485 /* Radius was uninitialized; use the norm of the direction */ 486 if (bnk->dnorm > 0.0) { 487 tao->trust = bnk->dnorm; 488 489 /* Modify the radius if it is too large or small */ 490 tao->trust = PetscMax(tao->trust, bnk->min_radius); 491 tao->trust = PetscMin(tao->trust, bnk->max_radius); 492 } else { 493 /* The direction was bad; set radius to default value and re-solve 494 the trust-region subproblem to get a direction */ 495 tao->trust = tao->trust0; 496 497 /* Modify the radius if it is too large or small */ 498 tao->trust = PetscMax(tao->trust, bnk->min_radius); 499 tao->trust = PetscMin(tao->trust, bnk->max_radius); 500 501 ierr = KSPCGSetRadius(tao->ksp,tao->trust);CHKERRQ(ierr); 502 ierr = KSPSolve(tao->ksp, bnk->G_inactive, bnk->X_inactive);CHKERRQ(ierr); 503 ierr = KSPGetIterationNumber(tao->ksp,&kspits);CHKERRQ(ierr); 504 tao->ksp_its+=kspits; 505 tao->ksp_tot_its+=kspits; 506 ierr = KSPCGGetNormD(tao->ksp,&bnk->dnorm);CHKERRQ(ierr); 507 508 if (bnk->dnorm == 0.0) SETERRQ(PETSC_COMM_SELF,1, "Initial direction zero"); 509 } 510 } 511 } else { 512 ierr = KSPSolve(tao->ksp, bnk->G_inactive, bnk->X_inactive);CHKERRQ(ierr); 513 ierr = KSPGetIterationNumber(tao->ksp, &kspits);CHKERRQ(ierr); 514 tao->ksp_its += kspits; 515 tao->ksp_tot_its+=kspits; 516 } 517 /* Restore sub vectors back */ 518 if (bnk->active_idx) { 519 ierr = VecRestoreSubVector(bnk->Gwork, bnk->inactive_idx, &bnk->G_inactive);CHKERRQ(ierr); 520 ierr = VecRestoreSubVector(tao->stepdirection, bnk->inactive_idx, &bnk->X_inactive);CHKERRQ(ierr); 521 } 522 /* Make sure the safeguarded fall-back step is zero for actively 
bounded variables */ 523 ierr = VecScale(tao->stepdirection, -1.0);CHKERRQ(ierr); 524 ierr = TaoBNKBoundStep(tao, bnk->as_type, tao->stepdirection);CHKERRQ(ierr); 525 526 /* Record convergence reasons */ 527 ierr = KSPGetConvergedReason(tao->ksp, ksp_reason);CHKERRQ(ierr); 528 if (KSP_CONVERGED_ATOL == *ksp_reason) { 529 ++bnk->ksp_atol; 530 } else if (KSP_CONVERGED_RTOL == *ksp_reason) { 531 ++bnk->ksp_rtol; 532 } else if (KSP_CONVERGED_CG_CONSTRAINED == *ksp_reason) { 533 ++bnk->ksp_ctol; 534 } else if (KSP_CONVERGED_CG_NEG_CURVE == *ksp_reason) { 535 ++bnk->ksp_negc; 536 } else if (KSP_DIVERGED_DTOL == *ksp_reason) { 537 ++bnk->ksp_dtol; 538 } else if (KSP_DIVERGED_ITS == *ksp_reason) { 539 ++bnk->ksp_iter; 540 } else { 541 ++bnk->ksp_othr; 542 } 543 544 /* Make sure the BFGS preconditioner is healthy */ 545 if (bnk->M) { 546 ierr = MatLMVMGetUpdateCount(bnk->M, &bfgsUpdates);CHKERRQ(ierr); 547 if ((KSP_DIVERGED_INDEFINITE_PC == *ksp_reason) && (bfgsUpdates > 0)) { 548 /* Preconditioner is numerically indefinite; reset the approximation. 
*/ 549 ierr = MatLMVMReset(bnk->M, PETSC_FALSE);CHKERRQ(ierr); 550 ierr = MatLMVMUpdate(bnk->M, tao->solution, bnk->unprojected_gradient);CHKERRQ(ierr); 551 } 552 } 553 *step_type = BNK_NEWTON; 554 PetscFunctionReturn(0); 555 } 556 557 /*------------------------------------------------------------*/ 558 559 /* Routine for recomputing the predicted reduction for a given step vector */ 560 561 PetscErrorCode TaoBNKRecomputePred(Tao tao, Vec S, PetscReal *prered) 562 { 563 PetscErrorCode ierr; 564 TAO_BNK *bnk = (TAO_BNK *)tao->data; 565 566 PetscFunctionBegin; 567 /* Extract subvectors associated with the inactive set */ 568 if (bnk->active_idx){ 569 ierr = VecGetSubVector(tao->stepdirection, bnk->inactive_idx, &bnk->X_inactive);CHKERRQ(ierr); 570 ierr = VecGetSubVector(bnk->Xwork, bnk->inactive_idx, &bnk->inactive_work);CHKERRQ(ierr); 571 ierr = VecGetSubVector(bnk->Gwork, bnk->inactive_idx, &bnk->G_inactive);CHKERRQ(ierr); 572 } else { 573 bnk->X_inactive = tao->stepdirection; 574 bnk->inactive_work = bnk->Xwork; 575 bnk->G_inactive = bnk->Gwork; 576 } 577 /* Recompute the predicted decrease based on the quadratic model */ 578 ierr = MatMult(bnk->H_inactive, bnk->X_inactive, bnk->inactive_work);CHKERRQ(ierr); 579 ierr = VecAYPX(bnk->inactive_work, -0.5, bnk->G_inactive);CHKERRQ(ierr); 580 ierr = VecDot(bnk->inactive_work, bnk->X_inactive, prered);CHKERRQ(ierr); 581 /* Restore the sub vectors */ 582 if (bnk->active_idx){ 583 ierr = VecRestoreSubVector(tao->stepdirection, bnk->inactive_idx, &bnk->X_inactive);CHKERRQ(ierr); 584 ierr = VecRestoreSubVector(bnk->Xwork, bnk->inactive_idx, &bnk->inactive_work);CHKERRQ(ierr); 585 ierr = VecRestoreSubVector(bnk->Gwork, bnk->inactive_idx, &bnk->G_inactive);CHKERRQ(ierr); 586 } 587 PetscFunctionReturn(0); 588 } 589 590 /*------------------------------------------------------------*/ 591 592 /* Routine for ensuring that the Newton step is a descent direction. 
593 594 The step direction falls back onto BFGS, scaled gradient and gradient steps 595 in the event that the Newton step fails the test. 596 */ 597 598 PetscErrorCode TaoBNKSafeguardStep(Tao tao, KSPConvergedReason ksp_reason, PetscInt *stepType) 599 { 600 PetscErrorCode ierr; 601 TAO_BNK *bnk = (TAO_BNK *)tao->data; 602 603 PetscReal gdx, e_min; 604 PetscInt bfgsUpdates; 605 606 PetscFunctionBegin; 607 switch (*stepType) { 608 case BNK_NEWTON: 609 ierr = VecDot(tao->stepdirection, tao->gradient, &gdx);CHKERRQ(ierr); 610 if ((gdx >= 0.0) || PetscIsInfOrNanReal(gdx)) { 611 /* Newton step is not descent or direction produced Inf or NaN 612 Update the perturbation for next time */ 613 if (bnk->pert <= 0.0) { 614 /* Initialize the perturbation */ 615 bnk->pert = PetscMin(bnk->imax, PetscMax(bnk->imin, bnk->imfac * bnk->gnorm)); 616 if (bnk->is_gltr) { 617 ierr = KSPCGGLTRGetMinEig(tao->ksp,&e_min);CHKERRQ(ierr); 618 bnk->pert = PetscMax(bnk->pert, -e_min); 619 } 620 } else { 621 /* Increase the perturbation */ 622 bnk->pert = PetscMin(bnk->pmax, PetscMax(bnk->pgfac * bnk->pert, bnk->pmgfac * bnk->gnorm)); 623 } 624 625 if (!bnk->M) { 626 /* We don't have the bfgs matrix around and updated 627 Must use gradient direction in this case */ 628 ierr = VecCopy(tao->gradient, tao->stepdirection);CHKERRQ(ierr); 629 *stepType = BNK_GRADIENT; 630 } else { 631 /* Attempt to use the BFGS direction */ 632 ierr = MatSolve(bnk->M, bnk->unprojected_gradient, tao->stepdirection);CHKERRQ(ierr); 633 634 /* Check for success (descent direction) 635 NOTE: Negative gdx here means not a descent direction because 636 the fall-back step is missing a negative sign. 
*/ 637 ierr = VecDot(tao->gradient, tao->stepdirection, &gdx);CHKERRQ(ierr); 638 if ((gdx <= 0.0) || PetscIsInfOrNanReal(gdx)) { 639 /* BFGS direction is not descent or direction produced not a number 640 We can assert bfgsUpdates > 1 in this case because 641 the first solve produces the scaled gradient direction, 642 which is guaranteed to be descent */ 643 644 /* Use steepest descent direction (scaled) */ 645 ierr = MatLMVMReset(bnk->M, PETSC_FALSE);CHKERRQ(ierr); 646 ierr = MatLMVMUpdate(bnk->M, tao->solution, bnk->unprojected_gradient);CHKERRQ(ierr); 647 ierr = MatSolve(bnk->M, bnk->unprojected_gradient, tao->stepdirection);CHKERRQ(ierr); 648 649 *stepType = BNK_SCALED_GRADIENT; 650 } else { 651 ierr = MatLMVMGetUpdateCount(bnk->M, &bfgsUpdates);CHKERRQ(ierr); 652 if (1 == bfgsUpdates) { 653 /* The first BFGS direction is always the scaled gradient */ 654 *stepType = BNK_SCALED_GRADIENT; 655 } else { 656 *stepType = BNK_BFGS; 657 } 658 } 659 } 660 /* Make sure the safeguarded fall-back step is zero for actively bounded variables */ 661 ierr = VecScale(tao->stepdirection, -1.0);CHKERRQ(ierr); 662 ierr = TaoBNKBoundStep(tao, bnk->as_type, tao->stepdirection);CHKERRQ(ierr); 663 } else { 664 /* Computed Newton step is descent */ 665 switch (ksp_reason) { 666 case KSP_DIVERGED_NANORINF: 667 case KSP_DIVERGED_BREAKDOWN: 668 case KSP_DIVERGED_INDEFINITE_MAT: 669 case KSP_DIVERGED_INDEFINITE_PC: 670 case KSP_CONVERGED_CG_NEG_CURVE: 671 /* Matrix or preconditioner is indefinite; increase perturbation */ 672 if (bnk->pert <= 0.0) { 673 /* Initialize the perturbation */ 674 bnk->pert = PetscMin(bnk->imax, PetscMax(bnk->imin, bnk->imfac * bnk->gnorm)); 675 if (bnk->is_gltr) { 676 ierr = KSPCGGLTRGetMinEig(tao->ksp, &e_min);CHKERRQ(ierr); 677 bnk->pert = PetscMax(bnk->pert, -e_min); 678 } 679 } else { 680 /* Increase the perturbation */ 681 bnk->pert = PetscMin(bnk->pmax, PetscMax(bnk->pgfac * bnk->pert, bnk->pmgfac * bnk->gnorm)); 682 } 683 break; 684 685 default: 686 /* 
Newton step computation is good; decrease perturbation */ 687 bnk->pert = PetscMin(bnk->psfac * bnk->pert, bnk->pmsfac * bnk->gnorm); 688 if (bnk->pert < bnk->pmin) { 689 bnk->pert = 0.0; 690 } 691 break; 692 } 693 *stepType = BNK_NEWTON; 694 } 695 break; 696 697 case BNK_BFGS: 698 /* Check for success (descent direction) */ 699 ierr = VecDot(tao->stepdirection, tao->gradient, &gdx);CHKERRQ(ierr); 700 if (gdx >= 0 || PetscIsInfOrNanReal(gdx)) { 701 /* Step is not descent or solve was not successful 702 Use steepest descent direction (scaled) */ 703 ierr = MatLMVMReset(bnk->M, PETSC_FALSE);CHKERRQ(ierr); 704 ierr = MatLMVMUpdate(bnk->M, tao->solution, bnk->unprojected_gradient);CHKERRQ(ierr); 705 ierr = MatSolve(bnk->M, tao->gradient, tao->stepdirection);CHKERRQ(ierr); 706 ierr = VecScale(tao->stepdirection,-1.0);CHKERRQ(ierr); 707 ierr = TaoBNKBoundStep(tao, bnk->as_type, tao->stepdirection);CHKERRQ(ierr); 708 *stepType = BNK_SCALED_GRADIENT; 709 } else { 710 *stepType = BNK_BFGS; 711 } 712 break; 713 714 case BNK_SCALED_GRADIENT: 715 break; 716 717 default: 718 break; 719 } 720 721 PetscFunctionReturn(0); 722 } 723 724 /*------------------------------------------------------------*/ 725 726 /* Routine for performing a bound-projected More-Thuente line search. 727 728 Includes fallbacks to BFGS, scaled gradient, and unscaled gradient steps if the 729 Newton step does not produce a valid step length. 
730 */ 731 732 PetscErrorCode TaoBNKPerformLineSearch(Tao tao, PetscInt *stepType, PetscReal *steplen, TaoLineSearchConvergedReason *reason) 733 { 734 TAO_BNK *bnk = (TAO_BNK *)tao->data; 735 PetscErrorCode ierr; 736 TaoLineSearchConvergedReason ls_reason; 737 738 PetscReal e_min, gdx; 739 PetscInt bfgsUpdates; 740 741 PetscFunctionBegin; 742 /* Perform the linesearch */ 743 ierr = TaoLineSearchApply(tao->linesearch, tao->solution, &bnk->f, bnk->unprojected_gradient, tao->stepdirection, steplen, &ls_reason);CHKERRQ(ierr); 744 ierr = TaoAddLineSearchCounts(tao);CHKERRQ(ierr); 745 746 while (ls_reason != TAOLINESEARCH_SUCCESS && ls_reason != TAOLINESEARCH_SUCCESS_USER && *stepType != BNK_SCALED_GRADIENT && *stepType != BNK_GRADIENT) { 747 /* Linesearch failed, revert solution */ 748 bnk->f = bnk->fold; 749 ierr = VecCopy(bnk->Xold, tao->solution);CHKERRQ(ierr); 750 ierr = VecCopy(bnk->unprojected_gradient_old, bnk->unprojected_gradient);CHKERRQ(ierr); 751 752 switch(*stepType) { 753 case BNK_NEWTON: 754 /* Failed to obtain acceptable iterate with Newton step 755 Update the perturbation for next time */ 756 if (bnk->pert <= 0.0) { 757 /* Initialize the perturbation */ 758 bnk->pert = PetscMin(bnk->imax, PetscMax(bnk->imin, bnk->imfac * bnk->gnorm)); 759 if (bnk->is_gltr) { 760 ierr = KSPCGGLTRGetMinEig(tao->ksp,&e_min);CHKERRQ(ierr); 761 bnk->pert = PetscMax(bnk->pert, -e_min); 762 } 763 } else { 764 /* Increase the perturbation */ 765 bnk->pert = PetscMin(bnk->pmax, PetscMax(bnk->pgfac * bnk->pert, bnk->pmgfac * bnk->gnorm)); 766 } 767 768 if (!bnk->M) { 769 /* We don't have the bfgs matrix around and being updated 770 Must use gradient direction in this case */ 771 ierr = VecCopy(bnk->unprojected_gradient, tao->stepdirection);CHKERRQ(ierr); 772 *stepType = BNK_GRADIENT; 773 } else { 774 /* Attempt to use the BFGS direction */ 775 ierr = MatSolve(bnk->M, bnk->unprojected_gradient, tao->stepdirection);CHKERRQ(ierr); 776 /* Check for success (descent direction) 777 
NOTE: Negative gdx means not a descent direction because the step here is missing a negative sign. */ 778 ierr = VecDot(tao->gradient, tao->stepdirection, &gdx);CHKERRQ(ierr); 779 if ((gdx <= 0.0) || PetscIsInfOrNanReal(gdx)) { 780 /* BFGS direction is not descent or direction produced not a number 781 We can assert bfgsUpdates > 1 in this case 782 Use steepest descent direction (scaled) */ 783 ierr = MatLMVMReset(bnk->M, PETSC_FALSE);CHKERRQ(ierr); 784 ierr = MatLMVMUpdate(bnk->M, tao->solution, bnk->unprojected_gradient);CHKERRQ(ierr); 785 ierr = MatSolve(bnk->M, bnk->unprojected_gradient, tao->stepdirection);CHKERRQ(ierr); 786 787 bfgsUpdates = 1; 788 *stepType = BNK_SCALED_GRADIENT; 789 } else { 790 ierr = MatLMVMGetUpdateCount(bnk->M, &bfgsUpdates);CHKERRQ(ierr); 791 if (1 == bfgsUpdates) { 792 /* The first BFGS direction is always the scaled gradient */ 793 *stepType = BNK_SCALED_GRADIENT; 794 } else { 795 *stepType = BNK_BFGS; 796 } 797 } 798 } 799 break; 800 801 case BNK_BFGS: 802 /* Can only enter if pc_type == BNK_PC_BFGS 803 Failed to obtain acceptable iterate with BFGS step 804 Attempt to use the scaled gradient direction */ 805 ierr = MatLMVMReset(bnk->M, PETSC_FALSE);CHKERRQ(ierr); 806 ierr = MatLMVMUpdate(bnk->M, tao->solution, bnk->unprojected_gradient);CHKERRQ(ierr); 807 ierr = MatSolve(bnk->M, bnk->unprojected_gradient, tao->stepdirection);CHKERRQ(ierr); 808 809 bfgsUpdates = 1; 810 *stepType = BNK_SCALED_GRADIENT; 811 break; 812 } 813 /* Make sure the safeguarded fall-back step is zero for actively bounded variables */ 814 ierr = VecScale(tao->stepdirection, -1.0);CHKERRQ(ierr); 815 ierr = TaoBNKBoundStep(tao, bnk->as_type, tao->stepdirection);CHKERRQ(ierr); 816 817 /* Perform one last line search with the fall-back step */ 818 ierr = TaoLineSearchApply(tao->linesearch, tao->solution, &bnk->f, bnk->unprojected_gradient, tao->stepdirection, steplen, &ls_reason);CHKERRQ(ierr); 819 ierr = TaoAddLineSearchCounts(tao);CHKERRQ(ierr); 820 } 821 *reason 
= ls_reason; 822 PetscFunctionReturn(0); 823 } 824 825 /*------------------------------------------------------------*/ 826 827 /* Routine for updating the trust radius. 828 829 Function features three different update methods: 830 1) Line-search step length based 831 2) Predicted decrease on the CG quadratic model 832 3) Interpolation 833 */ 834 835 PetscErrorCode TaoBNKUpdateTrustRadius(Tao tao, PetscReal prered, PetscReal actred, PetscInt updateType, PetscInt stepType, PetscBool *accept) 836 { 837 TAO_BNK *bnk = (TAO_BNK *)tao->data; 838 PetscErrorCode ierr; 839 840 PetscReal step, kappa; 841 PetscReal gdx, tau_1, tau_2, tau_min, tau_max; 842 843 PetscFunctionBegin; 844 /* Update trust region radius */ 845 *accept = PETSC_FALSE; 846 switch(updateType) { 847 case BNK_UPDATE_STEP: 848 *accept = PETSC_TRUE; /* always accept here because line search succeeded */ 849 if (stepType == BNK_NEWTON) { 850 ierr = TaoLineSearchGetStepLength(tao->linesearch, &step);CHKERRQ(ierr); 851 if (step < bnk->nu1) { 852 /* Very bad step taken; reduce radius */ 853 tao->trust = bnk->omega1 * PetscMin(bnk->dnorm, tao->trust); 854 } else if (step < bnk->nu2) { 855 /* Reasonably bad step taken; reduce radius */ 856 tao->trust = bnk->omega2 * PetscMin(bnk->dnorm, tao->trust); 857 } else if (step < bnk->nu3) { 858 /* Reasonable step was taken; leave radius alone */ 859 if (bnk->omega3 < 1.0) { 860 tao->trust = bnk->omega3 * PetscMin(bnk->dnorm, tao->trust); 861 } else if (bnk->omega3 > 1.0) { 862 tao->trust = PetscMax(bnk->omega3 * bnk->dnorm, tao->trust); 863 } 864 } else if (step < bnk->nu4) { 865 /* Full step taken; increase the radius */ 866 tao->trust = PetscMax(bnk->omega4 * bnk->dnorm, tao->trust); 867 } else { 868 /* More than full step taken; increase the radius */ 869 tao->trust = PetscMax(bnk->omega5 * bnk->dnorm, tao->trust); 870 } 871 } else { 872 /* Newton step was not good; reduce the radius */ 873 tao->trust = bnk->omega1 * PetscMin(bnk->dnorm, tao->trust); 874 } 875 break; 
876 877 case BNK_UPDATE_REDUCTION: 878 if (stepType == BNK_NEWTON) { 879 if ((prered < 0.0) || PetscIsInfOrNanReal(prered)) { 880 /* The predicted reduction has the wrong sign. This cannot 881 happen in infinite precision arithmetic. Step should 882 be rejected! */ 883 tao->trust = bnk->alpha1 * PetscMin(tao->trust, bnk->dnorm); 884 } else { 885 if (PetscIsInfOrNanReal(actred)) { 886 tao->trust = bnk->alpha1 * PetscMin(tao->trust, bnk->dnorm); 887 } else { 888 if ((PetscAbsScalar(actred) <= PetscMax(1.0, PetscAbsScalar(bnk->f))*bnk->epsilon) && (PetscAbsScalar(prered) <= PetscMax(1.0, PetscAbsScalar(bnk->f))*bnk->epsilon)) { 889 kappa = 1.0; 890 } else { 891 kappa = actred / prered; 892 } 893 /* Accept or reject the step and update radius */ 894 if (kappa < bnk->eta1) { 895 /* Reject the step */ 896 tao->trust = bnk->alpha1 * PetscMin(tao->trust, bnk->dnorm); 897 } else { 898 /* Accept the step */ 899 *accept = PETSC_TRUE; 900 /* Update the trust region radius only if the computed step is at the trust radius boundary */ 901 if (bnk->dnorm == tao->trust) { 902 if (kappa < bnk->eta2) { 903 /* Marginal bad step */ 904 tao->trust = bnk->alpha2 * tao->trust; 905 } else if (kappa < bnk->eta3) { 906 /* Reasonable step */ 907 tao->trust = bnk->alpha3 * tao->trust; 908 } else if (kappa < bnk->eta4) { 909 /* Good step */ 910 tao->trust = bnk->alpha4 * tao->trust; 911 } else { 912 /* Very good step */ 913 tao->trust = bnk->alpha5 * tao->trust; 914 } 915 } 916 } 917 } 918 } 919 } else { 920 /* Newton step was not good; reduce the radius */ 921 tao->trust = bnk->alpha1 * PetscMin(bnk->dnorm, tao->trust); 922 } 923 break; 924 925 default: 926 if (stepType == BNK_NEWTON) { 927 if (prered < 0.0) { 928 /* The predicted reduction has the wrong sign. This cannot */ 929 /* happen in infinite precision arithmetic. Step should */ 930 /* be rejected! 
*/ 931 tao->trust = bnk->gamma1 * PetscMin(tao->trust, bnk->dnorm); 932 } else { 933 if (PetscIsInfOrNanReal(actred)) { 934 tao->trust = bnk->gamma1 * PetscMin(tao->trust, bnk->dnorm); 935 } else { 936 if ((PetscAbsScalar(actred) <= bnk->epsilon) && (PetscAbsScalar(prered) <= bnk->epsilon)) { 937 kappa = 1.0; 938 } else { 939 kappa = actred / prered; 940 } 941 942 ierr = VecDot(tao->gradient, tao->stepdirection, &gdx);CHKERRQ(ierr); 943 tau_1 = bnk->theta * gdx / (bnk->theta * gdx - (1.0 - bnk->theta) * prered + actred); 944 tau_2 = bnk->theta * gdx / (bnk->theta * gdx + (1.0 + bnk->theta) * prered - actred); 945 tau_min = PetscMin(tau_1, tau_2); 946 tau_max = PetscMax(tau_1, tau_2); 947 948 if (kappa >= 1.0 - bnk->mu1) { 949 /* Great agreement */ 950 *accept = PETSC_TRUE; 951 if (tau_max < 1.0) { 952 tao->trust = PetscMax(tao->trust, bnk->gamma3 * bnk->dnorm); 953 } else if (tau_max > bnk->gamma4) { 954 tao->trust = PetscMax(tao->trust, bnk->gamma4 * bnk->dnorm); 955 } else { 956 tao->trust = PetscMax(tao->trust, tau_max * bnk->dnorm); 957 } 958 } else if (kappa >= 1.0 - bnk->mu2) { 959 /* Good agreement */ 960 *accept = PETSC_TRUE; 961 if (tau_max < bnk->gamma2) { 962 tao->trust = bnk->gamma2 * PetscMin(tao->trust, bnk->dnorm); 963 } else if (tau_max > bnk->gamma3) { 964 tao->trust = PetscMax(tao->trust, bnk->gamma3 * bnk->dnorm); 965 } else if (tau_max < 1.0) { 966 tao->trust = tau_max * PetscMin(tao->trust, bnk->dnorm); 967 } else { 968 tao->trust = PetscMax(tao->trust, tau_max * bnk->dnorm); 969 } 970 } else { 971 /* Not good agreement */ 972 if (tau_min > 1.0) { 973 tao->trust = bnk->gamma2 * PetscMin(tao->trust, bnk->dnorm); 974 } else if (tau_max < bnk->gamma1) { 975 tao->trust = bnk->gamma1 * PetscMin(tao->trust, bnk->dnorm); 976 } else if ((tau_min < bnk->gamma1) && (tau_max >= 1.0)) { 977 tao->trust = bnk->gamma1 * PetscMin(tao->trust, bnk->dnorm); 978 } else if ((tau_1 >= bnk->gamma1) && (tau_1 < 1.0) && ((tau_2 < bnk->gamma1) || (tau_2 >= 1.0))) { 979 
tao->trust = tau_1 * PetscMin(tao->trust, bnk->dnorm); 980 } else if ((tau_2 >= bnk->gamma1) && (tau_2 < 1.0) && ((tau_1 < bnk->gamma1) || (tau_2 >= 1.0))) { 981 tao->trust = tau_2 * PetscMin(tao->trust, bnk->dnorm); 982 } else { 983 tao->trust = tau_max * PetscMin(tao->trust, bnk->dnorm); 984 } 985 } 986 } 987 } 988 } else { 989 /* Newton step was not good; reduce the radius */ 990 tao->trust = bnk->gamma1 * PetscMin(bnk->dnorm, tao->trust); 991 } 992 break; 993 } 994 /* Make sure the radius does not violate min and max settings */ 995 tao->trust = PetscMin(tao->trust, bnk->max_radius); 996 tao->trust = PetscMax(tao->trust, bnk->min_radius); 997 PetscFunctionReturn(0); 998 } 999 1000 /* ---------------------------------------------------------- */ 1001 1002 PetscErrorCode TaoBNKAddStepCounts(Tao tao, PetscInt stepType) 1003 { 1004 TAO_BNK *bnk = (TAO_BNK *)tao->data; 1005 1006 PetscFunctionBegin; 1007 switch (stepType) { 1008 case BNK_NEWTON: 1009 ++bnk->newt; 1010 break; 1011 case BNK_BFGS: 1012 ++bnk->bfgs; 1013 break; 1014 case BNK_SCALED_GRADIENT: 1015 ++bnk->sgrad; 1016 break; 1017 case BNK_GRADIENT: 1018 ++bnk->grad; 1019 break; 1020 default: 1021 break; 1022 } 1023 PetscFunctionReturn(0); 1024 } 1025 1026 /* ---------------------------------------------------------- */ 1027 1028 PetscErrorCode TaoSetUp_BNK(Tao tao) 1029 { 1030 TAO_BNK *bnk = (TAO_BNK *)tao->data; 1031 PetscErrorCode ierr; 1032 PetscInt i; 1033 1034 PetscFunctionBegin; 1035 if (!tao->gradient) { 1036 ierr = VecDuplicate(tao->solution,&tao->gradient);CHKERRQ(ierr); 1037 } 1038 if (!tao->stepdirection) { 1039 ierr = VecDuplicate(tao->solution,&tao->stepdirection);CHKERRQ(ierr); 1040 } 1041 if (!bnk->W) { 1042 ierr = VecDuplicate(tao->solution,&bnk->W);CHKERRQ(ierr); 1043 } 1044 if (!bnk->Xold) { 1045 ierr = VecDuplicate(tao->solution,&bnk->Xold);CHKERRQ(ierr); 1046 } 1047 if (!bnk->Gold) { 1048 ierr = VecDuplicate(tao->solution,&bnk->Gold);CHKERRQ(ierr); 1049 } 1050 if (!bnk->Xwork) { 1051 
ierr = VecDuplicate(tao->solution,&bnk->Xwork);CHKERRQ(ierr); 1052 } 1053 if (!bnk->Gwork) { 1054 ierr = VecDuplicate(tao->solution,&bnk->Gwork);CHKERRQ(ierr); 1055 } 1056 if (!bnk->unprojected_gradient) { 1057 ierr = VecDuplicate(tao->solution,&bnk->unprojected_gradient);CHKERRQ(ierr); 1058 } 1059 if (!bnk->unprojected_gradient_old) { 1060 ierr = VecDuplicate(tao->solution,&bnk->unprojected_gradient_old);CHKERRQ(ierr); 1061 } 1062 if (!bnk->Diag_min) { 1063 ierr = VecDuplicate(tao->solution,&bnk->Diag_min);CHKERRQ(ierr); 1064 } 1065 if (!bnk->Diag_max) { 1066 ierr = VecDuplicate(tao->solution,&bnk->Diag_max);CHKERRQ(ierr); 1067 } 1068 if (bnk->max_cg_its > 0) { 1069 /* Ensure that the important common vectors are shared between BNK and embedded BNCG */ 1070 bnk->bncg_ctx = (TAO_BNCG *)bnk->bncg->data; 1071 ierr = PetscObjectReference((PetscObject)(bnk->unprojected_gradient_old));CHKERRQ(ierr); 1072 ierr = VecDestroy(&bnk->bncg_ctx->unprojected_gradient_old);CHKERRQ(ierr); 1073 bnk->bncg_ctx->unprojected_gradient_old = bnk->unprojected_gradient_old; 1074 ierr = PetscObjectReference((PetscObject)(bnk->unprojected_gradient));CHKERRQ(ierr); 1075 ierr = VecDestroy(&bnk->bncg_ctx->unprojected_gradient);CHKERRQ(ierr); 1076 bnk->bncg_ctx->unprojected_gradient = bnk->unprojected_gradient; 1077 ierr = PetscObjectReference((PetscObject)(bnk->Gold));CHKERRQ(ierr); 1078 ierr = VecDestroy(&bnk->bncg_ctx->G_old);CHKERRQ(ierr); 1079 bnk->bncg_ctx->G_old = bnk->Gold; 1080 ierr = PetscObjectReference((PetscObject)(tao->gradient));CHKERRQ(ierr); 1081 ierr = VecDestroy(&bnk->bncg->gradient);CHKERRQ(ierr); 1082 bnk->bncg->gradient = tao->gradient; 1083 ierr = PetscObjectReference((PetscObject)(tao->stepdirection));CHKERRQ(ierr); 1084 ierr = VecDestroy(&bnk->bncg->stepdirection);CHKERRQ(ierr); 1085 bnk->bncg->stepdirection = tao->stepdirection; 1086 ierr = TaoSetInitialVector(bnk->bncg, tao->solution);CHKERRQ(ierr); 1087 /* Copy over some settings from BNK into BNCG */ 1088 ierr = 
TaoSetMaximumIterations(bnk->bncg, bnk->max_cg_its);CHKERRQ(ierr); 1089 ierr = TaoSetTolerances(bnk->bncg, tao->gatol, tao->grtol, tao->gttol);CHKERRQ(ierr); 1090 ierr = TaoSetFunctionLowerBound(bnk->bncg, tao->fmin);CHKERRQ(ierr); 1091 ierr = TaoSetConvergenceTest(bnk->bncg, tao->ops->convergencetest, tao->cnvP);CHKERRQ(ierr); 1092 ierr = TaoSetObjectiveRoutine(bnk->bncg, tao->ops->computeobjective, tao->user_objP);CHKERRQ(ierr); 1093 ierr = TaoSetGradientRoutine(bnk->bncg, tao->ops->computegradient, tao->user_gradP);CHKERRQ(ierr); 1094 ierr = TaoSetObjectiveAndGradientRoutine(bnk->bncg, tao->ops->computeobjectiveandgradient, tao->user_objgradP);CHKERRQ(ierr); 1095 ierr = PetscObjectCopyFortranFunctionPointers((PetscObject)tao, (PetscObject)(bnk->bncg));CHKERRQ(ierr); 1096 for (i=0; i<tao->numbermonitors; ++i) { 1097 ierr = TaoSetMonitor(bnk->bncg, tao->monitor[i], tao->monitorcontext[i], tao->monitordestroy[i]);CHKERRQ(ierr); 1098 ierr = PetscObjectReference((PetscObject)(tao->monitorcontext[i]));CHKERRQ(ierr); 1099 } 1100 } 1101 bnk->X_inactive = 0; 1102 bnk->G_inactive = 0; 1103 bnk->inactive_work = 0; 1104 bnk->active_work = 0; 1105 bnk->inactive_idx = 0; 1106 bnk->active_idx = 0; 1107 bnk->active_lower = 0; 1108 bnk->active_upper = 0; 1109 bnk->active_fixed = 0; 1110 bnk->M = 0; 1111 bnk->H_inactive = 0; 1112 bnk->Hpre_inactive = 0; 1113 PetscFunctionReturn(0); 1114 } 1115 1116 /*------------------------------------------------------------*/ 1117 1118 PetscErrorCode TaoDestroy_BNK(Tao tao) 1119 { 1120 TAO_BNK *bnk = (TAO_BNK *)tao->data; 1121 PetscErrorCode ierr; 1122 1123 PetscFunctionBegin; 1124 if (tao->setupcalled) { 1125 ierr = VecDestroy(&bnk->W);CHKERRQ(ierr); 1126 ierr = VecDestroy(&bnk->Xold);CHKERRQ(ierr); 1127 ierr = VecDestroy(&bnk->Gold);CHKERRQ(ierr); 1128 ierr = VecDestroy(&bnk->Xwork);CHKERRQ(ierr); 1129 ierr = VecDestroy(&bnk->Gwork);CHKERRQ(ierr); 1130 ierr = VecDestroy(&bnk->unprojected_gradient);CHKERRQ(ierr); 1131 ierr = 
VecDestroy(&bnk->unprojected_gradient_old);CHKERRQ(ierr); 1132 ierr = VecDestroy(&bnk->Diag_min);CHKERRQ(ierr); 1133 ierr = VecDestroy(&bnk->Diag_max);CHKERRQ(ierr); 1134 } 1135 ierr = ISDestroy(&bnk->active_lower);CHKERRQ(ierr); 1136 ierr = ISDestroy(&bnk->active_upper);CHKERRQ(ierr); 1137 ierr = ISDestroy(&bnk->active_fixed);CHKERRQ(ierr); 1138 ierr = ISDestroy(&bnk->active_idx);CHKERRQ(ierr); 1139 ierr = ISDestroy(&bnk->inactive_idx);CHKERRQ(ierr); 1140 ierr = MatDestroy(&bnk->Hpre_inactive);CHKERRQ(ierr); 1141 ierr = MatDestroy(&bnk->H_inactive);CHKERRQ(ierr); 1142 ierr = TaoDestroy(&bnk->bncg);CHKERRQ(ierr); 1143 ierr = PetscFree(tao->data);CHKERRQ(ierr); 1144 PetscFunctionReturn(0); 1145 } 1146 1147 /*------------------------------------------------------------*/ 1148 1149 PetscErrorCode TaoSetFromOptions_BNK(PetscOptionItems *PetscOptionsObject,Tao tao) 1150 { 1151 TAO_BNK *bnk = (TAO_BNK *)tao->data; 1152 PetscErrorCode ierr; 1153 KSPType ksp_type; 1154 1155 PetscFunctionBegin; 1156 ierr = PetscOptionsHead(PetscOptionsObject,"Newton-Krylov method for bound constrained optimization");CHKERRQ(ierr); 1157 ierr = PetscOptionsEList("-tao_bnk_init_type", "radius initialization type", "", BNK_INIT, BNK_INIT_TYPES, BNK_INIT[bnk->init_type], &bnk->init_type, 0);CHKERRQ(ierr); 1158 ierr = PetscOptionsEList("-tao_bnk_update_type", "radius update type", "", BNK_UPDATE, BNK_UPDATE_TYPES, BNK_UPDATE[bnk->update_type], &bnk->update_type, 0);CHKERRQ(ierr); 1159 ierr = PetscOptionsEList("-tao_bnk_as_type", "active set estimation method", "", BNK_AS, BNK_AS_TYPES, BNK_AS[bnk->as_type], &bnk->as_type, 0);CHKERRQ(ierr); 1160 ierr = PetscOptionsReal("-tao_bnk_sval", "(developer) Hessian perturbation starting value", "", bnk->sval, &bnk->sval,NULL);CHKERRQ(ierr); 1161 ierr = PetscOptionsReal("-tao_bnk_imin", "(developer) minimum initial Hessian perturbation", "", bnk->imin, &bnk->imin,NULL);CHKERRQ(ierr); 1162 ierr = PetscOptionsReal("-tao_bnk_imax", "(developer) maximum initial 
Hessian perturbation", "", bnk->imax, &bnk->imax,NULL);CHKERRQ(ierr); 1163 ierr = PetscOptionsReal("-tao_bnk_imfac", "(developer) initial merit factor for Hessian perturbation", "", bnk->imfac, &bnk->imfac,NULL);CHKERRQ(ierr); 1164 ierr = PetscOptionsReal("-tao_bnk_pmin", "(developer) minimum Hessian perturbation", "", bnk->pmin, &bnk->pmin,NULL);CHKERRQ(ierr); 1165 ierr = PetscOptionsReal("-tao_bnk_pmax", "(developer) maximum Hessian perturbation", "", bnk->pmax, &bnk->pmax,NULL);CHKERRQ(ierr); 1166 ierr = PetscOptionsReal("-tao_bnk_pgfac", "(developer) Hessian perturbation growth factor", "", bnk->pgfac, &bnk->pgfac,NULL);CHKERRQ(ierr); 1167 ierr = PetscOptionsReal("-tao_bnk_psfac", "(developer) Hessian perturbation shrink factor", "", bnk->psfac, &bnk->psfac,NULL);CHKERRQ(ierr); 1168 ierr = PetscOptionsReal("-tao_bnk_pmgfac", "(developer) merit growth factor for Hessian perturbation", "", bnk->pmgfac, &bnk->pmgfac,NULL);CHKERRQ(ierr); 1169 ierr = PetscOptionsReal("-tao_bnk_pmsfac", "(developer) merit shrink factor for Hessian perturbation", "", bnk->pmsfac, &bnk->pmsfac,NULL);CHKERRQ(ierr); 1170 ierr = PetscOptionsReal("-tao_bnk_eta1", "(developer) threshold for rejecting step (-tao_bnk_update_type reduction)", "", bnk->eta1, &bnk->eta1,NULL);CHKERRQ(ierr); 1171 ierr = PetscOptionsReal("-tao_bnk_eta2", "(developer) threshold for accepting marginal step (-tao_bnk_update_type reduction)", "", bnk->eta2, &bnk->eta2,NULL);CHKERRQ(ierr); 1172 ierr = PetscOptionsReal("-tao_bnk_eta3", "(developer) threshold for accepting reasonable step (-tao_bnk_update_type reduction)", "", bnk->eta3, &bnk->eta3,NULL);CHKERRQ(ierr); 1173 ierr = PetscOptionsReal("-tao_bnk_eta4", "(developer) threshold for accepting good step (-tao_bnk_update_type reduction)", "", bnk->eta4, &bnk->eta4,NULL);CHKERRQ(ierr); 1174 ierr = PetscOptionsReal("-tao_bnk_alpha1", "(developer) radius reduction factor for rejected step (-tao_bnk_update_type reduction)", "", bnk->alpha1, 
&bnk->alpha1,NULL);CHKERRQ(ierr); 1175 ierr = PetscOptionsReal("-tao_bnk_alpha2", "(developer) radius reduction factor for marginally accepted bad step (-tao_bnk_update_type reduction)", "", bnk->alpha2, &bnk->alpha2,NULL);CHKERRQ(ierr); 1176 ierr = PetscOptionsReal("-tao_bnk_alpha3", "(developer) radius increase factor for reasonable accepted step (-tao_bnk_update_type reduction)", "", bnk->alpha3, &bnk->alpha3,NULL);CHKERRQ(ierr); 1177 ierr = PetscOptionsReal("-tao_bnk_alpha4", "(developer) radius increase factor for good accepted step (-tao_bnk_update_type reduction)", "", bnk->alpha4, &bnk->alpha4,NULL);CHKERRQ(ierr); 1178 ierr = PetscOptionsReal("-tao_bnk_alpha5", "(developer) radius increase factor for very good accepted step (-tao_bnk_update_type reduction)", "", bnk->alpha5, &bnk->alpha5,NULL);CHKERRQ(ierr); 1179 ierr = PetscOptionsReal("-tao_bnk_nu1", "(developer) threshold for small line-search step length (-tao_bnk_update_type step)", "", bnk->nu1, &bnk->nu1,NULL);CHKERRQ(ierr); 1180 ierr = PetscOptionsReal("-tao_bnk_nu2", "(developer) threshold for reasonable line-search step length (-tao_bnk_update_type step)", "", bnk->nu2, &bnk->nu2,NULL);CHKERRQ(ierr); 1181 ierr = PetscOptionsReal("-tao_bnk_nu3", "(developer) threshold for large line-search step length (-tao_bnk_update_type step)", "", bnk->nu3, &bnk->nu3,NULL);CHKERRQ(ierr); 1182 ierr = PetscOptionsReal("-tao_bnk_nu4", "(developer) threshold for very large line-search step length (-tao_bnk_update_type step)", "", bnk->nu4, &bnk->nu4,NULL);CHKERRQ(ierr); 1183 ierr = PetscOptionsReal("-tao_bnk_omega1", "(developer) radius reduction factor for very small line-search step length (-tao_bnk_update_type step)", "", bnk->omega1, &bnk->omega1,NULL);CHKERRQ(ierr); 1184 ierr = PetscOptionsReal("-tao_bnk_omega2", "(developer) radius reduction factor for small line-search step length (-tao_bnk_update_type step)", "", bnk->omega2, &bnk->omega2,NULL);CHKERRQ(ierr); 1185 ierr = PetscOptionsReal("-tao_bnk_omega3", 
"(developer) radius factor for decent line-search step length (-tao_bnk_update_type step)", "", bnk->omega3, &bnk->omega3,NULL);CHKERRQ(ierr); 1186 ierr = PetscOptionsReal("-tao_bnk_omega4", "(developer) radius increase factor for large line-search step length (-tao_bnk_update_type step)", "", bnk->omega4, &bnk->omega4,NULL);CHKERRQ(ierr); 1187 ierr = PetscOptionsReal("-tao_bnk_omega5", "(developer) radius increase factor for very large line-search step length (-tao_bnk_update_type step)", "", bnk->omega5, &bnk->omega5,NULL);CHKERRQ(ierr); 1188 ierr = PetscOptionsReal("-tao_bnk_mu1_i", "(developer) threshold for accepting very good step (-tao_bnk_init_type interpolation)", "", bnk->mu1_i, &bnk->mu1_i,NULL);CHKERRQ(ierr); 1189 ierr = PetscOptionsReal("-tao_bnk_mu2_i", "(developer) threshold for accepting good step (-tao_bnk_init_type interpolation)", "", bnk->mu2_i, &bnk->mu2_i,NULL);CHKERRQ(ierr); 1190 ierr = PetscOptionsReal("-tao_bnk_gamma1_i", "(developer) radius reduction factor for rejected very bad step (-tao_bnk_init_type interpolation)", "", bnk->gamma1_i, &bnk->gamma1_i,NULL);CHKERRQ(ierr); 1191 ierr = PetscOptionsReal("-tao_bnk_gamma2_i", "(developer) radius reduction factor for rejected bad step (-tao_bnk_init_type interpolation)", "", bnk->gamma2_i, &bnk->gamma2_i,NULL);CHKERRQ(ierr); 1192 ierr = PetscOptionsReal("-tao_bnk_gamma3_i", "(developer) radius increase factor for accepted good step (-tao_bnk_init_type interpolation)", "", bnk->gamma3_i, &bnk->gamma3_i,NULL);CHKERRQ(ierr); 1193 ierr = PetscOptionsReal("-tao_bnk_gamma4_i", "(developer) radius increase factor for accepted very good step (-tao_bnk_init_type interpolation)", "", bnk->gamma4_i, &bnk->gamma4_i,NULL);CHKERRQ(ierr); 1194 ierr = PetscOptionsReal("-tao_bnk_theta_i", "(developer) trust region interpolation factor (-tao_bnk_init_type interpolation)", "", bnk->theta_i, &bnk->theta_i,NULL);CHKERRQ(ierr); 1195 ierr = PetscOptionsReal("-tao_bnk_mu1", "(developer) threshold for accepting very 
good step (-tao_bnk_update_type interpolation)", "", bnk->mu1, &bnk->mu1,NULL);CHKERRQ(ierr); 1196 ierr = PetscOptionsReal("-tao_bnk_mu2", "(developer) threshold for accepting good step (-tao_bnk_update_type interpolation)", "", bnk->mu2, &bnk->mu2,NULL);CHKERRQ(ierr); 1197 ierr = PetscOptionsReal("-tao_bnk_gamma1", "(developer) radius reduction factor for rejected very bad step (-tao_bnk_update_type interpolation)", "", bnk->gamma1, &bnk->gamma1,NULL);CHKERRQ(ierr); 1198 ierr = PetscOptionsReal("-tao_bnk_gamma2", "(developer) radius reduction factor for rejected bad step (-tao_bnk_update_type interpolation)", "", bnk->gamma2, &bnk->gamma2,NULL);CHKERRQ(ierr); 1199 ierr = PetscOptionsReal("-tao_bnk_gamma3", "(developer) radius increase factor for accepted good step (-tao_bnk_update_type interpolation)", "", bnk->gamma3, &bnk->gamma3,NULL);CHKERRQ(ierr); 1200 ierr = PetscOptionsReal("-tao_bnk_gamma4", "(developer) radius increase factor for accepted very good step (-tao_bnk_update_type interpolation)", "", bnk->gamma4, &bnk->gamma4,NULL);CHKERRQ(ierr); 1201 ierr = PetscOptionsReal("-tao_bnk_theta", "(developer) trust region interpolation factor (-tao_bnk_update_type interpolation)", "", bnk->theta, &bnk->theta,NULL);CHKERRQ(ierr); 1202 ierr = PetscOptionsReal("-tao_bnk_min_radius", "(developer) lower bound on initial radius", "", bnk->min_radius, &bnk->min_radius,NULL);CHKERRQ(ierr); 1203 ierr = PetscOptionsReal("-tao_bnk_max_radius", "(developer) upper bound on radius", "", bnk->max_radius, &bnk->max_radius,NULL);CHKERRQ(ierr); 1204 ierr = PetscOptionsReal("-tao_bnk_epsilon", "(developer) tolerance used when computing actual and predicted reduction", "", bnk->epsilon, &bnk->epsilon,NULL);CHKERRQ(ierr); 1205 ierr = PetscOptionsReal("-tao_bnk_as_tol", "(developer) initial tolerance used when estimating actively bounded variables", "", bnk->as_tol, &bnk->as_tol,NULL);CHKERRQ(ierr); 1206 ierr = PetscOptionsReal("-tao_bnk_as_step", "(developer) step length used when 
estimating actively bounded variables", "", bnk->as_step, &bnk->as_step,NULL);CHKERRQ(ierr); 1207 ierr = PetscOptionsInt("-tao_bnk_max_cg_its", "number of BNCG iterations to take for each Newton step", "", bnk->max_cg_its, &bnk->max_cg_its,NULL);CHKERRQ(ierr); 1208 ierr = PetscOptionsTail();CHKERRQ(ierr); 1209 ierr = TaoSetFromOptions(bnk->bncg);CHKERRQ(ierr); 1210 ierr = TaoLineSearchSetFromOptions(tao->linesearch);CHKERRQ(ierr); 1211 ierr = KSPSetFromOptions(tao->ksp);CHKERRQ(ierr); 1212 ierr = KSPGetType(tao->ksp,&ksp_type);CHKERRQ(ierr); 1213 ierr = PetscStrcmp(ksp_type,KSPCGNASH,&bnk->is_nash);CHKERRQ(ierr); 1214 ierr = PetscStrcmp(ksp_type,KSPCGSTCG,&bnk->is_stcg);CHKERRQ(ierr); 1215 ierr = PetscStrcmp(ksp_type,KSPCGGLTR,&bnk->is_gltr);CHKERRQ(ierr); 1216 PetscFunctionReturn(0); 1217 } 1218 1219 /*------------------------------------------------------------*/ 1220 1221 PetscErrorCode TaoView_BNK(Tao tao, PetscViewer viewer) 1222 { 1223 TAO_BNK *bnk = (TAO_BNK *)tao->data; 1224 PetscInt nrejects; 1225 PetscBool isascii; 1226 PetscErrorCode ierr; 1227 1228 PetscFunctionBegin; 1229 ierr = PetscObjectTypeCompare((PetscObject)viewer,PETSCVIEWERASCII,&isascii);CHKERRQ(ierr); 1230 if (isascii) { 1231 ierr = PetscViewerASCIIPushTab(viewer);CHKERRQ(ierr); 1232 if (bnk->M) { 1233 ierr = MatLMVMGetRejectCount(bnk->M,&nrejects);CHKERRQ(ierr); 1234 ierr = PetscViewerASCIIPrintf(viewer, "Rejected BFGS updates: %D\n",nrejects);CHKERRQ(ierr); 1235 } 1236 ierr = PetscViewerASCIIPrintf(viewer, "CG steps: %D\n", bnk->tot_cg_its);CHKERRQ(ierr); 1237 ierr = PetscViewerASCIIPrintf(viewer, "Newton steps: %D\n", bnk->newt);CHKERRQ(ierr); 1238 if (bnk->M) { 1239 ierr = PetscViewerASCIIPrintf(viewer, "BFGS steps: %D\n", bnk->bfgs);CHKERRQ(ierr); 1240 } 1241 ierr = PetscViewerASCIIPrintf(viewer, "Scaled gradient steps: %D\n", bnk->sgrad);CHKERRQ(ierr); 1242 ierr = PetscViewerASCIIPrintf(viewer, "Gradient steps: %D\n", bnk->grad);CHKERRQ(ierr); 1243 ierr = PetscViewerASCIIPrintf(viewer, 
"KSP termination reasons:\n");CHKERRQ(ierr); 1244 ierr = PetscViewerASCIIPrintf(viewer, " atol: %D\n", bnk->ksp_atol);CHKERRQ(ierr); 1245 ierr = PetscViewerASCIIPrintf(viewer, " rtol: %D\n", bnk->ksp_rtol);CHKERRQ(ierr); 1246 ierr = PetscViewerASCIIPrintf(viewer, " ctol: %D\n", bnk->ksp_ctol);CHKERRQ(ierr); 1247 ierr = PetscViewerASCIIPrintf(viewer, " negc: %D\n", bnk->ksp_negc);CHKERRQ(ierr); 1248 ierr = PetscViewerASCIIPrintf(viewer, " dtol: %D\n", bnk->ksp_dtol);CHKERRQ(ierr); 1249 ierr = PetscViewerASCIIPrintf(viewer, " iter: %D\n", bnk->ksp_iter);CHKERRQ(ierr); 1250 ierr = PetscViewerASCIIPrintf(viewer, " othr: %D\n", bnk->ksp_othr);CHKERRQ(ierr); 1251 ierr = PetscViewerASCIIPopTab(viewer);CHKERRQ(ierr); 1252 } 1253 PetscFunctionReturn(0); 1254 } 1255 1256 /* ---------------------------------------------------------- */ 1257 1258 /*MC 1259 TAOBNK - Shared base-type for Bounded Newton-Krylov type algorithms. 1260 At each iteration, the BNK methods solve the symmetric 1261 system of equations to obtain the step diretion dk: 1262 Hk dk = -gk 1263 for free variables only. The step can be globalized either through 1264 trust-region methods, or a line search, or a heuristic mixture of both. 1265 1266 Options Database Keys: 1267 + -max_cg_its - maximum number of bounded conjugate-gradient iterations taken in each Newton loop 1268 . -init_type - trust radius initialization method ("constant", "direction", "interpolation") 1269 . -update_type - trust radius update method ("step", "direction", "interpolation") 1270 . -as_type - active-set estimation method ("none", "bertsekas") 1271 . -as_tol - (developer) initial tolerance used in estimating bounded active variables (-as_type bertsekas) 1272 . -as_step - (developer) trial step length used in estimating bounded active variables (-as_type bertsekas) 1273 . -sval - (developer) Hessian perturbation starting value 1274 . -imin - (developer) minimum initial Hessian perturbation 1275 . 
-imax - (developer) maximum initial Hessian perturbation 1276 . -pmin - (developer) minimum Hessian perturbation 1277 . -pmax - (developer) aximum Hessian perturbation 1278 . -pgfac - (developer) Hessian perturbation growth factor 1279 . -psfac - (developer) Hessian perturbation shrink factor 1280 . -imfac - (developer) initial merit factor for Hessian perturbation 1281 . -pmgfac - (developer) merit growth factor for Hessian perturbation 1282 . -pmsfac - (developer) merit shrink factor for Hessian perturbation 1283 . -eta1 - (developer) threshold for rejecting step (-update_type reduction) 1284 . -eta2 - (developer) threshold for accepting marginal step (-update_type reduction) 1285 . -eta3 - (developer) threshold for accepting reasonable step (-update_type reduction) 1286 . -eta4 - (developer) threshold for accepting good step (-update_type reduction) 1287 . -alpha1 - (developer) radius reduction factor for rejected step (-update_type reduction) 1288 . -alpha2 - (developer) radius reduction factor for marginally accepted bad step (-update_type reduction) 1289 . -alpha3 - (developer) radius increase factor for reasonable accepted step (-update_type reduction) 1290 . -alpha4 - (developer) radius increase factor for good accepted step (-update_type reduction) 1291 . -alpha5 - (developer) radius increase factor for very good accepted step (-update_type reduction) 1292 . -epsilon - (developer) tolerance for small pred/actual ratios that trigger automatic step acceptance (-update_type reduction) 1293 . -mu1 - (developer) threshold for accepting very good step (-update_type interpolation) 1294 . -mu2 - (developer) threshold for accepting good step (-update_type interpolation) 1295 . -gamma1 - (developer) radius reduction factor for rejected very bad step (-update_type interpolation) 1296 . -gamma2 - (developer) radius reduction factor for rejected bad step (-update_type interpolation) 1297 . 
-gamma3 - (developer) radius increase factor for accepted good step (-update_type interpolation) 1298 . -gamma4 - (developer) radius increase factor for accepted very good step (-update_type interpolation) 1299 . -theta - (developer) trust region interpolation factor (-update_type interpolation) 1300 . -nu1 - (developer) threshold for small line-search step length (-update_type step) 1301 . -nu2 - (developer) threshold for reasonable line-search step length (-update_type step) 1302 . -nu3 - (developer) threshold for large line-search step length (-update_type step) 1303 . -nu4 - (developer) threshold for very large line-search step length (-update_type step) 1304 . -omega1 - (developer) radius reduction factor for very small line-search step length (-update_type step) 1305 . -omega2 - (developer) radius reduction factor for small line-search step length (-update_type step) 1306 . -omega3 - (developer) radius factor for decent line-search step length (-update_type step) 1307 . -omega4 - (developer) radius increase factor for large line-search step length (-update_type step) 1308 . -omega5 - (developer) radius increase factor for very large line-search step length (-update_type step) 1309 . -mu1_i - (developer) threshold for accepting very good step (-init_type interpolation) 1310 . -mu2_i - (developer) threshold for accepting good step (-init_type interpolation) 1311 . -gamma1_i - (developer) radius reduction factor for rejected very bad step (-init_type interpolation) 1312 . -gamma2_i - (developer) radius reduction factor for rejected bad step (-init_type interpolation) 1313 . -gamma3_i - (developer) radius increase factor for accepted good step (-init_type interpolation) 1314 . 
-gamma4_i - (developer) radius increase factor for accepted very good step (-init_type interpolation)
- -theta_i - (developer) trust region interpolation factor (-init_type interpolation)

  Level: beginner
M*/

PetscErrorCode TaoCreate_BNK(Tao tao)
{
  TAO_BNK        *bnk;
  const char     *morethuente_type = TAOLINESEARCHMT;
  PetscErrorCode ierr;
  PC             pc;

  PetscFunctionBegin;
  ierr = PetscNewLog(tao,&bnk);CHKERRQ(ierr);

  /* Hook up the type-specific operations */
  tao->ops->setup          = TaoSetUp_BNK;
  tao->ops->view           = TaoView_BNK;
  tao->ops->setfromoptions = TaoSetFromOptions_BNK;
  tao->ops->destroy        = TaoDestroy_BNK;

  /* Override default settings (unless already changed) */
  if (!tao->max_it_changed) tao->max_it = 50;
  if (!tao->trust0_changed) tao->trust0 = 100.0;

  tao->data = (void*)bnk;

  /* Hessian shifting parameters */
  bnk->computehessian = TaoBNKComputeHessian;
  bnk->computestep = TaoBNKComputeStep;

  bnk->sval   = 0.0;
  bnk->imin   = 1.0e-4;
  bnk->imax   = 1.0e+2;
  bnk->imfac  = 1.0e-1;

  bnk->pmin   = 1.0e-12;
  bnk->pmax   = 1.0e+2;
  bnk->pgfac  = 1.0e+1;
  bnk->psfac  = 4.0e-1;
  bnk->pmgfac = 1.0e-1;
  bnk->pmsfac = 1.0e-1;

  /* Default values for trust-region radius update based on steplength */
  bnk->nu1 = 0.25;
  bnk->nu2 = 0.50;
  bnk->nu3 = 1.00;
  bnk->nu4 = 1.25;

  bnk->omega1 = 0.25;
  bnk->omega2 = 0.50;
  bnk->omega3 = 1.00;
  bnk->omega4 = 2.00;
  bnk->omega5 = 4.00;

  /* Default values for trust-region radius update based on reduction */
  bnk->eta1 = 1.0e-4;
  bnk->eta2 = 0.25;
  bnk->eta3 = 0.50;
  bnk->eta4 = 0.90;

  bnk->alpha1 = 0.25;
  bnk->alpha2 = 0.50;
  bnk->alpha3 = 1.00;
  bnk->alpha4 = 2.00;
  bnk->alpha5 = 4.00;

  /* Default values for trust-region radius update based on interpolation */
  bnk->mu1 = 0.10;
  bnk->mu2 = 0.50;

  bnk->gamma1 = 0.25;
  bnk->gamma2 = 0.50;
  bnk->gamma3 = 2.00;
  bnk->gamma4 = 4.00;

  bnk->theta = 0.05;

  /* Default values for trust region initialization based on interpolation */
  bnk->mu1_i = 0.35;
  bnk->mu2_i = 0.50;

  bnk->gamma1_i = 0.0625;
  bnk->gamma2_i = 0.5;
  bnk->gamma3_i = 2.0;
  bnk->gamma4_i = 5.0;

  bnk->theta_i = 0.25;

  /* Remaining parameters */
  bnk->max_cg_its = 0;
  bnk->min_radius = 1.0e-10;
  bnk->max_radius = 1.0e10;
  bnk->epsilon = PetscPowReal(PETSC_MACHINE_EPSILON, 2.0/3.0);
  bnk->as_tol = 1.0e-3;
  bnk->as_step = 1.0e-3;
  bnk->dmin = 1.0e-6;
  bnk->dmax = 1.0e6;

  bnk->M = 0;
  bnk->bfgs_pre = 0;
  bnk->init_type = BNK_INIT_INTERPOLATION;
  bnk->update_type = BNK_UPDATE_REDUCTION;
  bnk->as_type = BNK_AS_BERTSEKAS;

  /* Create the embedded BNCG solver */
  ierr = TaoCreate(PetscObjectComm((PetscObject)tao), &bnk->bncg);CHKERRQ(ierr);
  ierr = PetscObjectIncrementTabLevel((PetscObject)bnk->bncg, (PetscObject)tao, 1);CHKERRQ(ierr);
  ierr = TaoSetOptionsPrefix(bnk->bncg, "tao_bnk_");CHKERRQ(ierr);
  ierr = TaoSetType(bnk->bncg, TAOBNCG);CHKERRQ(ierr);

  /* Create the line search */
  ierr = TaoLineSearchCreate(((PetscObject)tao)->comm,&tao->linesearch);CHKERRQ(ierr);
  ierr = PetscObjectIncrementTabLevel((PetscObject)tao->linesearch, (PetscObject)tao, 1);CHKERRQ(ierr);
  ierr = TaoLineSearchSetOptionsPrefix(tao->linesearch,tao->hdr.prefix);CHKERRQ(ierr);
  ierr = TaoLineSearchSetType(tao->linesearch,morethuente_type);CHKERRQ(ierr);
  ierr = TaoLineSearchUseTaoRoutines(tao->linesearch,tao);CHKERRQ(ierr);

  /* Set linear solver to default for symmetric matrices */
  ierr = KSPCreate(((PetscObject)tao)->comm,&tao->ksp);CHKERRQ(ierr);
  ierr = PetscObjectIncrementTabLevel((PetscObject)tao->ksp, (PetscObject)tao, 1);CHKERRQ(ierr);
  ierr = KSPSetOptionsPrefix(tao->ksp,"tao_bnk_");CHKERRQ(ierr);
  ierr = KSPSetType(tao->ksp,KSPCGSTCG);CHKERRQ(ierr);
  /* BUGFIX: the KSPGetPC call previously dropped its return code (missing CHKERRQ),
     unlike every other PETSc call in this file */
  ierr = KSPGetPC(tao->ksp, &pc);CHKERRQ(ierr);
  ierr = PCSetType(pc, PCLMVM);CHKERRQ(ierr);
  PetscFunctionReturn(0);
}