#include <../src/tao/bound/impls/tron/tron.h>
#include <../src/tao/matrix/submatfree.h>

/* TRON Routines */
static PetscErrorCode TronGradientProjections(Tao,TAO_TRON*);
/*------------------------------------------------------------*/
/* Destroy the TRON context: release all work vectors, the active-set IS,
   the reduced (sub)matrices, the inner KSP, and the data structure itself. */
static PetscErrorCode TaoDestroy_TRON(Tao tao)
{
  TAO_TRON *tron = (TAO_TRON *)tao->data;

  PetscFunctionBegin;
  PetscCall(VecDestroy(&tron->X_New));
  PetscCall(VecDestroy(&tron->G_New));
  PetscCall(VecDestroy(&tron->Work));
  PetscCall(VecDestroy(&tron->DXFree));
  PetscCall(VecDestroy(&tron->R));
  PetscCall(VecDestroy(&tron->diag));
  PetscCall(VecScatterDestroy(&tron->scatter));
  PetscCall(ISDestroy(&tron->Free_Local));
  PetscCall(MatDestroy(&tron->H_sub));
  PetscCall(MatDestroy(&tron->Hpre_sub));
  PetscCall(KSPDestroy(&tao->ksp));
  PetscCall(PetscFree(tao->data));
  PetscFunctionReturn(0);
}

/*------------------------------------------------------------*/
/* Process TRON-specific options (-tao_tron_maxgpits) and forward the
   remaining options to the inner KSP. */
static PetscErrorCode TaoSetFromOptions_TRON(PetscOptionItems *PetscOptionsObject,Tao tao)
{
  TAO_TRON *tron = (TAO_TRON *)tao->data;
  PetscBool flg;

  PetscFunctionBegin;
  PetscOptionsHeadBegin(PetscOptionsObject,"Newton Trust Region Method for bound constrained optimization");
  PetscCall(PetscOptionsInt("-tao_tron_maxgpits","maximum number of gradient projections per TRON iterate","TaoSetMaxGPIts",tron->maxgpits,&tron->maxgpits,&flg));
  PetscOptionsHeadEnd();
  PetscCall(KSPSetFromOptions(tao->ksp));
  PetscFunctionReturn(0);
}

/*------------------------------------------------------------*/
/* Print TRON statistics (cumulative projected-gradient iterations and the
   PG tolerance) to an ASCII viewer; other viewer types are ignored. */
static PetscErrorCode TaoView_TRON(Tao tao, PetscViewer viewer)
{
  TAO_TRON *tron = (TAO_TRON *)tao->data;
  PetscBool isascii;

  PetscFunctionBegin;
  PetscCall(PetscObjectTypeCompare((PetscObject)viewer,PETSCVIEWERASCII,&isascii));
  if (isascii) {
    PetscCall(PetscViewerASCIIPrintf(viewer,"Total PG its: %" PetscInt_FMT ",",tron->total_gp_its));
    PetscCall(PetscViewerASCIIPrintf(viewer,"PG tolerance: %g \n",(double)tron->pg_ftol));
  }
  PetscFunctionReturn(0);
}

/* ---------------------------------------------------------- */
/* Allocate the work vectors used by the solver, all duplicated from the
   solution vector's layout. */
static PetscErrorCode TaoSetup_TRON(Tao tao)
{
  TAO_TRON *tron = (TAO_TRON *)tao->data;

  PetscFunctionBegin;
  /* Allocate some arrays */
  PetscCall(VecDuplicate(tao->solution, &tron->diag));
  PetscCall(VecDuplicate(tao->solution, &tron->X_New));
  PetscCall(VecDuplicate(tao->solution, &tron->G_New));
  PetscCall(VecDuplicate(tao->solution, &tron->Work));
  /* NOTE(review): tao->gradient and tao->stepdirection are duplicated
     unconditionally; if they were already set this would leak the old
     vectors — confirm whether upstream guards these with if (!tao->gradient). */
  PetscCall(VecDuplicate(tao->solution, &tao->gradient));
  PetscCall(VecDuplicate(tao->solution, &tao->stepdirection));
  PetscFunctionReturn(0);
}

/* Main solve loop: alternate projected-gradient sweeps with a trust-region
   Newton step restricted to the currently inactive ("free") variables.
   The reduced Newton system is solved with the inner (CG trust-region) KSP
   on a submatrix/subvector extracted via tron->Free_Local. */
static PetscErrorCode TaoSolve_TRON(Tao tao)
{
  TAO_TRON *tron = (TAO_TRON *)tao->data;
  PetscInt its;
  TaoLineSearchConvergedReason ls_reason = TAOLINESEARCH_CONTINUE_ITERATING;
  PetscReal prered,actred,delta,f,f_new,rhok,gdx,xdiff,stepsize;

  PetscFunctionBegin;
  tron->pgstepsize = 1.0;
  tao->trust = tao->trust0;
  /* Project the current point onto the feasible set */
  PetscCall(TaoComputeVariableBounds(tao));
  PetscCall(TaoLineSearchSetVariableBounds(tao->linesearch,tao->XL,tao->XU));

  /* Project the initial point onto the feasible region */
  PetscCall(VecMedian(tao->XL,tao->solution,tao->XU,tao->solution));

  /* Compute the objective function and gradient */
  PetscCall(TaoComputeObjectiveAndGradient(tao,tao->solution,&tron->f,tao->gradient));
  PetscCall(VecNorm(tao->gradient,NORM_2,&tron->gnorm));
  PetscCheck(!PetscIsInfOrNanReal(tron->f) && !PetscIsInfOrNanReal(tron->gnorm),PetscObjectComm((PetscObject)tao),PETSC_ERR_USER, "User provided compute function generated Inf or NaN");

  /* Project the gradient and calculate the norm.
     Note: tao->gradient is overwritten in place with the projected gradient. */
  PetscCall(VecBoundGradientProjection(tao->gradient,tao->solution,tao->XL,tao->XU,tao->gradient));
  PetscCall(VecNorm(tao->gradient,NORM_2,&tron->gnorm));

  /* Initialize trust region radius; a nonpositive trust0 requests the
     gnorm^2-based default (clamped to at least 1). */
  tao->trust=tao->trust0;
  if (tao->trust <= 0) {
    tao->trust=PetscMax(tron->gnorm*tron->gnorm,1.0);
  }

  /* Initialize step sizes for the line searches */
  tron->pgstepsize=1.0;
  tron->stepsize=tao->trust;

  tao->reason = TAO_CONTINUE_ITERATING;
  PetscCall(TaoLogConvergenceHistory(tao,tron->f,tron->gnorm,0.0,tao->ksp_its));
  PetscCall(TaoMonitor(tao,tao->niter,tron->f,tron->gnorm,0.0,tron->stepsize));
  PetscCall((*tao->ops->convergencetest)(tao,tao->cnvP));
  while (tao->reason==TAO_CONTINUE_ITERATING) {
    /* Call general purpose update function */
    if (tao->ops->update) {
      PetscCall((*tao->ops->update)(tao, tao->niter, tao->user_update));
    }

    /* Perform projected gradient iterations */
    PetscCall(TronGradientProjections(tao,tron));

    PetscCall(VecBoundGradientProjection(tao->gradient,tao->solution,tao->XL,tao->XU,tao->gradient));
    PetscCall(VecNorm(tao->gradient,NORM_2,&tron->gnorm));

    tao->ksp_its=0;
    f=tron->f; delta=tao->trust;
    tron->n_free_last = tron->n_free;
    PetscCall(TaoComputeHessian(tao,tao->solution,tao->hessian,tao->hessian_pre));

    /* Generate index set (IS) of which bound constraints are active */
    PetscCall(ISDestroy(&tron->Free_Local));
    PetscCall(VecWhichInactive(tao->XL,tao->solution,tao->gradient,tao->XU,PETSC_TRUE,&tron->Free_Local));
    PetscCall(ISGetSize(tron->Free_Local, &tron->n_free));

    /* If no free variables: every variable sits at a bound, so no Newton
       step is possible; report convergence (STEPTOL) unless the
       convergence test already set a reason. */
    if (tron->n_free == 0) {
      PetscCall(VecNorm(tao->gradient,NORM_2,&tron->gnorm));
      PetscCall(TaoLogConvergenceHistory(tao,tron->f,tron->gnorm,0.0,tao->ksp_its));
      PetscCall(TaoMonitor(tao,tao->niter,tron->f,tron->gnorm,0.0,delta));
      PetscCall((*tao->ops->convergencetest)(tao,tao->cnvP));
      if (!tao->reason) {
        tao->reason = TAO_CONVERGED_STEPTOL;
      }
      break;
    }
    /* use free_local to mask/submat gradient, hessian, stepdirection */
    PetscCall(TaoVecGetSubVec(tao->gradient,tron->Free_Local,tao->subset_type,0.0,&tron->R));
    PetscCall(TaoVecGetSubVec(tao->gradient,tron->Free_Local,tao->subset_type,0.0,&tron->DXFree));
    PetscCall(VecSet(tron->DXFree,0.0));
    PetscCall(VecScale(tron->R, -1.0)); /* R = -g on the free variables */
    PetscCall(TaoMatGetSubMat(tao->hessian, tron->Free_Local, tron->diag, tao->subset_type, &tron->H_sub));
    if (tao->hessian == tao->hessian_pre) {
      /* Same operator used for preconditioning: share one submatrix. */
      PetscCall(MatDestroy(&tron->Hpre_sub));
      PetscCall(PetscObjectReference((PetscObject)(tron->H_sub)));
      tron->Hpre_sub = tron->H_sub;
    } else {
      PetscCall(TaoMatGetSubMat(tao->hessian_pre, tron->Free_Local, tron->diag, tao->subset_type,&tron->Hpre_sub));
    }
    PetscCall(KSPReset(tao->ksp));
    PetscCall(KSPSetOperators(tao->ksp, tron->H_sub, tron->Hpre_sub));
    /* Inner loop: shrink the trust radius until a step is accepted or the
       radius underflows. */
    while (1) {

      /* Approximately solve the reduced linear system */
      PetscCall(KSPCGSetRadius(tao->ksp,delta));

      PetscCall(KSPSolve(tao->ksp, tron->R, tron->DXFree));
      PetscCall(KSPGetIterationNumber(tao->ksp,&its));
      tao->ksp_its+=its;
      tao->ksp_tot_its+=its;
      PetscCall(VecSet(tao->stepdirection,0.0));

      /* Scatter the free-variable step back into the full step direction */
      PetscCall(VecISAXPY(tao->stepdirection,tron->Free_Local,1.0,tron->DXFree));

      /* NOTE(review): gdx is computed here but not used afterwards. */
      PetscCall(VecDot(tao->gradient, tao->stepdirection, &gdx));
      PetscCall(VecCopy(tao->solution, tron->X_New));
      PetscCall(VecCopy(tao->gradient, tron->G_New));

      stepsize=1.0;f_new=f;

      PetscCall(TaoLineSearchSetInitialStepLength(tao->linesearch,1.0));
      PetscCall(TaoLineSearchApply(tao->linesearch, tron->X_New, &f_new, tron->G_New, tao->stepdirection,&stepsize,&ls_reason));
      PetscCall(TaoAddLineSearchCounts(tao));

      /* Predicted reduction from the quadratic model:
         prered = d'(g + 0.5 H d) */
      PetscCall(MatMult(tao->hessian, tao->stepdirection, tron->Work));
      PetscCall(VecAYPX(tron->Work, 0.5, tao->gradient));
      PetscCall(VecDot(tao->stepdirection, tron->Work, &prered));
      actred = f_new - f;
      if ((PetscAbsScalar(actred) <= 1e-6) && (PetscAbsScalar(prered) <= 1e-6)) {
        /* Both reductions negligible: treat as a perfect model fit */
        rhok = 1.0;
      } else if (actred<0) {
        rhok=PetscAbs(-actred/prered);
      } else {
        rhok=0.0;
      }

      /* Compare actual improvement to the quadratic model */
      if (rhok > tron->eta1) { /* Accept the point */
        /* d = x_new - x */
        PetscCall(VecCopy(tron->X_New, tao->stepdirection));
        PetscCall(VecAXPY(tao->stepdirection, -1.0, tao->solution));

        PetscCall(VecNorm(tao->stepdirection, NORM_2, &xdiff));
        xdiff *= stepsize;

        /* Adjust trust region size */
        if (rhok < tron->eta2) {
          delta = PetscMin(xdiff,delta)*tron->sigma1;
        } else if (rhok > tron->eta4) {
          delta= PetscMin(xdiff,delta)*tron->sigma3;
        } else if (rhok > tron->eta3) {
          delta=PetscMin(xdiff,delta)*tron->sigma2;
        }
        /* Refresh the active set at the accepted point */
        PetscCall(VecBoundGradientProjection(tron->G_New,tron->X_New, tao->XL, tao->XU, tao->gradient));
        PetscCall(ISDestroy(&tron->Free_Local));
        PetscCall(VecWhichInactive(tao->XL,tron->X_New,tao->gradient,tao->XU,PETSC_TRUE,&tron->Free_Local));
        f=f_new;
        /* gnorm is the projected-gradient norm; tao->gradient is then
           overwritten with the (unprojected) gradient at X_New below. */
        PetscCall(VecNorm(tao->gradient,NORM_2,&tron->gnorm));
        PetscCall(VecCopy(tron->X_New, tao->solution));
        PetscCall(VecCopy(tron->G_New, tao->gradient));
        break;
      }
      else if (delta <= 1e-30) {
        /* Trust radius underflow: give up on this outer iteration */
        break;
      }
      else {
        delta /= 4.0;
      }
    } /* end linear solve loop */

    tron->f=f; tron->actred=actred; tao->trust=delta;
    tao->niter++;
    PetscCall(TaoLogConvergenceHistory(tao,tron->f,tron->gnorm,0.0,tao->ksp_its));
    PetscCall(TaoMonitor(tao,tao->niter,tron->f,tron->gnorm,0.0,stepsize));
    PetscCall((*tao->ops->convergencetest)(tao,tao->cnvP));
  } /* END MAIN LOOP */
  PetscFunctionReturn(0);
}

/* Take up to tron->maxgpits projected-gradient (steepest descent with
   line search) steps, stopping early once the per-step reduction falls
   below pg_ftol times the best reduction seen so far. */
static PetscErrorCode TronGradientProjections(Tao tao,TAO_TRON *tron)
{
  PetscInt i;
  TaoLineSearchConvergedReason ls_reason;
  PetscReal actred=-1.0,actred_max=0.0;
  PetscReal f_new;
  /*
    The gradient and function value passed into and out of this
    routine should be current and correct.

    The free, active, and binding variables should be already identified
  */
  PetscFunctionBegin;

  for (i=0;i<tron->maxgpits;++i) {

    /* Stop once progress stalls relative to the best step so far.
       (actred starts at -1 so the first iteration always runs.) */
    if (-actred <= (tron->pg_ftol)*actred_max) break;

    ++tron->gp_iterates;
    ++tron->total_gp_its;
    f_new=tron->f;

    /* Search along the negative gradient */
    PetscCall(VecCopy(tao->gradient,tao->stepdirection));
    PetscCall(VecScale(tao->stepdirection,-1.0));
    PetscCall(TaoLineSearchSetInitialStepLength(tao->linesearch,tron->pgstepsize));
    PetscCall(TaoLineSearchApply(tao->linesearch, tao->solution, &f_new, tao->gradient, tao->stepdirection,&tron->pgstepsize, &ls_reason));
    PetscCall(TaoAddLineSearchCounts(tao));

    PetscCall(VecBoundGradientProjection(tao->gradient,tao->solution,tao->XL,tao->XU,tao->gradient));
    PetscCall(VecNorm(tao->gradient,NORM_2,&tron->gnorm));

    /* Update the iterate; actred < 0 means the step decreased f */
    actred = f_new - tron->f;
    actred_max = PetscMax(actred_max,-(f_new - tron->f));
    tron->f = f_new;
  }
  PetscFunctionReturn(0);
}

/* Compute dual variables for the bound constraints from the difference
   between the projected and unprojected gradient:
   DXL = max(proj(g) - g, 0), DXU = min(g - proj(g), 0). */
static PetscErrorCode TaoComputeDual_TRON(Tao tao, Vec DXL, Vec DXU)
{

  TAO_TRON *tron = (TAO_TRON *)tao->data;

  PetscFunctionBegin;
  PetscValidHeaderSpecific(tao,TAO_CLASSID,1);
  PetscValidHeaderSpecific(DXL,VEC_CLASSID,2);
  PetscValidHeaderSpecific(DXU,VEC_CLASSID,3);
  PetscCheck(tron->Work && tao->gradient,PETSC_COMM_SELF,PETSC_ERR_ORDER,"Dual variables don't exist yet or no longer exist.");

  /* Work = projected gradient at the current solution */
  PetscCall(VecBoundGradientProjection(tao->gradient,tao->solution,tao->XL,tao->XU,tron->Work));
  PetscCall(VecCopy(tron->Work,DXL));
  PetscCall(VecAXPY(DXL,-1.0,tao->gradient));
  PetscCall(VecSet(DXU,0.0));
  PetscCall(VecPointwiseMax(DXL,DXL,DXU));

  PetscCall(VecCopy(tao->gradient,DXU));
  PetscCall(VecAXPY(DXU,-1.0,tron->Work));
  PetscCall(VecSet(tron->Work,0.0));
  PetscCall(VecPointwiseMin(DXU,tron->Work,DXU));
  PetscFunctionReturn(0);
}
/*------------------------------------------------------------*/ 303 /*MC 304 TAOTRON - The TRON algorithm is an active-set Newton trust region method 305 for bound-constrained minimization. 306 307 Options Database Keys: 308 + -tao_tron_maxgpits - maximum number of gradient projections per TRON iterate 309 - -tao_subset_type - "subvec","mask","matrix-free", strategies for handling active-sets 310 311 Level: beginner 312 M*/ 313 PETSC_EXTERN PetscErrorCode TaoCreate_TRON(Tao tao) 314 { 315 TAO_TRON *tron; 316 const char *morethuente_type = TAOLINESEARCHMT; 317 318 PetscFunctionBegin; 319 tao->ops->setup = TaoSetup_TRON; 320 tao->ops->solve = TaoSolve_TRON; 321 tao->ops->view = TaoView_TRON; 322 tao->ops->setfromoptions = TaoSetFromOptions_TRON; 323 tao->ops->destroy = TaoDestroy_TRON; 324 tao->ops->computedual = TaoComputeDual_TRON; 325 326 PetscCall(PetscNewLog(tao,&tron)); 327 tao->data = (void*)tron; 328 329 /* Override default settings (unless already changed) */ 330 if (!tao->max_it_changed) tao->max_it = 50; 331 if (!tao->trust0_changed) tao->trust0 = 1.0; 332 if (!tao->steptol_changed) tao->steptol = 0.0; 333 334 /* Initialize pointers and variables */ 335 tron->n = 0; 336 tron->maxgpits = 3; 337 tron->pg_ftol = 0.001; 338 339 tron->eta1 = 1.0e-4; 340 tron->eta2 = 0.25; 341 tron->eta3 = 0.50; 342 tron->eta4 = 0.90; 343 344 tron->sigma1 = 0.5; 345 tron->sigma2 = 2.0; 346 tron->sigma3 = 4.0; 347 348 tron->gp_iterates = 0; /* Cumulative number */ 349 tron->total_gp_its = 0; 350 tron->n_free = 0; 351 352 tron->DXFree=NULL; 353 tron->R=NULL; 354 tron->X_New=NULL; 355 tron->G_New=NULL; 356 tron->Work=NULL; 357 tron->Free_Local=NULL; 358 tron->H_sub=NULL; 359 tron->Hpre_sub=NULL; 360 tao->subset_type = TAO_SUBSET_SUBVEC; 361 362 PetscCall(TaoLineSearchCreate(((PetscObject)tao)->comm, &tao->linesearch)); 363 PetscCall(PetscObjectIncrementTabLevel((PetscObject)tao->linesearch, (PetscObject)tao, 1)); 364 
PetscCall(TaoLineSearchSetType(tao->linesearch,morethuente_type)); 365 PetscCall(TaoLineSearchUseTaoRoutines(tao->linesearch,tao)); 366 PetscCall(TaoLineSearchSetOptionsPrefix(tao->linesearch,tao->hdr.prefix)); 367 368 PetscCall(KSPCreate(((PetscObject)tao)->comm, &tao->ksp)); 369 PetscCall(PetscObjectIncrementTabLevel((PetscObject)tao->ksp, (PetscObject)tao, 1)); 370 PetscCall(KSPSetOptionsPrefix(tao->ksp, tao->hdr.prefix)); 371 PetscCall(KSPSetType(tao->ksp,KSPSTCG)); 372 PetscFunctionReturn(0); 373 } 374