#include <petsctaolinesearch.h>
#include <../src/tao/unconstrained/impls/cg/taocg.h>

#define CG_FletcherReeves    0
#define CG_PolakRibiere      1
#define CG_PolakRibierePlus  2
#define CG_HestenesStiefel   3
#define CG_DaiYuan           4
#define CG_Types             5

static const char *CG_Table[64] = {"fr", "pr", "prp", "hs", "dy"};

static PetscErrorCode TaoSolve_CG(Tao tao)
{
  TAO_CG                       *cgP = (TAO_CG*)tao->data;
  PetscErrorCode               ierr;
  TaoLineSearchConvergedReason ls_status = TAOLINESEARCH_CONTINUE_ITERATING;
  PetscReal                    step=1.0,f,gnorm,gnorm2,delta,gd,ginner,beta;
  PetscReal                    gd_old,gnorm2_old,f_old;

  PetscFunctionBegin;
  if (tao->XL || tao->XU || tao->ops->computebounds) {
    ierr = PetscPrintf(((PetscObject)tao)->comm,"WARNING: Variable bounds have been set but will be ignored by the CG algorithm\n");CHKERRQ(ierr);
  }

  /* Check convergence criteria */
  ierr = TaoComputeObjectiveAndGradient(tao, tao->solution, &f, tao->gradient);CHKERRQ(ierr);
  ierr = VecNorm(tao->gradient,NORM_2,&gnorm);CHKERRQ(ierr);
  if (PetscIsInfOrNanReal(f) || PetscIsInfOrNanReal(gnorm)) SETERRQ(PETSC_COMM_SELF,1,"User-provided compute function generated Inf or NaN");

  tao->reason = TAO_CONTINUE_ITERATING;
  ierr = TaoLogConvergenceHistory(tao,f,gnorm,0.0,tao->ksp_its);CHKERRQ(ierr);
  ierr = TaoMonitor(tao,tao->niter,f,gnorm,0.0,step);CHKERRQ(ierr);
  ierr = (*tao->ops->convergencetest)(tao,tao->cnvP);CHKERRQ(ierr);
  if (tao->reason != TAO_CONTINUE_ITERATING) PetscFunctionReturn(0);

  /* Set initial direction to -gradient */
  ierr = VecCopy(tao->gradient, tao->stepdirection);CHKERRQ(ierr);
  ierr = VecScale(tao->stepdirection, -1.0);CHKERRQ(ierr);
  gnorm2 = gnorm*gnorm;
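
  /* The scaling used below (and again on restarts) sets the initial
     line-search step to delta = 2|f|/||g||^2, clamped to
     [delta_min, delta_max]; this is the step that minimizes a quadratic
     model along -g whose minimum value is zero.  The f == 0 branch falls
     back to 2/||g||^2 so that delta stays well defined. */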
  /* Set initial scaling for the function */
  if (f != 0.0) {
    delta = 2.0*PetscAbsScalar(f) / gnorm2;
    delta = PetscMax(delta,cgP->delta_min);
    delta = PetscMin(delta,cgP->delta_max);
  } else {
    delta = 2.0 / gnorm2;
    delta = PetscMax(delta,cgP->delta_min);
    delta = PetscMin(delta,cgP->delta_max);
  }

  /* Set counter for gradient and reset steps */
  cgP->ngradsteps  = 0;
  cgP->nresetsteps = 0;

  while (1) {
    /* Save the current gradient information */
    f_old      = f;
    gnorm2_old = gnorm2;
    ierr = VecCopy(tao->solution, cgP->X_old);CHKERRQ(ierr);
    ierr = VecCopy(tao->gradient, cgP->G_old);CHKERRQ(ierr);
    ierr = VecDot(tao->gradient, tao->stepdirection, &gd);CHKERRQ(ierr);
    if ((gd >= 0) || PetscIsInfOrNanReal(gd)) {
      /* Not a descent direction; fall back to a scaled gradient step */
      ++cgP->ngradsteps;
      if (f != 0.0) {
        delta = 2.0*PetscAbsScalar(f) / gnorm2;
        delta = PetscMax(delta,cgP->delta_min);
        delta = PetscMin(delta,cgP->delta_max);
      } else {
        delta = 2.0 / gnorm2;
        delta = PetscMax(delta,cgP->delta_min);
        delta = PetscMin(delta,cgP->delta_max);
      }

      ierr = VecCopy(tao->gradient, tao->stepdirection);CHKERRQ(ierr);
      ierr = VecScale(tao->stepdirection, -1.0);CHKERRQ(ierr);
    }

    /* Search direction for improving point */
    ierr = TaoLineSearchSetInitialStepLength(tao->linesearch,delta);CHKERRQ(ierr);
    ierr = TaoLineSearchApply(tao->linesearch, tao->solution, &f, tao->gradient, tao->stepdirection, &step, &ls_status);CHKERRQ(ierr);
    ierr = TaoAddLineSearchCounts(tao);CHKERRQ(ierr);
    if (ls_status != TAOLINESEARCH_SUCCESS && ls_status != TAOLINESEARCH_SUCCESS_USER) {
      /* Line search failed; reset factors and use scaled gradient step */
      ++cgP->nresetsteps;
      f      = f_old;
      gnorm2 = gnorm2_old;
      ierr = VecCopy(cgP->X_old, tao->solution);CHKERRQ(ierr);
      ierr = VecCopy(cgP->G_old, tao->gradient);CHKERRQ(ierr);

      if (f != 0.0) {
        delta = 2.0*PetscAbsScalar(f) / gnorm2;
        delta = PetscMax(delta,cgP->delta_min);
        delta = PetscMin(delta,cgP->delta_max);
      } else {
        delta = 2.0 / gnorm2;
        delta = PetscMax(delta,cgP->delta_min);
        delta = PetscMin(delta,cgP->delta_max);
      }

      ierr = VecCopy(tao->gradient, tao->stepdirection);CHKERRQ(ierr);
      ierr = VecScale(tao->stepdirection, -1.0);CHKERRQ(ierr);

      ierr = TaoLineSearchSetInitialStepLength(tao->linesearch,delta);CHKERRQ(ierr);
      ierr = TaoLineSearchApply(tao->linesearch, tao->solution, &f, tao->gradient, tao->stepdirection, &step, &ls_status);CHKERRQ(ierr);
      ierr = TaoAddLineSearchCounts(tao);CHKERRQ(ierr);

      if (ls_status != TAOLINESEARCH_SUCCESS && ls_status != TAOLINESEARCH_SUCCESS_USER) {
        /* Line search failed again; switch to an unscaled gradient step */
        f = f_old;
        ierr = VecCopy(cgP->X_old, tao->solution);CHKERRQ(ierr);
        ierr = VecCopy(cgP->G_old, tao->gradient);CHKERRQ(ierr);
        delta = 1.0;
        ierr = VecCopy(tao->gradient, tao->stepdirection);CHKERRQ(ierr);
        ierr = VecScale(tao->stepdirection, -1.0);CHKERRQ(ierr);

        ierr = TaoLineSearchSetInitialStepLength(tao->linesearch,delta);CHKERRQ(ierr);
        ierr = TaoLineSearchApply(tao->linesearch, tao->solution, &f, tao->gradient, tao->stepdirection, &step, &ls_status);CHKERRQ(ierr);
        ierr = TaoAddLineSearchCounts(tao);CHKERRQ(ierr);
        if (ls_status != TAOLINESEARCH_SUCCESS && ls_status != TAOLINESEARCH_SUCCESS_USER) {
          /* Line search failed for the last time -- give up */
          f = f_old;
          ierr = VecCopy(cgP->X_old, tao->solution);CHKERRQ(ierr);
          ierr = VecCopy(cgP->G_old, tao->gradient);CHKERRQ(ierr);
          step = 0.0;
          tao->reason = TAO_DIVERGED_LS_FAILURE;
        }
      }
    }

    /* Check for bad values */
    ierr = VecNorm(tao->gradient,NORM_2,&gnorm);CHKERRQ(ierr);
    if (PetscIsInfOrNanReal(f) || PetscIsInfOrNanReal(gnorm)) SETERRQ(PETSC_COMM_SELF,1,"User-provided compute function generated Inf or NaN");

    /* Check for termination */
    gnorm2 = gnorm*gnorm;
    tao->niter++;
    ierr = TaoLogConvergenceHistory(tao,f,gnorm,0.0,tao->ksp_its);CHKERRQ(ierr);
    ierr = TaoMonitor(tao,tao->niter,f,gnorm,0.0,step);CHKERRQ(ierr);
    ierr = (*tao->ops->convergencetest)(tao,tao->cnvP);CHKERRQ(ierr);
    if (tao->reason != TAO_CONTINUE_ITERATING) break;
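
    /* Choose beta for the update d_{k+1} = -g_{k+1} + beta*d_k.  With
       ginner = (g_{k+1},g_k), gd = (g_{k+1},d_k), and gd_old = (g_k,d_k),
       the cases below implement the standard formulas:
         fr:  beta = ||g_{k+1}||^2 / ||g_k||^2
         pr:  beta = (||g_{k+1}||^2 - ginner) / ||g_k||^2
         prp: beta = max((||g_{k+1}||^2 - ginner) / ||g_k||^2, 0)
         hs:  beta = (||g_{k+1}||^2 - ginner) / (gd - gd_old)
         dy:  beta = ||g_{k+1}||^2 / (gd - gd_old)
       A Powell-style restart test first forces beta = 0 whenever
       consecutive gradients are far from orthogonal. */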
    /* Check for restart condition */
    ierr = VecDot(tao->gradient, cgP->G_old, &ginner);CHKERRQ(ierr);
    if (PetscAbsScalar(ginner) >= cgP->eta * gnorm2) {
      /* Gradients far from orthogonal; use steepest descent direction */
      beta = 0.0;
    } else {
      /* Gradients close to orthogonal; use conjugate gradient formula */
      switch (cgP->cg_type) {
      case CG_FletcherReeves:
        beta = gnorm2 / gnorm2_old;
        break;

      case CG_PolakRibiere:
        beta = (gnorm2 - ginner) / gnorm2_old;
        break;

      case CG_PolakRibierePlus:
        beta = PetscMax((gnorm2 - ginner) / gnorm2_old, 0.0);
        break;

      case CG_HestenesStiefel:
        ierr = VecDot(tao->gradient, tao->stepdirection, &gd);CHKERRQ(ierr);
        ierr = VecDot(cgP->G_old, tao->stepdirection, &gd_old);CHKERRQ(ierr);
        beta = (gnorm2 - ginner) / (gd - gd_old);
        break;

      case CG_DaiYuan:
        ierr = VecDot(tao->gradient, tao->stepdirection, &gd);CHKERRQ(ierr);
        ierr = VecDot(cgP->G_old, tao->stepdirection, &gd_old);CHKERRQ(ierr);
        beta = gnorm2 / (gd - gd_old);
        break;

      default:
        beta = 0.0;
        break;
      }
    }

    /* Compute the direction d = -g + beta*d */
    ierr = VecAXPBY(tao->stepdirection, -1.0, beta, tao->gradient);CHKERRQ(ierr);

    /* Update the initial step length choice */
    delta = 1.0;
    delta = PetscMax(delta, cgP->delta_min);
    delta = PetscMin(delta, cgP->delta_max);
  }
  PetscFunctionReturn(0);
}

static PetscErrorCode TaoSetUp_CG(Tao tao)
{
  TAO_CG         *cgP = (TAO_CG*)tao->data;
  PetscErrorCode ierr;

  PetscFunctionBegin;
  if (!tao->gradient) {ierr = VecDuplicate(tao->solution,&tao->gradient);CHKERRQ(ierr);}
  if (!tao->stepdirection) {ierr = VecDuplicate(tao->solution,&tao->stepdirection);CHKERRQ(ierr);}
  if (!cgP->X_old) {ierr = VecDuplicate(tao->solution,&cgP->X_old);CHKERRQ(ierr);}
  if (!cgP->G_old) {ierr = VecDuplicate(tao->gradient,&cgP->G_old);CHKERRQ(ierr);}
  PetscFunctionReturn(0);
}

static PetscErrorCode TaoDestroy_CG(Tao tao)
{
  TAO_CG         *cgP = (TAO_CG*)tao->data;
  PetscErrorCode ierr;

  PetscFunctionBegin;
  if (tao->setupcalled) {
    ierr = VecDestroy(&cgP->X_old);CHKERRQ(ierr);
    ierr = VecDestroy(&cgP->G_old);CHKERRQ(ierr);
  }
  ierr = TaoLineSearchDestroy(&tao->linesearch);CHKERRQ(ierr);
  ierr = PetscFree(tao->data);CHKERRQ(ierr);
  PetscFunctionReturn(0);
}

static PetscErrorCode TaoSetFromOptions_CG(PetscOptionItems *PetscOptionsObject,Tao tao)
{
  TAO_CG         *cgP = (TAO_CG*)tao->data;
  PetscErrorCode ierr;

  PetscFunctionBegin;
  ierr = TaoLineSearchSetFromOptions(tao->linesearch);CHKERRQ(ierr);
  ierr = PetscOptionsHead(PetscOptionsObject,"Nonlinear Conjugate Gradient method for unconstrained optimization");CHKERRQ(ierr);
  ierr = PetscOptionsReal("-tao_cg_eta","restart tolerance","",cgP->eta,&cgP->eta,NULL);CHKERRQ(ierr);
  ierr = PetscOptionsEList("-tao_cg_type","cg formula","",CG_Table,CG_Types,CG_Table[cgP->cg_type],&cgP->cg_type,NULL);CHKERRQ(ierr);
  ierr = PetscOptionsReal("-tao_cg_delta_min","minimum delta value","",cgP->delta_min,&cgP->delta_min,NULL);CHKERRQ(ierr);
  ierr = PetscOptionsReal("-tao_cg_delta_max","maximum delta value","",cgP->delta_max,&cgP->delta_max,NULL);CHKERRQ(ierr);
  ierr = PetscOptionsTail();CHKERRQ(ierr);
  PetscFunctionReturn(0);
}

static PetscErrorCode TaoView_CG(Tao tao, PetscViewer viewer)
{
  PetscBool      isascii;
  TAO_CG         *cgP = (TAO_CG*)tao->data;
  PetscErrorCode ierr;

  PetscFunctionBegin;
  ierr = PetscObjectTypeCompare((PetscObject)viewer, PETSCVIEWERASCII, &isascii);CHKERRQ(ierr);
  if (isascii) {
    ierr = PetscViewerASCIIPushTab(viewer);CHKERRQ(ierr);
    ierr = PetscViewerASCIIPrintf(viewer, "CG Type: %s\n", CG_Table[cgP->cg_type]);CHKERRQ(ierr);
    ierr = PetscViewerASCIIPrintf(viewer, "Gradient steps: %D\n", cgP->ngradsteps);CHKERRQ(ierr);
    ierr = PetscViewerASCIIPrintf(viewer, "Reset steps: %D\n", cgP->nresetsteps);CHKERRQ(ierr);
    ierr = PetscViewerASCIIPopTab(viewer);CHKERRQ(ierr);
  }
  PetscFunctionReturn(0);
}

/*MC
  TAOCG - Nonlinear conjugate gradient method, an extension of the linear
  conjugate gradient method to nonlinear optimization.

  Options Database Keys:
+ -tao_cg_eta <r> - restart tolerance
. -tao_cg_type <taocg_type> - cg formula
. -tao_cg_delta_min <r> - minimum delta value
- -tao_cg_delta_max <r> - maximum delta value

  Notes:
  CG formulas are:
  "fr" - Fletcher-Reeves
  "pr" - Polak-Ribiere
  "prp" - Polak-Ribiere-Plus
  "hs" - Hestenes-Stiefel
  "dy" - Dai-Yuan

  Level: beginner
M*/
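
/*
  A minimal driver sketch for this solver (illustration only; the names
  FormFunctionGradient, user, and x are hypothetical user-side names, not
  part of this file):

    Tao            tao;
    PetscErrorCode ierr;

    ierr = TaoCreate(PETSC_COMM_WORLD,&tao);CHKERRQ(ierr);
    ierr = TaoSetType(tao,TAOCG);CHKERRQ(ierr);
    ierr = TaoSetInitialVector(tao,x);CHKERRQ(ierr);
    ierr = TaoSetObjectiveAndGradientRoutine(tao,FormFunctionGradient,&user);CHKERRQ(ierr);
    ierr = TaoSetFromOptions(tao);CHKERRQ(ierr);
    ierr = TaoSolve(tao);CHKERRQ(ierr);
    ierr = TaoDestroy(&tao);CHKERRQ(ierr);
*/
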
PETSC_EXTERN PetscErrorCode TaoCreate_CG(Tao tao)
{
  TAO_CG         *cgP;
  const char     *morethuente_type = TAOLINESEARCHMT;
  PetscErrorCode ierr;

  PetscFunctionBegin;
  tao->ops->setup          = TaoSetUp_CG;
  tao->ops->solve          = TaoSolve_CG;
  tao->ops->view           = TaoView_CG;
  tao->ops->setfromoptions = TaoSetFromOptions_CG;
  tao->ops->destroy        = TaoDestroy_CG;

  /* Override default settings (unless already changed) */
  if (!tao->max_it_changed) tao->max_it = 2000;
  if (!tao->max_funcs_changed) tao->max_funcs = 4000;

  /* Note: nondefault values should be used for the nonlinear conjugate */
  /* gradient method.  In particular, gtol should be less than 0.5; the */
  /* value used in Nocedal and Wright is 0.10.  We use the default      */
  /* values for the linesearch because it seems to work better.         */
  ierr = TaoLineSearchCreate(((PetscObject)tao)->comm, &tao->linesearch);CHKERRQ(ierr);
  ierr = PetscObjectIncrementTabLevel((PetscObject)tao->linesearch, (PetscObject)tao, 1);CHKERRQ(ierr);
  ierr = TaoLineSearchSetType(tao->linesearch, morethuente_type);CHKERRQ(ierr);
  ierr = TaoLineSearchUseTaoRoutines(tao->linesearch, tao);CHKERRQ(ierr);
  ierr = TaoLineSearchSetOptionsPrefix(tao->linesearch,tao->hdr.prefix);CHKERRQ(ierr);

  ierr = PetscNewLog(tao,&cgP);CHKERRQ(ierr);
  tao->data      = (void*)cgP;
  cgP->eta       = 0.1;
  cgP->delta_min = 1e-7;
  cgP->delta_max = 100;
  cgP->cg_type   = CG_PolakRibierePlus;
  PetscFunctionReturn(0);
}
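
/*
  Illustrative command line (the executable name ./myapp is hypothetical;
  the options are those registered in TaoSetFromOptions_CG above):

    ./myapp -tao_type cg -tao_cg_type prp -tao_cg_eta 0.1 -tao_monitor
*/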