#define TAO_DLL

#include <petsc/private/taoimpl.h> /*I "petsctao.h" I*/

/* Registry state for the Tao solver class: list of registered solver types
   and a flag recording whether registration has been performed. */
PetscBool TaoRegisterAllCalled = PETSC_FALSE;
PetscFunctionList TaoList = NULL;

PetscClassId TAO_CLASSID;
/* Logging events for profiling the major phases of a Tao solve */
PetscLogEvent Tao_Solve, Tao_ObjectiveEval, Tao_GradientEval, Tao_ObjGradientEval, Tao_HessianEval, Tao_ConstraintsEval, Tao_JacobianEval;

const char *TaoSubSetTypes[] = { "subvec","mask","matrixfree","TaoSubSetType","TAO_SUBSET_",0};

#undef __FUNCT__
#define __FUNCT__ "TaoCreate"
/*@
  TaoCreate - Creates a TAO solver

  Collective on MPI_Comm

  Input Parameter:
. comm - MPI communicator

  Output Parameter:
. newtao - the new Tao context

  Available methods include:
+ nls - Newton's method with line search for unconstrained minimization
. ntr - Newton's method with trust region for unconstrained minimization
. ntl - Newton's method with trust region, line search for unconstrained minimization
. lmvm - Limited memory variable metric method for unconstrained minimization
. cg - Nonlinear conjugate gradient method for unconstrained minimization
. nm - Nelder-Mead algorithm for derivative-free unconstrained minimization
. tron - Newton Trust Region method for bound constrained minimization
. gpcg - Newton Trust Region method for quadratic bound constrained minimization
. blmvm - Limited memory variable metric method for bound constrained minimization
. lcl - Linearly constrained Lagrangian method for pde-constrained minimization
- pounders - Model-based algorithm for nonlinear least squares

  Options Database Keys:
. -tao_type - select which method TAO should use

  Level: beginner

.seealso: TaoSolve(), TaoDestroy()
@*/
PetscErrorCode TaoCreate(MPI_Comm comm, Tao *newtao)
{
  PetscErrorCode ierr;
  Tao            tao;

  PetscFunctionBegin;
  PetscValidPointer(newtao,2);
  *newtao = NULL;

  /* Make sure the Tao and TaoLineSearch classes are registered before
     creating the header (idempotent calls). */
  ierr = TaoInitializePackage();CHKERRQ(ierr);
  ierr = TaoLineSearchInitializePackage();CHKERRQ(ierr);

  ierr = PetscHeaderCreate(tao,TAO_CLASSID,"Tao","Optimization solver","Tao",comm,TaoDestroy,TaoView);CHKERRQ(ierr);

  /* No user callbacks are installed yet; the default convergence test is
     the only operation set at creation time. */
  tao->ops->computeobjective=0;
  tao->ops->computeobjectiveandgradient=0;
  tao->ops->computegradient=0;
  tao->ops->computehessian=0;
  tao->ops->computeseparableobjective=0;
  tao->ops->computeconstraints=0;
  tao->ops->computejacobian=0;
  tao->ops->computejacobianequality=0;
  tao->ops->computejacobianinequality=0;
  tao->ops->computeequalityconstraints=0;
  tao->ops->computeinequalityconstraints=0;
  tao->ops->convergencetest=TaoDefaultConvergenceTest;
  tao->ops->convergencedestroy=0;
  tao->ops->computedual=0;
  tao->ops->setup=0;
  tao->ops->solve=0;
  tao->ops->view=0;
  tao->ops->setfromoptions=0;
  tao->ops->destroy=0;

  /* Work vectors/matrices are created lazily by the chosen solver type */
  tao->solution=NULL;
  tao->gradient=NULL;
  tao->sep_objective = NULL;
  tao->constraints=NULL;
  tao->constraints_equality=NULL;
  tao->constraints_inequality=NULL;
  tao->sep_weights_v=NULL;
  tao->sep_weights_w=NULL;
  tao->stepdirection=NULL;
  tao->niter=0;
  tao->ntotalits=0;
  tao->XL = NULL;
  tao->XU = NULL;
  tao->IL = NULL;
  tao->IU = NULL;
  tao->DI = NULL;
  tao->DE = NULL;
  tao->gradient_norm = NULL;
  tao->gradient_norm_tmp = NULL;
  tao->hessian = NULL;
  tao->hessian_pre = NULL;
  tao->jacobian = NULL;
  tao->jacobian_pre = NULL;
  tao->jacobian_state = NULL;
  tao->jacobian_state_pre = NULL;
  tao->jacobian_state_inv = NULL;
  tao->jacobian_design = NULL;
  tao->jacobian_design_pre = NULL;
  tao->jacobian_equality = NULL;
  tao->jacobian_equality_pre = NULL;
  tao->jacobian_inequality = NULL;
  tao->jacobian_inequality_pre = NULL;
  tao->state_is = NULL;
  tao->design_is = NULL;

  tao->max_it = 10000;
  tao->max_funcs = 10000;
  /* Looser default tolerances in single precision */
#if defined(PETSC_USE_REAL_SINGLE)
  tao->gatol = 1e-5;
  tao->grtol = 1e-5;
  tao->steptol = 1e-6;
#else
  tao->gatol = 1e-8;
  tao->grtol = 1e-8;
  tao->steptol = 1e-12;
#endif
  tao->crtol = 0.0;
  tao->catol = 0.0;
  tao->gttol = 0.0;
  tao->trust0 = PETSC_INFINITY;
  tao->fmin = PETSC_NINFINITY;

  /* Convergence-history storage (allocated on demand) */
  tao->hist_malloc = PETSC_FALSE;
  tao->hist_reset = PETSC_TRUE;
  tao->hist_max = 0;
  tao->hist_len = 0;
  tao->hist_obj = NULL;
  tao->hist_resid = NULL;
  tao->hist_cnorm = NULL;
  tao->hist_lits = NULL;

  tao->numbermonitors=0;
  tao->viewsolution=PETSC_FALSE;
  tao->viewhessian=PETSC_FALSE;
  tao->viewgradient=PETSC_FALSE;
  tao->viewjacobian=PETSC_FALSE;
  tao->viewconstraints = PETSC_FALSE;

  /* These flags prevent algorithms from overriding user options */
  tao->max_it_changed =PETSC_FALSE;
  tao->max_funcs_changed=PETSC_FALSE;
  tao->gatol_changed =PETSC_FALSE;
  tao->grtol_changed =PETSC_FALSE;
  tao->gttol_changed =PETSC_FALSE;
  tao->steptol_changed =PETSC_FALSE;
  tao->trust0_changed =PETSC_FALSE;
  tao->fmin_changed =PETSC_FALSE;
  tao->catol_changed =PETSC_FALSE;
  tao->crtol_changed =PETSC_FALSE;
  ierr = TaoResetStatistics(tao);CHKERRQ(ierr);
  *newtao = tao;
  PetscFunctionReturn(0);
}

#undef __FUNCT__
#define __FUNCT__ "TaoSolve"
/*@
  TaoSolve - Solves an optimization problem min F(x) s.t. l <= x <= u

  Collective on Tao

  Input Parameters:
. tao - the Tao context

  Notes:
  The user must set up the Tao with calls to TaoSetInitialVector(),
  TaoSetObjectiveRoutine(),
  TaoSetGradientRoutine(), and (if using 2nd order method) TaoSetHessianRoutine().
  You should call TaoGetConvergedReason() or run with -tao_converged_reason to determine if the optimization algorithm actually succeeded or
  why it failed.

  Level: beginner

.seealso: TaoCreate(), TaoSetObjectiveRoutine(), TaoSetGradientRoutine(), TaoSetHessianRoutine(), TaoGetConvergedReason()
@*/
PetscErrorCode TaoSolve(Tao tao)
{
  PetscErrorCode   ierr;
  /* static so the citation is registered at most once per process */
  static PetscBool set = PETSC_FALSE;

  PetscFunctionBegin;
  PetscValidHeaderSpecific(tao,TAO_CLASSID,1);
  ierr = PetscCitationsRegister("@TechReport{tao-user-ref,\n"
                                "title = {Toolkit for Advanced Optimization (TAO) Users Manual},\n"
                                "author = {Todd Munson and Jason Sarich and Stefan Wild and Steve Benson and Lois Curfman McInnes},\n"
                                "Institution = {Argonne National Laboratory},\n"
                                "Year = 2014,\n"
                                "Number = {ANL/MCS-TM-322 - Revision 3.5},\n"
                                "url = {http://www.mcs.anl.gov/tao}\n}\n",&set);CHKERRQ(ierr);

  /* Per-solve statistics and line-search state are reset before dispatch */
  ierr = TaoSetUp(tao);CHKERRQ(ierr);
  ierr = TaoResetStatistics(tao);CHKERRQ(ierr);
  if (tao->linesearch) {
    ierr = TaoLineSearchReset(tao->linesearch);CHKERRQ(ierr);
  }

  ierr = PetscLogEventBegin(Tao_Solve,tao,0,0,0);CHKERRQ(ierr);
  if (tao->ops->solve){ ierr = (*tao->ops->solve)(tao);CHKERRQ(ierr); }
  ierr = PetscLogEventEnd(Tao_Solve,tao,0,0,0);CHKERRQ(ierr);

  /* niter is per-solve; ntotalits accumulates across repeated solves */
  tao->ntotalits += tao->niter;
  ierr = TaoViewFromOptions(tao,NULL,"-tao_view");CHKERRQ(ierr);

  if (tao->printreason) {
    /* reason > 0 means converged; <= 0 means diverged or still iterating */
    if (tao->reason > 0) {
      ierr = PetscPrintf(((PetscObject)tao)->comm,"TAO solve converged due to %s iterations %D\n",TaoConvergedReasons[tao->reason],tao->niter);CHKERRQ(ierr);
    } else {
      ierr = PetscPrintf(((PetscObject)tao)->comm,"TAO solve did not converge due to %s iteration %D\n",TaoConvergedReasons[tao->reason],tao->niter);CHKERRQ(ierr);
    }
  }
  PetscFunctionReturn(0);
}

#undef __FUNCT__
#define __FUNCT__ "TaoSetUp"
/*@
  TaoSetUp - Sets up the internal data
  structures for the later use
  of a Tao solver

  Collective on tao

  Input Parameters:
. tao - the TAO context

  Notes:
  The user will not need to explicitly call TaoSetUp(), as it will
  automatically be called in TaoSolve().  However, if the user
  desires to call it explicitly, it should come after TaoCreate()
  and any TaoSetSomething() routines, but before TaoSolve().

  Level: advanced

.seealso: TaoCreate(), TaoSolve()
@*/
PetscErrorCode TaoSetUp(Tao tao)
{
  PetscErrorCode ierr;

  PetscFunctionBegin;
  PetscValidHeaderSpecific(tao, TAO_CLASSID,1);
  /* Setup is performed at most once until the object is reconfigured */
  if (tao->setupcalled) PetscFunctionReturn(0);

  /* The solution vector is the template from which solvers size their
     internal work vectors, so it must exist before type-specific setup. */
  if (!tao->solution) SETERRQ(PETSC_COMM_SELF,PETSC_ERR_ARG_WRONGSTATE,"Must call TaoSetInitialVector");
  if (tao->ops->setup) {
    ierr = (*tao->ops->setup)(tao);CHKERRQ(ierr);
  }
  tao->setupcalled = PETSC_TRUE;
  PetscFunctionReturn(0);
}

#undef __FUNCT__
#define __FUNCT__ "TaoDestroy"
/*@
  TaoDestroy - Destroys the TAO context that was created with
  TaoCreate()

  Collective on Tao

  Input Parameter:
.
  tao - the Tao context

  Level: beginner

.seealso: TaoCreate(), TaoSolve()
@*/
PetscErrorCode TaoDestroy(Tao *tao)
{
  PetscErrorCode ierr;

  PetscFunctionBegin;
  if (!*tao) PetscFunctionReturn(0);
  PetscValidHeaderSpecific(*tao,TAO_CLASSID,1);
  /* Only the last reference actually frees the object */
  if (--((PetscObject)*tao)->refct > 0) {*tao=0;PetscFunctionReturn(0);}

  /* Type-specific teardown runs first so it can still use the base data */
  if ((*tao)->ops->destroy) {
    ierr = (*((*tao))->ops->destroy)(*tao);CHKERRQ(ierr);
  }
  ierr = KSPDestroy(&(*tao)->ksp);CHKERRQ(ierr);
  ierr = TaoLineSearchDestroy(&(*tao)->linesearch);CHKERRQ(ierr);

  if ((*tao)->ops->convergencedestroy) {
    ierr = (*(*tao)->ops->convergencedestroy)((*tao)->cnvP);CHKERRQ(ierr);
    if ((*tao)->jacobian_state_inv) {
      ierr = MatDestroy(&(*tao)->jacobian_state_inv);CHKERRQ(ierr);
    }
  }
  ierr = VecDestroy(&(*tao)->solution);CHKERRQ(ierr);
  ierr = VecDestroy(&(*tao)->gradient);CHKERRQ(ierr);

  /* gradient_norm was referenced (not owned), so it is dereferenced;
     gradient_norm_tmp is owned and destroyed. */
  if ((*tao)->gradient_norm) {
    ierr = PetscObjectDereference((PetscObject)(*tao)->gradient_norm);CHKERRQ(ierr);
    ierr = VecDestroy(&(*tao)->gradient_norm_tmp);CHKERRQ(ierr);
  }

  ierr = VecDestroy(&(*tao)->XL);CHKERRQ(ierr);
  ierr = VecDestroy(&(*tao)->XU);CHKERRQ(ierr);
  ierr = VecDestroy(&(*tao)->IL);CHKERRQ(ierr);
  ierr = VecDestroy(&(*tao)->IU);CHKERRQ(ierr);
  ierr = VecDestroy(&(*tao)->DE);CHKERRQ(ierr);
  ierr = VecDestroy(&(*tao)->DI);CHKERRQ(ierr);
  ierr = VecDestroy(&(*tao)->constraints_equality);CHKERRQ(ierr);
  ierr = VecDestroy(&(*tao)->constraints_inequality);CHKERRQ(ierr);
  ierr = VecDestroy(&(*tao)->stepdirection);CHKERRQ(ierr);
  ierr = MatDestroy(&(*tao)->hessian_pre);CHKERRQ(ierr);
  ierr = MatDestroy(&(*tao)->hessian);CHKERRQ(ierr);
  ierr = MatDestroy(&(*tao)->jacobian_pre);CHKERRQ(ierr);
  ierr = MatDestroy(&(*tao)->jacobian);CHKERRQ(ierr);
  ierr = MatDestroy(&(*tao)->jacobian_state_pre);CHKERRQ(ierr);
  ierr = MatDestroy(&(*tao)->jacobian_state);CHKERRQ(ierr);
  ierr = MatDestroy(&(*tao)->jacobian_state_inv);CHKERRQ(ierr);
  ierr = MatDestroy(&(*tao)->jacobian_design);CHKERRQ(ierr);
  ierr = MatDestroy(&(*tao)->jacobian_equality);CHKERRQ(ierr);
  ierr = MatDestroy(&(*tao)->jacobian_equality_pre);CHKERRQ(ierr);
  ierr = MatDestroy(&(*tao)->jacobian_inequality);CHKERRQ(ierr);
  ierr = MatDestroy(&(*tao)->jacobian_inequality_pre);CHKERRQ(ierr);
  ierr = ISDestroy(&(*tao)->state_is);CHKERRQ(ierr);
  ierr = ISDestroy(&(*tao)->design_is);CHKERRQ(ierr);
  ierr = VecDestroy(&(*tao)->sep_weights_v);CHKERRQ(ierr);
  ierr = TaoCancelMonitors(*tao);CHKERRQ(ierr);
  /* History arrays are freed only if Tao allocated them itself */
  if ((*tao)->hist_malloc) {
    ierr = PetscFree((*tao)->hist_obj);CHKERRQ(ierr);
    ierr = PetscFree((*tao)->hist_resid);CHKERRQ(ierr);
    ierr = PetscFree((*tao)->hist_cnorm);CHKERRQ(ierr);
    ierr = PetscFree((*tao)->hist_lits);CHKERRQ(ierr);
  }
  if ((*tao)->sep_weights_n) {
    ierr = PetscFree((*tao)->sep_weights_rows);CHKERRQ(ierr);
    ierr = PetscFree((*tao)->sep_weights_cols);CHKERRQ(ierr);
    ierr = PetscFree((*tao)->sep_weights_w);CHKERRQ(ierr);
  }
  ierr = PetscHeaderDestroy(tao);CHKERRQ(ierr);
  PetscFunctionReturn(0);
}

#undef __FUNCT__
#define __FUNCT__ "TaoSetFromOptions"
/*@
  TaoSetFromOptions - Sets various Tao parameters from user
  options.

  Collective on Tao

  Input Parameter:
. tao - the Tao solver context

  Options Database Keys:
+ -tao_type <type> - The algorithm that TAO uses (lmvm, nls, etc.)
. -tao_gatol <gatol> - absolute error tolerance for ||gradient||
. -tao_grtol <grtol> - relative error tolerance for ||gradient||
. -tao_gttol <gttol> - reduction of ||gradient|| relative to initial gradient
. -tao_max_it <max> - sets maximum number of iterations
. -tao_max_funcs <max> - sets maximum number of function evaluations
. -tao_fmin <fmin> - stop if function value reaches fmin
.
  -tao_steptol <tol> - stop if trust region radius less than <tol>
. -tao_trust0 <t> - initial trust region radius
. -tao_monitor - prints function value and residual at each iteration
. -tao_smonitor - same as tao_monitor, but truncates very small values
. -tao_cmonitor - prints function value, residual, and constraint norm at each iteration
. -tao_view_solution - prints solution vector at each iteration
. -tao_view_separableobjective - prints separable objective vector at each iteration
. -tao_view_step - prints step direction vector at each iteration
. -tao_view_gradient - prints gradient vector at each iteration
. -tao_draw_solution - graphically view solution vector at each iteration
. -tao_draw_step - graphically view step vector at each iteration
. -tao_draw_gradient - graphically view gradient at each iteration
. -tao_fd_gradient - use gradient computed with finite differences
. -tao_cancelmonitors - cancels all monitors (except those set with command line)
. -tao_view - prints information about the Tao after solving
- -tao_converged_reason - prints the reason TAO stopped iterating

  Notes:
  To see all options, run your program with the -help option or consult the
  user's manual.  Should be called after TaoCreate() but before TaoSolve()

  Level: beginner
@*/
PetscErrorCode TaoSetFromOptions(Tao tao)
{
  PetscErrorCode ierr;
  const TaoType  default_type = TAOLMVM;
  char           type[256], monfilename[PETSC_MAX_PATH_LEN];
  PetscViewer    monviewer;
  PetscBool      flg;
  MPI_Comm       comm;

  PetscFunctionBegin;
  PetscValidHeaderSpecific(tao,TAO_CLASSID,1);
  ierr = PetscObjectGetComm((PetscObject)tao,&comm);CHKERRQ(ierr);

  /* So no warnings are given about unused options */
  ierr = PetscOptionsHasName(((PetscObject)tao)->options,((PetscObject)tao)->prefix,"-tao_ls_type",&flg);CHKERRQ(ierr);

  ierr = PetscObjectOptionsBegin((PetscObject)tao);CHKERRQ(ierr);
  {
    ierr = TaoRegisterAll();CHKERRQ(ierr);
    /* If a type was already set programmatically, it becomes the default
       presented to -tao_type rather than TAOLMVM. */
    if (((PetscObject)tao)->type_name) {
      default_type = ((PetscObject)tao)->type_name;
    }
    /* Check for type from options */
    ierr = PetscOptionsFList("-tao_type","Tao Solver type","TaoSetType",TaoList,default_type,type,256,&flg);CHKERRQ(ierr);
    if (flg) {
      ierr = TaoSetType(tao,type);CHKERRQ(ierr);
    } else if (!((PetscObject)tao)->type_name) {
      ierr = TaoSetType(tao,default_type);CHKERRQ(ierr);
    }

    /* Each *_changed flag records that the user set the value explicitly,
       which prevents solver implementations from overriding it. */
    ierr = PetscOptionsReal("-tao_catol","Stop if constraints violations within","TaoSetConstraintTolerances",tao->catol,&tao->catol,&flg);CHKERRQ(ierr);
    if (flg) tao->catol_changed=PETSC_TRUE;
    ierr = PetscOptionsReal("-tao_crtol","Stop if relative contraint violations within","TaoSetConstraintTolerances",tao->crtol,&tao->crtol,&flg);CHKERRQ(ierr);
    if (flg) tao->crtol_changed=PETSC_TRUE;
    ierr = PetscOptionsReal("-tao_gatol","Stop if norm of gradient less than","TaoSetTolerances",tao->gatol,&tao->gatol,&flg);CHKERRQ(ierr);
    if (flg) tao->gatol_changed=PETSC_TRUE;
    ierr = PetscOptionsReal("-tao_grtol","Stop if norm of gradient divided by the function value is less than","TaoSetTolerances",tao->grtol,&tao->grtol,&flg);CHKERRQ(ierr);
    if (flg) tao->grtol_changed=PETSC_TRUE;
    ierr = PetscOptionsReal("-tao_gttol","Stop if the norm of the gradient is less than the norm of the initial gradient times tol","TaoSetTolerances",tao->gttol,&tao->gttol,&flg);CHKERRQ(ierr);
    if (flg) tao->gttol_changed=PETSC_TRUE;
    ierr = PetscOptionsInt("-tao_max_it","Stop if iteration number exceeds","TaoSetMaximumIterations",tao->max_it,&tao->max_it,&flg);CHKERRQ(ierr);
    if (flg) tao->max_it_changed=PETSC_TRUE;
    ierr = PetscOptionsInt("-tao_max_funcs","Stop if number of function evaluations exceeds","TaoSetMaximumFunctionEvaluations",tao->max_funcs,&tao->max_funcs,&flg);CHKERRQ(ierr);
    if (flg) tao->max_funcs_changed=PETSC_TRUE;
    ierr = PetscOptionsReal("-tao_fmin","Stop if function less than","TaoSetFunctionLowerBound",tao->fmin,&tao->fmin,&flg);CHKERRQ(ierr);
    if (flg) tao->fmin_changed=PETSC_TRUE;
    ierr = PetscOptionsReal("-tao_steptol","Stop if step size or trust region radius less than","",tao->steptol,&tao->steptol,&flg);CHKERRQ(ierr);
    if (flg) tao->steptol_changed=PETSC_TRUE;
    ierr = PetscOptionsReal("-tao_trust0","Initial trust region radius","TaoSetTrustRegionRadius",tao->trust0,&tao->trust0,&flg);CHKERRQ(ierr);
    if (flg) tao->trust0_changed=PETSC_TRUE;

    /* Each monitor option opens an ASCII viewer on the given file (default
       stdout); the viewer is destroyed when the monitor is cancelled. */
    ierr = PetscOptionsString("-tao_view_solution","view solution vector after each evaluation","TaoSetMonitor","stdout",monfilename,PETSC_MAX_PATH_LEN,&flg);CHKERRQ(ierr);
    if (flg) {
      ierr = PetscViewerASCIIOpen(comm,monfilename,&monviewer);CHKERRQ(ierr);
      ierr = TaoSetMonitor(tao,TaoSolutionMonitor,monviewer,(PetscErrorCode (*)(void**))PetscViewerDestroy);CHKERRQ(ierr);
    }

    ierr = PetscOptionsBool("-tao_converged_reason","Print reason for TAO converged","TaoSolve",tao->printreason,&tao->printreason,NULL);CHKERRQ(ierr);
    ierr = PetscOptionsString("-tao_view_gradient","view gradient vector after each evaluation","TaoSetMonitor","stdout",monfilename,PETSC_MAX_PATH_LEN,&flg);CHKERRQ(ierr);
    if (flg) {
      ierr = PetscViewerASCIIOpen(comm,monfilename,&monviewer);CHKERRQ(ierr);
      ierr = TaoSetMonitor(tao,TaoGradientMonitor,monviewer,(PetscErrorCode (*)(void**))PetscViewerDestroy);CHKERRQ(ierr);
    }

    ierr = PetscOptionsString("-tao_view_stepdirection","view step direction vector after each iteration","TaoSetMonitor","stdout",monfilename,PETSC_MAX_PATH_LEN,&flg);CHKERRQ(ierr);
    if (flg) {
      ierr = PetscViewerASCIIOpen(comm,monfilename,&monviewer);CHKERRQ(ierr);
      ierr = TaoSetMonitor(tao,TaoStepDirectionMonitor,monviewer,(PetscErrorCode (*)(void**))PetscViewerDestroy);CHKERRQ(ierr);
    }

    ierr = PetscOptionsString("-tao_view_separableobjective","view separable objective vector after each evaluation","TaoSetMonitor","stdout",monfilename,PETSC_MAX_PATH_LEN,&flg);CHKERRQ(ierr);
    if (flg) {
      ierr = PetscViewerASCIIOpen(comm,monfilename,&monviewer);CHKERRQ(ierr);
      ierr = TaoSetMonitor(tao,TaoSeparableObjectiveMonitor,monviewer,(PetscErrorCode (*)(void**))PetscViewerDestroy);CHKERRQ(ierr);
    }

    ierr = PetscOptionsString("-tao_monitor","Use the default convergence monitor","TaoSetMonitor","stdout",monfilename,PETSC_MAX_PATH_LEN,&flg);CHKERRQ(ierr);
    if (flg) {
      ierr = PetscViewerASCIIOpen(comm,monfilename,&monviewer);CHKERRQ(ierr);
      ierr = TaoSetMonitor(tao,TaoDefaultMonitor,monviewer,(PetscErrorCode (*)(void**))PetscViewerDestroy);CHKERRQ(ierr);
    }

    ierr = PetscOptionsString("-tao_smonitor","Use the short convergence monitor","TaoSetMonitor","stdout",monfilename,PETSC_MAX_PATH_LEN,&flg);CHKERRQ(ierr);
    if (flg) {
      ierr = PetscViewerASCIIOpen(comm,monfilename,&monviewer);CHKERRQ(ierr);
      ierr = TaoSetMonitor(tao,TaoDefaultSMonitor,monviewer,(PetscErrorCode (*)(void**))PetscViewerDestroy);CHKERRQ(ierr);
    }

    ierr = PetscOptionsString("-tao_cmonitor","Use the default convergence monitor with constraint norm","TaoSetMonitor","stdout",monfilename,PETSC_MAX_PATH_LEN,&flg);CHKERRQ(ierr);
    if (flg) {
      ierr = PetscViewerASCIIOpen(comm,monfilename,&monviewer);CHKERRQ(ierr);
      ierr = TaoSetMonitor(tao,TaoDefaultCMonitor,monviewer,(PetscErrorCode (*)(void**))PetscViewerDestroy);CHKERRQ(ierr);
    }


    flg = PETSC_FALSE;
    ierr = PetscOptionsBool("-tao_cancelmonitors","cancel all monitors and call any registered destroy routines","TaoCancelMonitors",flg,&flg,NULL);CHKERRQ(ierr);
    if (flg) {ierr = TaoCancelMonitors(tao);CHKERRQ(ierr);}

    /* Draw monitors use the default draw viewer, so no file name / viewer
       destruction is involved. */
    flg = PETSC_FALSE;
    ierr = PetscOptionsBool("-tao_draw_solution","Plot solution vector at each iteration","TaoSetMonitor",flg,&flg,NULL);CHKERRQ(ierr);
    if (flg) {
      ierr = TaoSetMonitor(tao,TaoDrawSolutionMonitor,NULL,NULL);CHKERRQ(ierr);
    }

    flg = PETSC_FALSE;
    ierr = PetscOptionsBool("-tao_draw_step","plots step direction at each iteration","TaoSetMonitor",flg,&flg,NULL);CHKERRQ(ierr);
    if (flg) {
      ierr = TaoSetMonitor(tao,TaoDrawStepMonitor,NULL,NULL);CHKERRQ(ierr);
    }

    flg = PETSC_FALSE;
    ierr = PetscOptionsBool("-tao_draw_gradient","plots gradient at each iteration","TaoSetMonitor",flg,&flg,NULL);CHKERRQ(ierr);
    if (flg) {
      ierr = TaoSetMonitor(tao,TaoDrawGradientMonitor,NULL,NULL);CHKERRQ(ierr);
    }
    flg = PETSC_FALSE;
    ierr = PetscOptionsBool("-tao_fd_gradient","compute gradient using finite differences","TaoDefaultComputeGradient",flg,&flg,NULL);CHKERRQ(ierr);
    if (flg) {
      ierr = TaoSetGradientRoutine(tao,TaoDefaultComputeGradient,NULL);CHKERRQ(ierr);
    }
    ierr = PetscOptionsEnum("-tao_subset_type","subset type", "", TaoSubSetTypes,(PetscEnum)tao->subset_type, (PetscEnum*)&tao->subset_type, 0);CHKERRQ(ierr);

    /* Give the concrete solver implementation a chance to process its own
       options inside the same options block. */
    if (tao->ops->setfromoptions) {
      ierr = (*tao->ops->setfromoptions)(PetscOptionsObject,tao);CHKERRQ(ierr);
    }
  }
  ierr = PetscOptionsEnd();CHKERRQ(ierr);
  PetscFunctionReturn(0);
}

#undef __FUNCT__
#define __FUNCT__ "TaoView"
/*@C
  TaoView - Prints information about the Tao

  Collective on
  Tao

  Input Parameters:
+ tao - the Tao context
- viewer - visualization context

  Options Database Key:
. -tao_view - Calls TaoView() at the end of TaoSolve()

  Notes:
  The available visualization contexts include
+     PETSC_VIEWER_STDOUT_SELF - standard output (default)
-     PETSC_VIEWER_STDOUT_WORLD - synchronized standard
      output where only the first processor opens
      the file.  All other processors send their
      data to the first processor to print.

  Level: beginner

.seealso: PetscViewerASCIIOpen()
@*/
PetscErrorCode TaoView(Tao tao, PetscViewer viewer)
{
  PetscErrorCode ierr;
  PetscBool      isascii,isstring;
  const TaoType  type;

  PetscFunctionBegin;
  PetscValidHeaderSpecific(tao,TAO_CLASSID,1);
  /* Default to stdout on the Tao's communicator when no viewer is given */
  if (!viewer) {
    ierr = PetscViewerASCIIGetStdout(((PetscObject)tao)->comm,&viewer);CHKERRQ(ierr);
  }
  PetscValidHeaderSpecific(viewer,PETSC_VIEWER_CLASSID,2);
  PetscCheckSameComm(tao,1,viewer,2);

  ierr = PetscObjectTypeCompare((PetscObject)viewer,PETSCVIEWERASCII,&isascii);CHKERRQ(ierr);
  ierr = PetscObjectTypeCompare((PetscObject)viewer,PETSCVIEWERSTRING,&isstring);CHKERRQ(ierr);
  if (isascii) {
    ierr = PetscObjectPrintClassNamePrefixType((PetscObject)tao,viewer);CHKERRQ(ierr);
    ierr = PetscViewerASCIIPushTab(viewer);CHKERRQ(ierr);

    /* Type-specific view, indented one extra level */
    if (tao->ops->view) {
      ierr = PetscViewerASCIIPushTab(viewer);CHKERRQ(ierr);
      ierr = (*tao->ops->view)(tao,viewer);CHKERRQ(ierr);
      ierr = PetscViewerASCIIPopTab(viewer);CHKERRQ(ierr);
    }
    if (tao->linesearch) {
      ierr = PetscObjectPrintClassNamePrefixType((PetscObject)(tao->linesearch),viewer);CHKERRQ(ierr);
    }
    if (tao->ksp) {
      ierr = PetscObjectPrintClassNamePrefixType((PetscObject)(tao->ksp),viewer);CHKERRQ(ierr);
      ierr = PetscViewerASCIIPrintf(viewer,"total KSP iterations: %D\n",tao->ksp_tot_its);CHKERRQ(ierr);
    }
    if (tao->XL || tao->XU) {
      ierr = PetscViewerASCIIPrintf(viewer,"Active Set subset type: %s\n",TaoSubSetTypes[tao->subset_type]);CHKERRQ(ierr);
    }

    ierr = PetscViewerASCIIPrintf(viewer,"convergence tolerances: gatol=%g,",(double)tao->gatol);CHKERRQ(ierr);
    ierr = PetscViewerASCIIPrintf(viewer," steptol=%g,",(double)tao->steptol);CHKERRQ(ierr);
    ierr = PetscViewerASCIIPrintf(viewer," gttol=%g\n",(double)tao->gttol);CHKERRQ(ierr);

    ierr = PetscViewerASCIIPrintf(viewer,"Residual in Function/Gradient:=%g\n",(double)tao->residual);CHKERRQ(ierr);

    /* Constraint information is only shown when constraints are in play */
    if (tao->cnorm>0 || tao->catol>0 || tao->crtol>0){
      ierr = PetscViewerASCIIPrintf(viewer,"convergence tolerances:");CHKERRQ(ierr);
      ierr = PetscViewerASCIIPrintf(viewer," catol=%g,",(double)tao->catol);CHKERRQ(ierr);
      ierr = PetscViewerASCIIPrintf(viewer," crtol=%g\n",(double)tao->crtol);CHKERRQ(ierr);
      ierr = PetscViewerASCIIPrintf(viewer,"Residual in Constraints:=%g\n",(double)tao->cnorm);CHKERRQ(ierr);
    }

    if (tao->trust < tao->steptol){
      ierr = PetscViewerASCIIPrintf(viewer,"convergence tolerances: steptol=%g\n",(double)tao->steptol);CHKERRQ(ierr);
      ierr = PetscViewerASCIIPrintf(viewer,"Final trust region radius:=%g\n",(double)tao->trust);CHKERRQ(ierr);
    }

    if (tao->fmin>-1.e25){
      ierr = PetscViewerASCIIPrintf(viewer,"convergence tolerances: function minimum=%g\n",(double)tao->fmin);CHKERRQ(ierr);
    }
    ierr = PetscViewerASCIIPrintf(viewer,"Objective value=%g\n",(double)tao->fc);CHKERRQ(ierr);

    ierr = PetscViewerASCIIPrintf(viewer,"total number of iterations=%D,          ",tao->niter);CHKERRQ(ierr);
    ierr = PetscViewerASCIIPrintf(viewer,"              (max: %D)\n",tao->max_it);CHKERRQ(ierr);

    if (tao->nfuncs>0){
      ierr = PetscViewerASCIIPrintf(viewer,"total number of function evaluations=%D,",tao->nfuncs);CHKERRQ(ierr);
      ierr = PetscViewerASCIIPrintf(viewer,"                max: %D\n",tao->max_funcs);CHKERRQ(ierr);
    }
    if (tao->ngrads>0){
      ierr = PetscViewerASCIIPrintf(viewer,"total number of gradient evaluations=%D,",tao->ngrads);CHKERRQ(ierr);
      ierr = PetscViewerASCIIPrintf(viewer,"                max: %D\n",tao->max_funcs);CHKERRQ(ierr);
    }
    if (tao->nfuncgrads>0){
      ierr = PetscViewerASCIIPrintf(viewer,"total number of function/gradient evaluations=%D,",tao->nfuncgrads);CHKERRQ(ierr);
      ierr = PetscViewerASCIIPrintf(viewer,"    (max: %D)\n",tao->max_funcs);CHKERRQ(ierr);
    }
    if (tao->nhess>0){
      ierr = PetscViewerASCIIPrintf(viewer,"total number of Hessian evaluations=%D\n",tao->nhess);CHKERRQ(ierr);
    }
    /*  if (tao->linear_its>0){
     ierr = PetscViewerASCIIPrintf(viewer,"  total Krylov method iterations=%D\n",tao->linear_its);CHKERRQ(ierr);
     }*/
    if (tao->nconstraints>0){
      ierr = PetscViewerASCIIPrintf(viewer,"total number of constraint function evaluations=%D\n",tao->nconstraints);CHKERRQ(ierr);
    }
    if (tao->njac>0){
      ierr = PetscViewerASCIIPrintf(viewer,"total number of Jacobian evaluations=%D\n",tao->njac);CHKERRQ(ierr);
    }

    /* reason > 0 encodes convergence, otherwise divergence/termination */
    if (tao->reason>0){
      ierr = PetscViewerASCIIPrintf(viewer,    "Solution converged: ");CHKERRQ(ierr);
      switch (tao->reason) {
      case TAO_CONVERGED_GATOL:
        ierr = PetscViewerASCIIPrintf(viewer," ||g(X)|| <= gatol\n");CHKERRQ(ierr);
        break;
      case TAO_CONVERGED_GRTOL:
        ierr = PetscViewerASCIIPrintf(viewer," ||g(X)||/|f(X)| <= grtol\n");CHKERRQ(ierr);
        break;
      case TAO_CONVERGED_GTTOL:
        ierr = PetscViewerASCIIPrintf(viewer," ||g(X)||/||g(X0)|| <= gttol\n");CHKERRQ(ierr);
        break;
      case TAO_CONVERGED_STEPTOL:
        ierr = PetscViewerASCIIPrintf(viewer," Steptol -- step size small\n");CHKERRQ(ierr);
        break;
      case TAO_CONVERGED_MINF:
        ierr = PetscViewerASCIIPrintf(viewer," Minf --  f < fmin\n");CHKERRQ(ierr);
        break;
      case TAO_CONVERGED_USER:
        ierr = PetscViewerASCIIPrintf(viewer," User Terminated\n");CHKERRQ(ierr);
        break;
      default:
        ierr = PetscViewerASCIIPrintf(viewer,"\n");CHKERRQ(ierr);
        break;
      }

    } else {
      ierr = PetscViewerASCIIPrintf(viewer,"Solver terminated: %d",tao->reason);CHKERRQ(ierr);
      switch (tao->reason) {
      case TAO_DIVERGED_MAXITS:
        ierr = PetscViewerASCIIPrintf(viewer," Maximum Iterations\n");CHKERRQ(ierr);
        break;
      case TAO_DIVERGED_NAN:
        ierr = PetscViewerASCIIPrintf(viewer," NAN or Inf encountered\n");CHKERRQ(ierr);
        break;
      case TAO_DIVERGED_MAXFCN:
        ierr = PetscViewerASCIIPrintf(viewer," Maximum Function Evaluations\n");CHKERRQ(ierr);
        break;
      case TAO_DIVERGED_LS_FAILURE:
        ierr = PetscViewerASCIIPrintf(viewer," Line Search Failure\n");CHKERRQ(ierr);
        break;
      case TAO_DIVERGED_TR_REDUCTION:
        ierr = PetscViewerASCIIPrintf(viewer," Trust Region too small\n");CHKERRQ(ierr);
        break;
      case TAO_DIVERGED_USER:
        ierr = PetscViewerASCIIPrintf(viewer," User Terminated\n");CHKERRQ(ierr);
        break;
      default:
        ierr = PetscViewerASCIIPrintf(viewer,"\n");CHKERRQ(ierr);
        break;
      }
    }
    ierr = PetscViewerASCIIPopTab(viewer);CHKERRQ(ierr);
  } else if (isstring) {
    ierr = TaoGetType(tao,&type);CHKERRQ(ierr);
    ierr = PetscViewerStringSPrintf(viewer," %-3.3s",type);CHKERRQ(ierr);
  }
  PetscFunctionReturn(0);
}

#undef __FUNCT__
#define __FUNCT__ "TaoSetTolerances"
/*@
  TaoSetTolerances - Sets parameters used in TAO convergence tests

  Logically collective on Tao

  Input Parameters:
+ tao - the Tao context
. gatol - stop if norm of gradient is less than this
. grtol - stop if relative norm of gradient is less than this
- gttol - stop if norm of gradient is reduced by this factor

  Options Database Keys:
+ -tao_gatol <gatol> - Sets gatol
. -tao_grtol <grtol> - Sets grtol
- -tao_gttol <gttol> - Sets gttol

  Stopping Criteria:
$ ||g(X)|| <= gatol
$ ||g(X)|| / |f(X)| <= grtol
$ ||g(X)|| / ||g(X0)|| <= gttol

  Notes:
  Use PETSC_DEFAULT to leave one or more tolerances unchanged.
713 714 Level: beginner 715 716 .seealso: TaoGetTolerances() 717 718 @*/ 719 PetscErrorCode TaoSetTolerances(Tao tao, PetscReal gatol, PetscReal grtol, PetscReal gttol) 720 { 721 PetscErrorCode ierr; 722 723 PetscFunctionBegin; 724 PetscValidHeaderSpecific(tao,TAO_CLASSID,1); 725 726 if (gatol != PETSC_DEFAULT) { 727 if (gatol<0) { 728 ierr = PetscInfo(tao,"Tried to set negative gatol -- ignored.\n");CHKERRQ(ierr); 729 } else { 730 tao->gatol = PetscMax(0,gatol); 731 tao->gatol_changed=PETSC_TRUE; 732 } 733 } 734 735 if (grtol != PETSC_DEFAULT) { 736 if (grtol<0) { 737 ierr = PetscInfo(tao,"Tried to set negative grtol -- ignored.\n");CHKERRQ(ierr); 738 } else { 739 tao->grtol = PetscMax(0,grtol); 740 tao->grtol_changed=PETSC_TRUE; 741 } 742 } 743 744 if (gttol != PETSC_DEFAULT) { 745 if (gttol<0) { 746 ierr = PetscInfo(tao,"Tried to set negative gttol -- ignored.\n");CHKERRQ(ierr); 747 } else { 748 tao->gttol = PetscMax(0,gttol); 749 tao->gttol_changed=PETSC_TRUE; 750 } 751 } 752 PetscFunctionReturn(0); 753 } 754 755 #undef __FUNCT__ 756 #define __FUNCT__ "TaoSetConstraintTolerances" 757 /*@ 758 TaoSetConstraintTolerances - Sets constraint tolerance parameters used in TAO convergence tests 759 760 Logically collective on Tao 761 762 Input Parameters: 763 + tao - the Tao context 764 . catol - absolute constraint tolerance, constraint norm must be less than catol for used for gatol convergence criteria 765 - crtol - relative contraint tolerance, constraint norm must be less than crtol for used for gatol, gttol convergence criteria 766 767 Options Database Keys: 768 + -tao_catol <catol> - Sets catol 769 - -tao_crtol <crtol> - Sets crtol 770 771 Notes: 772 Use PETSC_DEFAULT to leave any tolerance unchanged. 
  Level: intermediate

.seealso: TaoGetTolerances(), TaoGetConstraintTolerances(), TaoSetTolerances()

@*/
PetscErrorCode TaoSetConstraintTolerances(Tao tao, PetscReal catol, PetscReal crtol)
{
  PetscErrorCode ierr;

  PetscFunctionBegin;
  PetscValidHeaderSpecific(tao,TAO_CLASSID,1);

  /* PETSC_DEFAULT leaves the tolerance untouched; negative values are
     rejected with a logged note; valid values are recorded and flagged
     as user-set so solvers do not override them. */
  if (catol != PETSC_DEFAULT) {
    if (catol<0) {
      ierr = PetscInfo(tao,"Tried to set negative catol -- ignored.\n");CHKERRQ(ierr);
    } else {
      tao->catol = PetscMax(0,catol);
      tao->catol_changed=PETSC_TRUE;
    }
  }

  if (crtol != PETSC_DEFAULT) {
    if (crtol<0) {
      ierr = PetscInfo(tao,"Tried to set negative crtol -- ignored.\n");CHKERRQ(ierr);
    } else {
      tao->crtol = PetscMax(0,crtol);
      tao->crtol_changed=PETSC_TRUE;
    }
  }
  PetscFunctionReturn(0);
}

#undef __FUNCT__
#define __FUNCT__ "TaoGetConstraintTolerances"
/*@
  TaoGetConstraintTolerances - Gets constraint tolerance parameters used in TAO convergence tests

  Not Collective

  Input Parameter:
. tao - the Tao context

  Output Parameters:
+ catol - absolute constraint tolerance; the constraint norm must be less than catol for the gatol convergence criterion to apply
- crtol - relative constraint tolerance; the constraint norm must be less than crtol for the gatol, gttol convergence criteria to apply

  Level: intermediate

.seealso: TaoGetTolerances(), TaoSetTolerances(), TaoSetConstraintTolerances()

@*/
PetscErrorCode TaoGetConstraintTolerances(Tao tao, PetscReal *catol, PetscReal *crtol)
{
  PetscFunctionBegin;
  PetscValidHeaderSpecific(tao,TAO_CLASSID,1);
  /* Either output may be NULL if the caller does not want that value */
  if (catol) *catol = tao->catol;
  if (crtol) *crtol = tao->crtol;
  PetscFunctionReturn(0);
}

#undef __FUNCT__
#define __FUNCT__ "TaoSetFunctionLowerBound"
/*@
  TaoSetFunctionLowerBound - Sets a bound on the solution objective value.
838 When an approximate solution with an objective value below this number 839 has been found, the solver will terminate. 840 841 Logically Collective on Tao 842 843 Input Parameters: 844 + tao - the Tao solver context 845 - fmin - the tolerance 846 847 Options Database Keys: 848 . -tao_fmin <fmin> - sets the minimum function value 849 850 Level: intermediate 851 852 .seealso: TaoSetTolerances() 853 @*/ 854 PetscErrorCode TaoSetFunctionLowerBound(Tao tao,PetscReal fmin) 855 { 856 PetscFunctionBegin; 857 PetscValidHeaderSpecific(tao,TAO_CLASSID,1); 858 tao->fmin = fmin; 859 tao->fmin_changed=PETSC_TRUE; 860 PetscFunctionReturn(0); 861 } 862 863 #undef __FUNCT__ 864 #define __FUNCT__ "TaoGetFunctionLowerBound" 865 /*@ 866 TaoGetFunctionLowerBound - Gets the bound on the solution objective value. 867 When an approximate solution with an objective value below this number 868 has been found, the solver will terminate. 869 870 Not collective on Tao 871 872 Input Parameters: 873 . tao - the Tao solver context 874 875 OutputParameters: 876 . fmin - the minimum function value 877 878 Level: intermediate 879 880 .seealso: TaoSetFunctionLowerBound() 881 @*/ 882 PetscErrorCode TaoGetFunctionLowerBound(Tao tao,PetscReal *fmin) 883 { 884 PetscFunctionBegin; 885 PetscValidHeaderSpecific(tao,TAO_CLASSID,1); 886 *fmin = tao->fmin; 887 PetscFunctionReturn(0); 888 } 889 890 #undef __FUNCT__ 891 #define __FUNCT__ "TaoSetMaximumFunctionEvaluations" 892 /*@ 893 TaoSetMaximumFunctionEvaluations - Sets a maximum number of 894 function evaluations. 895 896 Logically Collective on Tao 897 898 Input Parameters: 899 + tao - the Tao solver context 900 - nfcn - the maximum number of function evaluations (>=0) 901 902 Options Database Keys: 903 . 
-tao_max_funcs <nfcn> - sets the maximum number of function evaluations 904 905 Level: intermediate 906 907 .seealso: TaoSetTolerances(), TaoSetMaximumIterations() 908 @*/ 909 910 PetscErrorCode TaoSetMaximumFunctionEvaluations(Tao tao,PetscInt nfcn) 911 { 912 PetscFunctionBegin; 913 PetscValidHeaderSpecific(tao,TAO_CLASSID,1); 914 tao->max_funcs = PetscMax(0,nfcn); 915 tao->max_funcs_changed=PETSC_TRUE; 916 PetscFunctionReturn(0); 917 } 918 919 #undef __FUNCT__ 920 #define __FUNCT__ "TaoGetMaximumFunctionEvaluations" 921 /*@ 922 TaoGetMaximumFunctionEvaluations - Sets a maximum number of 923 function evaluations. 924 925 Not Collective 926 927 Input Parameters: 928 . tao - the Tao solver context 929 930 Output Parameters: 931 . nfcn - the maximum number of function evaluations 932 933 Level: intermediate 934 935 .seealso: TaoSetMaximumFunctionEvaluations(), TaoGetMaximumIterations() 936 @*/ 937 938 PetscErrorCode TaoGetMaximumFunctionEvaluations(Tao tao,PetscInt *nfcn) 939 { 940 PetscFunctionBegin; 941 PetscValidHeaderSpecific(tao,TAO_CLASSID,1); 942 *nfcn = tao->max_funcs; 943 PetscFunctionReturn(0); 944 } 945 946 #undef __FUNCT__ 947 #define __FUNCT__ "TaoGetCurrentFunctionEvaluations" 948 /*@ 949 TaoGetCurrentFunctionEvaluations - Get current number of 950 function evaluations. 951 952 Not Collective 953 954 Input Parameters: 955 . tao - the Tao solver context 956 957 Output Parameters: 958 . 
nfuncs - the current number of function evaluations 959 960 Level: intermediate 961 962 .seealso: TaoSetMaximumFunctionEvaluations(), TaoGetMaximumFunctionEvaluations(), TaoGetMaximumIterations() 963 @*/ 964 965 PetscErrorCode TaoGetCurrentFunctionEvaluations(Tao tao,PetscInt *nfuncs) 966 { 967 PetscFunctionBegin; 968 PetscValidHeaderSpecific(tao,TAO_CLASSID,1); 969 *nfuncs=PetscMax(tao->nfuncs,tao->nfuncgrads); 970 PetscFunctionReturn(0); 971 } 972 973 #undef __FUNCT__ 974 #define __FUNCT__ "TaoSetMaximumIterations" 975 /*@ 976 TaoSetMaximumIterations - Sets a maximum number of iterates. 977 978 Logically Collective on Tao 979 980 Input Parameters: 981 + tao - the Tao solver context 982 - maxits - the maximum number of iterates (>=0) 983 984 Options Database Keys: 985 . -tao_max_it <its> - sets the maximum number of iterations 986 987 Level: intermediate 988 989 .seealso: TaoSetTolerances(), TaoSetMaximumFunctionEvaluations() 990 @*/ 991 PetscErrorCode TaoSetMaximumIterations(Tao tao,PetscInt maxits) 992 { 993 PetscFunctionBegin; 994 PetscValidHeaderSpecific(tao,TAO_CLASSID,1); 995 tao->max_it = PetscMax(0,maxits); 996 tao->max_it_changed=PETSC_TRUE; 997 PetscFunctionReturn(0); 998 } 999 1000 #undef __FUNCT__ 1001 #define __FUNCT__ "TaoGetMaximumIterations" 1002 /*@ 1003 TaoGetMaximumIterations - Sets a maximum number of iterates. 1004 1005 Not Collective 1006 1007 Input Parameters: 1008 . tao - the Tao solver context 1009 1010 Output Parameters: 1011 . 
maxits - the maximum number of iterates 1012 1013 Level: intermediate 1014 1015 .seealso: TaoSetMaximumIterations(), TaoGetMaximumFunctionEvaluations() 1016 @*/ 1017 PetscErrorCode TaoGetMaximumIterations(Tao tao,PetscInt *maxits) 1018 { 1019 PetscFunctionBegin; 1020 PetscValidHeaderSpecific(tao,TAO_CLASSID,1); 1021 *maxits = tao->max_it; 1022 PetscFunctionReturn(0); 1023 } 1024 1025 #undef __FUNCT__ 1026 #define __FUNCT__ "TaoSetInitialTrustRegionRadius" 1027 /*@ 1028 TaoSetInitialTrustRegionRadius - Sets the initial trust region radius. 1029 1030 Logically collective on Tao 1031 1032 Input Parameter: 1033 + tao - a TAO optimization solver 1034 - radius - the trust region radius 1035 1036 Level: intermediate 1037 1038 Options Database Key: 1039 . -tao_trust0 <t0> - sets initial trust region radius 1040 1041 .seealso: TaoGetTrustRegionRadius(), TaoSetTrustRegionTolerance() 1042 @*/ 1043 PetscErrorCode TaoSetInitialTrustRegionRadius(Tao tao, PetscReal radius) 1044 { 1045 PetscFunctionBegin; 1046 PetscValidHeaderSpecific(tao,TAO_CLASSID,1); 1047 tao->trust0 = PetscMax(0.0,radius); 1048 tao->trust0_changed=PETSC_TRUE; 1049 PetscFunctionReturn(0); 1050 } 1051 1052 #undef __FUNCT__ 1053 #define __FUNCT__ "TaoGetInitialTrustRegionRadius" 1054 /*@ 1055 TaoGetInitialTrustRegionRadius - Sets the initial trust region radius. 1056 1057 Not Collective 1058 1059 Input Parameter: 1060 . tao - a TAO optimization solver 1061 1062 Output Parameter: 1063 . 
radius - the trust region radius 1064 1065 Level: intermediate 1066 1067 .seealso: TaoSetInitialTrustRegionRadius(), TaoGetCurrentTrustRegionRadius() 1068 @*/ 1069 PetscErrorCode TaoGetInitialTrustRegionRadius(Tao tao, PetscReal *radius) 1070 { 1071 PetscFunctionBegin; 1072 PetscValidHeaderSpecific(tao,TAO_CLASSID,1); 1073 *radius = tao->trust0; 1074 PetscFunctionReturn(0); 1075 } 1076 1077 #undef __FUNCT__ 1078 #define __FUNCT__ "TaoGetCurrentTrustRegionRadius" 1079 /*@ 1080 TaoGetCurrentTrustRegionRadius - Gets the current trust region radius. 1081 1082 Not Collective 1083 1084 Input Parameter: 1085 . tao - a TAO optimization solver 1086 1087 Output Parameter: 1088 . radius - the trust region radius 1089 1090 Level: intermediate 1091 1092 .seealso: TaoSetInitialTrustRegionRadius(), TaoGetInitialTrustRegionRadius() 1093 @*/ 1094 PetscErrorCode TaoGetCurrentTrustRegionRadius(Tao tao, PetscReal *radius) 1095 { 1096 PetscFunctionBegin; 1097 PetscValidHeaderSpecific(tao,TAO_CLASSID,1); 1098 *radius = tao->trust; 1099 PetscFunctionReturn(0); 1100 } 1101 1102 #undef __FUNCT__ 1103 #define __FUNCT__ "TaoGetTolerances" 1104 /*@ 1105 TaoGetTolerances - gets the current values of tolerances 1106 1107 Not Collective 1108 1109 Input Parameters: 1110 . tao - the Tao context 1111 1112 Output Parameters: 1113 + gatol - stop if norm of gradient is less than this 1114 . 
grtol - stop if relative norm of gradient is less than this 1115 - gttol - stop if norm of gradient is reduced by a this factor 1116 1117 Note: NULL can be used as an argument if not all tolerances values are needed 1118 1119 .seealso TaoSetTolerances() 1120 1121 Level: intermediate 1122 @*/ 1123 PetscErrorCode TaoGetTolerances(Tao tao, PetscReal *gatol, PetscReal *grtol, PetscReal *gttol) 1124 { 1125 PetscFunctionBegin; 1126 PetscValidHeaderSpecific(tao,TAO_CLASSID,1); 1127 if (gatol) *gatol=tao->gatol; 1128 if (grtol) *grtol=tao->grtol; 1129 if (gttol) *gttol=tao->gttol; 1130 PetscFunctionReturn(0); 1131 } 1132 1133 #undef __FUNCT__ 1134 #define __FUNCT__ "TaoGetKSP" 1135 /*@ 1136 TaoGetKSP - Gets the linear solver used by the optimization solver. 1137 Application writers should use TaoGetKSP if they need direct access 1138 to the PETSc KSP object. 1139 1140 Not Collective 1141 1142 Input Parameters: 1143 . tao - the TAO solver 1144 1145 Output Parameters: 1146 . ksp - the KSP linear solver used in the optimization solver 1147 1148 Level: intermediate 1149 1150 @*/ 1151 PetscErrorCode TaoGetKSP(Tao tao, KSP *ksp) 1152 { 1153 PetscFunctionBegin; 1154 *ksp = tao->ksp; 1155 PetscFunctionReturn(0); 1156 } 1157 1158 #undef __FUNCT__ 1159 #define __FUNCT__ "TaoGetLinearSolveIterations" 1160 /*@ 1161 TaoGetLinearSolveIterations - Gets the total number of linear iterations 1162 used by the TAO solver 1163 1164 Not Collective 1165 1166 Input Parameter: 1167 . tao - TAO context 1168 1169 Output Parameter: 1170 . 
lits - number of linear iterations 1171 1172 Notes: 1173 This counter is reset to zero for each successive call to TaoSolve() 1174 1175 Level: intermediate 1176 1177 .keywords: TAO 1178 1179 .seealso: TaoGetKSP() 1180 @*/ 1181 PetscErrorCode TaoGetLinearSolveIterations(Tao tao,PetscInt *lits) 1182 { 1183 PetscFunctionBegin; 1184 PetscValidHeaderSpecific(tao,TAO_CLASSID,1); 1185 PetscValidIntPointer(lits,2); 1186 *lits = tao->ksp_tot_its; 1187 PetscFunctionReturn(0); 1188 } 1189 1190 #undef __FUNCT__ 1191 #define __FUNCT__ "TaoGetLineSearch" 1192 /*@ 1193 TaoGetLineSearch - Gets the line search used by the optimization solver. 1194 Application writers should use TaoGetLineSearch if they need direct access 1195 to the TaoLineSearch object. 1196 1197 Not Collective 1198 1199 Input Parameters: 1200 . tao - the TAO solver 1201 1202 Output Parameters: 1203 . ls - the line search used in the optimization solver 1204 1205 Level: intermediate 1206 1207 @*/ 1208 PetscErrorCode TaoGetLineSearch(Tao tao, TaoLineSearch *ls) 1209 { 1210 PetscFunctionBegin; 1211 *ls = tao->linesearch; 1212 PetscFunctionReturn(0); 1213 } 1214 1215 #undef __FUNCT__ 1216 #define __FUNCT__ "TaoAddLineSearchCounts" 1217 /*@ 1218 TaoAddLineSearchCounts - Adds the number of function evaluations spent 1219 in the line search to the running total. 
1220 1221 Input Parameters: 1222 + tao - the TAO solver 1223 - ls - the line search used in the optimization solver 1224 1225 Level: developer 1226 1227 .seealso: TaoLineSearchApply() 1228 @*/ 1229 PetscErrorCode TaoAddLineSearchCounts(Tao tao) 1230 { 1231 PetscErrorCode ierr; 1232 PetscBool flg; 1233 PetscInt nfeval,ngeval,nfgeval; 1234 1235 PetscFunctionBegin; 1236 PetscValidHeaderSpecific(tao,TAO_CLASSID,1); 1237 if (tao->linesearch) { 1238 ierr = TaoLineSearchIsUsingTaoRoutines(tao->linesearch,&flg);CHKERRQ(ierr); 1239 if (!flg) { 1240 ierr = TaoLineSearchGetNumberFunctionEvaluations(tao->linesearch,&nfeval,&ngeval,&nfgeval);CHKERRQ(ierr); 1241 tao->nfuncs+=nfeval; 1242 tao->ngrads+=ngeval; 1243 tao->nfuncgrads+=nfgeval; 1244 } 1245 } 1246 PetscFunctionReturn(0); 1247 } 1248 1249 #undef __FUNCT__ 1250 #define __FUNCT__ "TaoGetSolutionVector" 1251 /*@ 1252 TaoGetSolutionVector - Returns the vector with the current TAO solution 1253 1254 Not Collective 1255 1256 Input Parameter: 1257 . tao - the Tao context 1258 1259 Output Parameter: 1260 . X - the current solution 1261 1262 Level: intermediate 1263 1264 Note: The returned vector will be the same object that was passed into TaoSetInitialVector() 1265 @*/ 1266 PetscErrorCode TaoGetSolutionVector(Tao tao, Vec *X) 1267 { 1268 PetscFunctionBegin; 1269 PetscValidHeaderSpecific(tao,TAO_CLASSID,1); 1270 *X = tao->solution; 1271 PetscFunctionReturn(0); 1272 } 1273 1274 #undef __FUNCT__ 1275 #define __FUNCT__ "TaoGetGradientVector" 1276 /*@ 1277 TaoGetGradientVector - Returns the vector with the current TAO gradient 1278 1279 Not Collective 1280 1281 Input Parameter: 1282 . tao - the Tao context 1283 1284 Output Parameter: 1285 . 
G - the current solution 1286 1287 Level: intermediate 1288 @*/ 1289 PetscErrorCode TaoGetGradientVector(Tao tao, Vec *G) 1290 { 1291 PetscFunctionBegin; 1292 PetscValidHeaderSpecific(tao,TAO_CLASSID,1); 1293 *G = tao->gradient; 1294 PetscFunctionReturn(0); 1295 } 1296 1297 #undef __FUNCT__ 1298 #define __FUNCT__ "TaoResetStatistics" 1299 /*@ 1300 TaoResetStatistics - Initialize the statistics used by TAO for all of the solvers. 1301 These statistics include the iteration number, residual norms, and convergence status. 1302 This routine gets called before solving each optimization problem. 1303 1304 Collective on Tao 1305 1306 Input Parameters: 1307 . solver - the Tao context 1308 1309 Level: developer 1310 1311 .seealso: TaoCreate(), TaoSolve() 1312 @*/ 1313 PetscErrorCode TaoResetStatistics(Tao tao) 1314 { 1315 PetscFunctionBegin; 1316 PetscValidHeaderSpecific(tao,TAO_CLASSID,1); 1317 tao->niter = 0; 1318 tao->nfuncs = 0; 1319 tao->nfuncgrads = 0; 1320 tao->ngrads = 0; 1321 tao->nhess = 0; 1322 tao->njac = 0; 1323 tao->nconstraints = 0; 1324 tao->ksp_its = 0; 1325 tao->ksp_tot_its = 0; 1326 tao->reason = TAO_CONTINUE_ITERATING; 1327 tao->residual = 0.0; 1328 tao->cnorm = 0.0; 1329 tao->step = 0.0; 1330 tao->lsflag = PETSC_FALSE; 1331 if (tao->hist_reset) tao->hist_len=0; 1332 PetscFunctionReturn(0); 1333 } 1334 1335 #undef __FUNCT__ 1336 #define __FUNCT__ "TaoSetConvergenceTest" 1337 /*@C 1338 TaoSetConvergenceTest - Sets the function that is to be used to test 1339 for convergence o fthe iterative minimization solution. The new convergence 1340 testing routine will replace TAO's default convergence test. 1341 1342 Logically Collective on Tao 1343 1344 Input Parameters: 1345 + tao - the Tao object 1346 . 
conv - the routine to test for convergence 1347 - ctx - [optional] context for private data for the convergence routine 1348 (may be NULL) 1349 1350 Calling sequence of conv: 1351 $ PetscErrorCode conv(Tao tao, void *ctx) 1352 1353 + tao - the Tao object 1354 - ctx - [optional] convergence context 1355 1356 Note: The new convergence testing routine should call TaoSetConvergedReason(). 1357 1358 Level: advanced 1359 1360 .seealso: TaoSetConvergedReason(), TaoGetSolutionStatus(), TaoGetTolerances(), TaoSetMonitor 1361 1362 @*/ 1363 PetscErrorCode TaoSetConvergenceTest(Tao tao, PetscErrorCode (*conv)(Tao,void*), void *ctx) 1364 { 1365 PetscFunctionBegin; 1366 PetscValidHeaderSpecific(tao,TAO_CLASSID,1); 1367 (tao)->ops->convergencetest = conv; 1368 (tao)->cnvP = ctx; 1369 PetscFunctionReturn(0); 1370 } 1371 1372 #undef __FUNCT__ 1373 #define __FUNCT__ "TaoSetMonitor" 1374 /*@C 1375 TaoSetMonitor - Sets an ADDITIONAL function that is to be used at every 1376 iteration of the solver to display the iteration's 1377 progress. 1378 1379 Logically Collective on Tao 1380 1381 Input Parameters: 1382 + tao - the Tao solver context 1383 . mymonitor - monitoring routine 1384 - mctx - [optional] user-defined context for private data for the 1385 monitor routine (may be NULL) 1386 1387 Calling sequence of mymonitor: 1388 $ int mymonitor(Tao tao,void *mctx) 1389 1390 + tao - the Tao solver context 1391 - mctx - [optional] monitoring context 1392 1393 1394 Options Database Keys: 1395 + -tao_monitor - sets TaoDefaultMonitor() 1396 . -tao_smonitor - sets short monitor 1397 . -tao_cmonitor - same as smonitor plus constraint norm 1398 . -tao_view_solution - view solution at each iteration 1399 . -tao_view_gradient - view gradient at each iteration 1400 . 
-tao_view_separableobjective - view separable objective function at each iteration 1401 - -tao_cancelmonitors - cancels all monitors that have been hardwired into a code by calls to TaoSetMonitor(), but does not cancel those set via the options database. 1402 1403 1404 Notes: 1405 Several different monitoring routines may be set by calling 1406 TaoSetMonitor() multiple times; all will be called in the 1407 order in which they were set. 1408 1409 Fortran Notes: Only one monitor function may be set 1410 1411 Level: intermediate 1412 1413 .seealso: TaoDefaultMonitor(), TaoCancelMonitors(), TaoSetDestroyRoutine() 1414 @*/ 1415 PetscErrorCode TaoSetMonitor(Tao tao, PetscErrorCode (*func)(Tao, void*), void *ctx,PetscErrorCode (*dest)(void**)) 1416 { 1417 PetscErrorCode ierr; 1418 PetscInt i; 1419 1420 PetscFunctionBegin; 1421 PetscValidHeaderSpecific(tao,TAO_CLASSID,1); 1422 if (tao->numbermonitors >= MAXTAOMONITORS) SETERRQ1(PETSC_COMM_SELF,1,"Cannot attach another monitor -- max=",MAXTAOMONITORS); 1423 1424 for (i=0; i<tao->numbermonitors;i++) { 1425 if (func == tao->monitor[i] && dest == tao->monitordestroy[i] && ctx == tao->monitorcontext[i]) { 1426 if (dest) { 1427 ierr = (*dest)(&ctx);CHKERRQ(ierr); 1428 } 1429 PetscFunctionReturn(0); 1430 } 1431 } 1432 tao->monitor[tao->numbermonitors] = func; 1433 tao->monitorcontext[tao->numbermonitors] = ctx; 1434 tao->monitordestroy[tao->numbermonitors] = dest; 1435 ++tao->numbermonitors; 1436 PetscFunctionReturn(0); 1437 } 1438 1439 #undef __FUNCT__ 1440 #define __FUNCT__ "TaoCancelMonitors" 1441 /*@ 1442 TaoCancelMonitors - Clears all the monitor functions for a Tao object. 1443 1444 Logically Collective on Tao 1445 1446 Input Parameters: 1447 . tao - the Tao solver context 1448 1449 Options Database: 1450 . 
-tao_cancelmonitors - cancels all monitors that have been hardwired 1451 into a code by calls to TaoSetMonitor(), but does not cancel those 1452 set via the options database 1453 1454 Notes: 1455 There is no way to clear one specific monitor from a Tao object. 1456 1457 Level: advanced 1458 1459 .seealso: TaoDefaultMonitor(), TaoSetMonitor() 1460 @*/ 1461 PetscErrorCode TaoCancelMonitors(Tao tao) 1462 { 1463 PetscInt i; 1464 PetscErrorCode ierr; 1465 1466 PetscFunctionBegin; 1467 PetscValidHeaderSpecific(tao,TAO_CLASSID,1); 1468 for (i=0;i<tao->numbermonitors;i++) { 1469 if (tao->monitordestroy[i]) { 1470 ierr = (*tao->monitordestroy[i])(&tao->monitorcontext[i]);CHKERRQ(ierr); 1471 } 1472 } 1473 tao->numbermonitors=0; 1474 PetscFunctionReturn(0); 1475 } 1476 1477 #undef __FUNCT__ 1478 #define __FUNCT__ "TaoDefaultMonitor" 1479 /*@ 1480 TaoDefaultMonitor - Default routine for monitoring progress of the 1481 Tao solvers (default). This monitor prints the function value and gradient 1482 norm at each iteration. It can be turned on from the command line using the 1483 -tao_monitor option 1484 1485 Collective on Tao 1486 1487 Input Parameters: 1488 + tao - the Tao context 1489 - ctx - PetscViewer context or NULL 1490 1491 Options Database Keys: 1492 . 
-tao_monitor 1493 1494 Level: advanced 1495 1496 .seealso: TaoDefaultSMonitor(), TaoSetMonitor() 1497 @*/ 1498 PetscErrorCode TaoDefaultMonitor(Tao tao, void *ctx) 1499 { 1500 PetscErrorCode ierr; 1501 PetscInt its; 1502 PetscReal fct,gnorm; 1503 PetscViewer viewer = (PetscViewer)ctx; 1504 1505 PetscFunctionBegin; 1506 PetscValidHeaderSpecific(viewer,PETSC_VIEWER_CLASSID,2); 1507 its=tao->niter; 1508 fct=tao->fc; 1509 gnorm=tao->residual; 1510 ierr=PetscViewerASCIIPrintf(viewer,"iter = %3D,",its);CHKERRQ(ierr); 1511 ierr=PetscViewerASCIIPrintf(viewer," Function value: %g,",(double)fct);CHKERRQ(ierr); 1512 if (gnorm >= PETSC_INFINITY) { 1513 ierr=PetscViewerASCIIPrintf(viewer," Residual: Inf \n");CHKERRQ(ierr); 1514 } else { 1515 ierr=PetscViewerASCIIPrintf(viewer," Residual: %g \n",(double)gnorm);CHKERRQ(ierr); 1516 } 1517 PetscFunctionReturn(0); 1518 } 1519 1520 #undef __FUNCT__ 1521 #define __FUNCT__ "TaoDefaultSMonitor" 1522 /*@ 1523 TaoDefaultSMonitor - Default routine for monitoring progress of the 1524 solver. Same as TaoDefaultMonitor() except 1525 it prints fewer digits of the residual as the residual gets smaller. 1526 This is because the later digits are meaningless and are often 1527 different on different machines; by using this routine different 1528 machines will usually generate the same output. It can be turned on 1529 by using the -tao_smonitor option 1530 1531 Collective on Tao 1532 1533 Input Parameters: 1534 + tao - the Tao context 1535 - ctx - PetscViewer context of type ASCII 1536 1537 Options Database Keys: 1538 . 
-tao_smonitor 1539 1540 Level: advanced 1541 1542 .seealso: TaoDefaultMonitor(), TaoSetMonitor() 1543 @*/ 1544 PetscErrorCode TaoDefaultSMonitor(Tao tao, void *ctx) 1545 { 1546 PetscErrorCode ierr; 1547 PetscInt its; 1548 PetscReal fct,gnorm; 1549 PetscViewer viewer = (PetscViewer)ctx; 1550 1551 PetscFunctionBegin; 1552 PetscValidHeaderSpecific(viewer,PETSC_VIEWER_CLASSID,2); 1553 its=tao->niter; 1554 fct=tao->fc; 1555 gnorm=tao->residual; 1556 ierr=PetscViewerASCIIPrintf(viewer,"iter = %3D,",its);CHKERRQ(ierr); 1557 ierr=PetscViewerASCIIPrintf(viewer," Function value %g,",(double)fct);CHKERRQ(ierr); 1558 if (gnorm >= PETSC_INFINITY) { 1559 ierr=PetscViewerASCIIPrintf(viewer," Residual: Inf \n");CHKERRQ(ierr); 1560 } else if (gnorm > 1.e-6) { 1561 ierr=PetscViewerASCIIPrintf(viewer," Residual: %g \n",(double)gnorm);CHKERRQ(ierr); 1562 } else if (gnorm > 1.e-11) { 1563 ierr=PetscViewerASCIIPrintf(viewer," Residual: < 1.0e-6 \n");CHKERRQ(ierr); 1564 } else { 1565 ierr=PetscViewerASCIIPrintf(viewer," Residual: < 1.0e-11 \n");CHKERRQ(ierr); 1566 } 1567 PetscFunctionReturn(0); 1568 } 1569 1570 #undef __FUNCT__ 1571 #define __FUNCT__ "TaoDefaultCMonitor" 1572 /*@ 1573 TaoDefaultCMonitor - same as TaoDefaultMonitor() except 1574 it prints the norm of the constraints function. It can be turned on 1575 from the command line using the -tao_cmonitor option 1576 1577 Collective on Tao 1578 1579 Input Parameters: 1580 + tao - the Tao context 1581 - ctx - PetscViewer context or NULL 1582 1583 Options Database Keys: 1584 . 
-tao_cmonitor 1585 1586 Level: advanced 1587 1588 .seealso: TaoDefaultMonitor(), TaoSetMonitor() 1589 @*/ 1590 PetscErrorCode TaoDefaultCMonitor(Tao tao, void *ctx) 1591 { 1592 PetscErrorCode ierr; 1593 PetscInt its; 1594 PetscReal fct,gnorm; 1595 PetscViewer viewer = (PetscViewer)ctx; 1596 1597 PetscFunctionBegin; 1598 PetscValidHeaderSpecific(viewer,PETSC_VIEWER_CLASSID,2); 1599 its=tao->niter; 1600 fct=tao->fc; 1601 gnorm=tao->residual; 1602 ierr=PetscViewerASCIIPrintf(viewer,"iter = %D,",its);CHKERRQ(ierr); 1603 ierr=PetscViewerASCIIPrintf(viewer," Function value: %g,",(double)fct);CHKERRQ(ierr); 1604 ierr=PetscViewerASCIIPrintf(viewer," Residual: %g ",(double)gnorm);CHKERRQ(ierr); 1605 ierr = PetscViewerASCIIPrintf(viewer," Constraint: %g \n",(double)tao->cnorm);CHKERRQ(ierr); 1606 PetscFunctionReturn(0); 1607 } 1608 1609 #undef __FUNCT__ 1610 #define __FUNCT__ "TaoSolutionMonitor" 1611 /*@C 1612 TaoSolutionMonitor - Views the solution at each iteration 1613 It can be turned on from the command line using the 1614 -tao_view_solution option 1615 1616 Collective on Tao 1617 1618 Input Parameters: 1619 + tao - the Tao context 1620 - ctx - PetscViewer context or NULL 1621 1622 Options Database Keys: 1623 . 
-tao_view_solution 1624 1625 Level: advanced 1626 1627 .seealso: TaoDefaultSMonitor(), TaoSetMonitor() 1628 @*/ 1629 PetscErrorCode TaoSolutionMonitor(Tao tao, void *ctx) 1630 { 1631 PetscErrorCode ierr; 1632 PetscViewer viewer = (PetscViewer)ctx;; 1633 1634 PetscFunctionBegin; 1635 PetscValidHeaderSpecific(viewer,PETSC_VIEWER_CLASSID,2); 1636 ierr = VecView(tao->solution, viewer);CHKERRQ(ierr); 1637 PetscFunctionReturn(0); 1638 } 1639 1640 #undef __FUNCT__ 1641 #define __FUNCT__ "TaoGradientMonitor" 1642 /*@C 1643 TaoGradientMonitor - Views the gradient at each iteration 1644 It can be turned on from the command line using the 1645 -tao_view_gradient option 1646 1647 Collective on Tao 1648 1649 Input Parameters: 1650 + tao - the Tao context 1651 - ctx - PetscViewer context or NULL 1652 1653 Options Database Keys: 1654 . -tao_view_gradient 1655 1656 Level: advanced 1657 1658 .seealso: TaoDefaultSMonitor(), TaoSetMonitor() 1659 @*/ 1660 PetscErrorCode TaoGradientMonitor(Tao tao, void *ctx) 1661 { 1662 PetscErrorCode ierr; 1663 PetscViewer viewer = (PetscViewer)ctx; 1664 1665 PetscFunctionBegin; 1666 PetscValidHeaderSpecific(viewer,PETSC_VIEWER_CLASSID,2); 1667 ierr = VecView(tao->gradient, viewer);CHKERRQ(ierr); 1668 PetscFunctionReturn(0); 1669 } 1670 1671 #undef __FUNCT__ 1672 #define __FUNCT__ "TaoStepDirectionMonitor" 1673 /*@C 1674 TaoStepDirectionMonitor - Views the gradient at each iteration 1675 It can be turned on from the command line using the 1676 -tao_view_gradient option 1677 1678 Collective on Tao 1679 1680 Input Parameters: 1681 + tao - the Tao context 1682 - ctx - PetscViewer context or NULL 1683 1684 Options Database Keys: 1685 . 
-tao_view_gradient 1686 1687 Level: advanced 1688 1689 .seealso: TaoDefaultSMonitor(), TaoSetMonitor() 1690 @*/ 1691 PetscErrorCode TaoStepDirectionMonitor(Tao tao, void *ctx) 1692 { 1693 PetscErrorCode ierr; 1694 PetscViewer viewer = (PetscViewer)ctx; 1695 1696 PetscFunctionBegin; 1697 PetscValidHeaderSpecific(viewer,PETSC_VIEWER_CLASSID,2); 1698 ierr = VecView(tao->stepdirection, viewer);CHKERRQ(ierr); 1699 PetscFunctionReturn(0); 1700 } 1701 1702 #undef __FUNCT__ 1703 #define __FUNCT__ "TaoDrawSolutionMonitor" 1704 /*@C 1705 TaoDrawSolutionMonitor - Plots the solution at each iteration 1706 It can be turned on from the command line using the 1707 -tao_draw_solution option 1708 1709 Collective on Tao 1710 1711 Input Parameters: 1712 + tao - the Tao context 1713 - ctx - PetscViewer context 1714 1715 Options Database Keys: 1716 . -tao_draw_solution 1717 1718 Level: advanced 1719 1720 .seealso: TaoSolutionMonitor(), TaoSetMonitor(), TaoDrawGradientMonitor 1721 @*/ 1722 PetscErrorCode TaoDrawSolutionMonitor(Tao tao, void *ctx) 1723 { 1724 PetscErrorCode ierr; 1725 PetscViewer viewer = (PetscViewer) ctx; 1726 1727 PetscFunctionBegin; 1728 PetscValidHeaderSpecific(viewer,PETSC_VIEWER_CLASSID,2); 1729 ierr = VecView(tao->solution, viewer);CHKERRQ(ierr); 1730 PetscFunctionReturn(0); 1731 } 1732 1733 #undef __FUNCT__ 1734 #define __FUNCT__ "TaoDrawGradientMonitor" 1735 /*@C 1736 TaoDrawGradientMonitor - Plots the gradient at each iteration 1737 It can be turned on from the command line using the 1738 -tao_draw_gradient option 1739 1740 Collective on Tao 1741 1742 Input Parameters: 1743 + tao - the Tao context 1744 - ctx - PetscViewer context 1745 1746 Options Database Keys: 1747 . 
-tao_draw_gradient 1748 1749 Level: advanced 1750 1751 .seealso: TaoGradientMonitor(), TaoSetMonitor(), TaoDrawSolutionMonitor 1752 @*/ 1753 PetscErrorCode TaoDrawGradientMonitor(Tao tao, void *ctx) 1754 { 1755 PetscErrorCode ierr; 1756 PetscViewer viewer = (PetscViewer)ctx; 1757 1758 PetscFunctionBegin; 1759 PetscValidHeaderSpecific(viewer,PETSC_VIEWER_CLASSID,2); 1760 ierr = VecView(tao->gradient, viewer);CHKERRQ(ierr); 1761 PetscFunctionReturn(0); 1762 } 1763 1764 #undef __FUNCT__ 1765 #define __FUNCT__ "TaoDrawStepMonitor" 1766 /*@C 1767 TaoDrawStepMonitor - Plots the step direction at each iteration 1768 It can be turned on from the command line using the 1769 -tao_draw_step option 1770 1771 Collective on Tao 1772 1773 Input Parameters: 1774 + tao - the Tao context 1775 - ctx - PetscViewer context 1776 1777 Options Database Keys: 1778 . -tao_draw_step 1779 1780 Level: advanced 1781 1782 .seealso: TaoSetMonitor(), TaoDrawSolutionMonitor 1783 @*/ 1784 PetscErrorCode TaoDrawStepMonitor(Tao tao, void *ctx) 1785 { 1786 PetscErrorCode ierr; 1787 PetscViewer viewer = (PetscViewer)(ctx); 1788 1789 PetscFunctionBegin; 1790 ierr = VecView(tao->stepdirection, viewer);CHKERRQ(ierr); 1791 PetscFunctionReturn(0); 1792 } 1793 1794 #undef __FUNCT__ 1795 #define __FUNCT__ "TaoSeparableObjectiveMonitor" 1796 /*@C 1797 TaoSeparableObjectiveMonitor - Views the separable objective function at each iteration 1798 It can be turned on from the command line using the 1799 -tao_view_separableobjective option 1800 1801 Collective on Tao 1802 1803 Input Parameters: 1804 + tao - the Tao context 1805 - ctx - PetscViewer context or NULL 1806 1807 Options Database Keys: 1808 . 
-tao_view_separableobjective 1809 1810 Level: advanced 1811 1812 .seealso: TaoDefaultSMonitor(), TaoSetMonitor() 1813 @*/ 1814 PetscErrorCode TaoSeparableObjectiveMonitor(Tao tao, void *ctx) 1815 { 1816 PetscErrorCode ierr; 1817 PetscViewer viewer = (PetscViewer)ctx; 1818 1819 PetscFunctionBegin; 1820 PetscValidHeaderSpecific(viewer,PETSC_VIEWER_CLASSID,2); 1821 ierr = VecView(tao->sep_objective,viewer);CHKERRQ(ierr); 1822 PetscFunctionReturn(0); 1823 } 1824 1825 #undef __FUNCT__ 1826 #define __FUNCT__ "TaoDefaultConvergenceTest" 1827 /*@ 1828 TaoDefaultConvergenceTest - Determines whether the solver should continue iterating 1829 or terminate. 1830 1831 Collective on Tao 1832 1833 Input Parameters: 1834 + tao - the Tao context 1835 - dummy - unused dummy context 1836 1837 Output Parameter: 1838 . reason - for terminating 1839 1840 Notes: 1841 This routine checks the residual in the optimality conditions, the 1842 relative residual in the optimity conditions, the number of function 1843 evaluations, and the function value to test convergence. Some 1844 solvers may use different convergence routines. 
1845 1846 Level: developer 1847 1848 .seealso: TaoSetTolerances(),TaoGetConvergedReason(),TaoSetConvergedReason() 1849 @*/ 1850 1851 PetscErrorCode TaoDefaultConvergenceTest(Tao tao,void *dummy) 1852 { 1853 PetscInt niter=tao->niter, nfuncs=PetscMax(tao->nfuncs,tao->nfuncgrads); 1854 PetscInt max_funcs=tao->max_funcs; 1855 PetscReal gnorm=tao->residual, gnorm0=tao->gnorm0; 1856 PetscReal f=tao->fc, steptol=tao->steptol,trradius=tao->step; 1857 PetscReal gatol=tao->gatol,grtol=tao->grtol,gttol=tao->gttol; 1858 PetscReal catol=tao->catol,crtol=tao->crtol; 1859 PetscReal fmin=tao->fmin, cnorm=tao->cnorm; 1860 TaoConvergedReason reason=tao->reason; 1861 PetscErrorCode ierr; 1862 1863 PetscFunctionBegin; 1864 PetscValidHeaderSpecific(tao, TAO_CLASSID,1); 1865 if (reason != TAO_CONTINUE_ITERATING) { 1866 PetscFunctionReturn(0); 1867 } 1868 1869 if (PetscIsInfOrNanReal(f)) { 1870 ierr = PetscInfo(tao,"Failed to converged, function value is Inf or NaN\n");CHKERRQ(ierr); 1871 reason = TAO_DIVERGED_NAN; 1872 } else if (f <= fmin && cnorm <=catol) { 1873 ierr = PetscInfo2(tao,"Converged due to function value %g < minimum function value %g\n", (double)f,(double)fmin);CHKERRQ(ierr); 1874 reason = TAO_CONVERGED_MINF; 1875 } else if (gnorm<= gatol && cnorm <=catol) { 1876 ierr = PetscInfo2(tao,"Converged due to residual norm ||g(X)||=%g < %g\n",(double)gnorm,(double)gatol);CHKERRQ(ierr); 1877 reason = TAO_CONVERGED_GATOL; 1878 } else if ( f!=0 && PetscAbsReal(gnorm/f) <= grtol && cnorm <= crtol) { 1879 ierr = PetscInfo2(tao,"Converged due to residual ||g(X)||/|f(X)| =%g < %g\n",(double)(gnorm/f),(double)grtol);CHKERRQ(ierr); 1880 reason = TAO_CONVERGED_GRTOL; 1881 } else if (gnorm0 != 0 && ((gttol == 0 && gnorm == 0) || gnorm/gnorm0 < gttol) && cnorm <= crtol) { 1882 ierr = PetscInfo2(tao,"Converged due to relative residual norm ||g(X)||/||g(X0)|| = %g < %g\n",(double)(gnorm/gnorm0),(double)gttol);CHKERRQ(ierr); 1883 reason = TAO_CONVERGED_GTTOL; 1884 } else if (nfuncs > 
max_funcs){ 1885 ierr = PetscInfo2(tao,"Exceeded maximum number of function evaluations: %D > %D\n", nfuncs,max_funcs);CHKERRQ(ierr); 1886 reason = TAO_DIVERGED_MAXFCN; 1887 } else if ( tao->lsflag != 0 ){ 1888 ierr = PetscInfo(tao,"Tao Line Search failure.\n");CHKERRQ(ierr); 1889 reason = TAO_DIVERGED_LS_FAILURE; 1890 } else if (trradius < steptol && niter > 0){ 1891 ierr = PetscInfo2(tao,"Trust region/step size too small: %g < %g\n", (double)trradius,(double)steptol);CHKERRQ(ierr); 1892 reason = TAO_CONVERGED_STEPTOL; 1893 } else if (niter > tao->max_it) { 1894 ierr = PetscInfo2(tao,"Exceeded maximum number of iterations: %D > %D\n",niter,tao->max_it);CHKERRQ(ierr); 1895 reason = TAO_DIVERGED_MAXITS; 1896 } else { 1897 reason = TAO_CONTINUE_ITERATING; 1898 } 1899 tao->reason = reason; 1900 PetscFunctionReturn(0); 1901 } 1902 1903 #undef __FUNCT__ 1904 #define __FUNCT__ "TaoSetOptionsPrefix" 1905 /*@C 1906 TaoSetOptionsPrefix - Sets the prefix used for searching for all 1907 TAO options in the database. 1908 1909 1910 Logically Collective on Tao 1911 1912 Input Parameters: 1913 + tao - the Tao context 1914 - prefix - the prefix string to prepend to all TAO option requests 1915 1916 Notes: 1917 A hyphen (-) must NOT be given at the beginning of the prefix name. 1918 The first character of all runtime options is AUTOMATICALLY the hyphen. 
1919 1920 For example, to distinguish between the runtime options for two 1921 different TAO solvers, one could call 1922 .vb 1923 TaoSetOptionsPrefix(tao1,"sys1_") 1924 TaoSetOptionsPrefix(tao2,"sys2_") 1925 .ve 1926 1927 This would enable use of different options for each system, such as 1928 .vb 1929 -sys1_tao_method blmvm -sys1_tao_gtol 1.e-3 1930 -sys2_tao_method lmvm -sys2_tao_gtol 1.e-4 1931 .ve 1932 1933 1934 Level: advanced 1935 1936 .seealso: TaoAppendOptionsPrefix(), TaoGetOptionsPrefix() 1937 @*/ 1938 1939 PetscErrorCode TaoSetOptionsPrefix(Tao tao, const char p[]) 1940 { 1941 PetscErrorCode ierr; 1942 1943 PetscFunctionBegin; 1944 ierr = PetscObjectSetOptionsPrefix((PetscObject)tao,p);CHKERRQ(ierr); 1945 if (tao->linesearch) { 1946 ierr = TaoLineSearchSetOptionsPrefix(tao->linesearch,p);CHKERRQ(ierr); 1947 } 1948 if (tao->ksp) { 1949 ierr = KSPSetOptionsPrefix(tao->ksp,p);CHKERRQ(ierr); 1950 } 1951 PetscFunctionReturn(0); 1952 } 1953 1954 #undef __FUNCT__ 1955 #define __FUNCT__ "TaoAppendOptionsPrefix" 1956 /*@C 1957 TaoAppendOptionsPrefix - Appends to the prefix used for searching for all 1958 TAO options in the database. 1959 1960 1961 Logically Collective on Tao 1962 1963 Input Parameters: 1964 + tao - the Tao solver context 1965 - prefix - the prefix string to prepend to all TAO option requests 1966 1967 Notes: 1968 A hyphen (-) must NOT be given at the beginning of the prefix name. 1969 The first character of all runtime options is AUTOMATICALLY the hyphen. 
1970 1971 1972 Level: advanced 1973 1974 .seealso: TaoSetOptionsPrefix(), TaoGetOptionsPrefix() 1975 @*/ 1976 PetscErrorCode TaoAppendOptionsPrefix(Tao tao, const char p[]) 1977 { 1978 PetscErrorCode ierr; 1979 1980 PetscFunctionBegin; 1981 ierr = PetscObjectAppendOptionsPrefix((PetscObject)tao,p);CHKERRQ(ierr); 1982 if (tao->linesearch) { 1983 ierr = TaoLineSearchSetOptionsPrefix(tao->linesearch,p);CHKERRQ(ierr); 1984 } 1985 if (tao->ksp) { 1986 ierr = KSPSetOptionsPrefix(tao->ksp,p);CHKERRQ(ierr); 1987 } 1988 PetscFunctionReturn(0); 1989 } 1990 1991 #undef __FUNCT__ 1992 #define __FUNCT__ "TaoGetOptionsPrefix" 1993 /*@C 1994 TaoGetOptionsPrefix - Gets the prefix used for searching for all 1995 TAO options in the database 1996 1997 Not Collective 1998 1999 Input Parameters: 2000 . tao - the Tao context 2001 2002 Output Parameters: 2003 . prefix - pointer to the prefix string used is returned 2004 2005 Notes: On the fortran side, the user should pass in a string 'prefix' of 2006 sufficient length to hold the prefix. 2007 2008 Level: advanced 2009 2010 .seealso: TaoSetOptionsPrefix(), TaoAppendOptionsPrefix() 2011 @*/ 2012 PetscErrorCode TaoGetOptionsPrefix(Tao tao, const char *p[]) 2013 { 2014 return PetscObjectGetOptionsPrefix((PetscObject)tao,p); 2015 } 2016 2017 #undef __FUNCT__ 2018 #define __FUNCT__ "TaoSetType" 2019 /*@C 2020 TaoSetType - Sets the method for the unconstrained minimization solver. 2021 2022 Collective on Tao 2023 2024 Input Parameters: 2025 + solver - the Tao solver context 2026 - type - a known method 2027 2028 Options Database Key: 2029 . -tao_type <type> - Sets the method; use -help for a list 2030 of available methods (for instance, "-tao_type lmvm" or "-tao_type tron") 2031 2032 Available methods include: 2033 + nls - Newton's method with line search for unconstrained minimization 2034 . ntr - Newton's method with trust region for unconstrained minimization 2035 . 
ntl - Newton's method with trust region, line search for unconstrained minimization
. lmvm - Limited memory variable metric method for unconstrained minimization
. cg - Nonlinear conjugate gradient method for unconstrained minimization
. nm - Nelder-Mead algorithm for derivate-free unconstrained minimization
. tron - Newton Trust Region method for bound constrained minimization
. gpcg - Newton Trust Region method for quadratic bound constrained minimization
. blmvm - Limited memory variable metric method for bound constrained minimization
- pounders - Model-based algorithm pounder extended for nonlinear least squares

  Level: intermediate

.seealso: TaoCreate(), TaoGetType(), TaoType

@*/
PetscErrorCode TaoSetType(Tao tao, const TaoType type)
{
  PetscErrorCode ierr;
  PetscErrorCode (*create_xxx)(Tao);
  PetscBool      issame;

  PetscFunctionBegin;
  PetscValidHeaderSpecific(tao,TAO_CLASSID,1);

  /* No-op if the solver is already of the requested type. */
  ierr = PetscObjectTypeCompare((PetscObject)tao,type,&issame);CHKERRQ(ierr);
  if (issame) PetscFunctionReturn(0);

  /* Look up the registered constructor before tearing anything down, so an
     unknown type leaves the current solver intact. */
  ierr = PetscFunctionListFind(TaoList, type, (void(**)(void))&create_xxx);CHKERRQ(ierr);
  if (!create_xxx) SETERRQ1(PETSC_COMM_SELF,PETSC_ERR_ARG_UNKNOWN_TYPE,"Unable to find requested Tao type %s",type);

  /* Destroy the existing solver information: type-specific data first (via the
     old type's destroy hook), then the sub-solvers and work vectors. */
  if (tao->ops->destroy) {
    ierr = (*tao->ops->destroy)(tao);CHKERRQ(ierr);
  }
  ierr = KSPDestroy(&tao->ksp);CHKERRQ(ierr);
  ierr = TaoLineSearchDestroy(&tao->linesearch);CHKERRQ(ierr);
  ierr = VecDestroy(&tao->gradient);CHKERRQ(ierr);
  ierr = VecDestroy(&tao->stepdirection);CHKERRQ(ierr);

  /* Clear the old type's method table; the constructor below refills it. */
  tao->ops->setup = 0;
  tao->ops->solve = 0;
  tao->ops->view = 0;
  tao->ops->setfromoptions = 0;
  tao->ops->destroy = 0;

  /* Force TaoSetUp() to run again for the new type. */
  tao->setupcalled = PETSC_FALSE;

  ierr = (*create_xxx)(tao);CHKERRQ(ierr);
  ierr = PetscObjectChangeTypeName((PetscObject)tao,type);CHKERRQ(ierr);
  PetscFunctionReturn(0);
}

#undef __FUNCT__
#define __FUNCT__ "TaoRegister"
/*MC
   TaoRegister - Adds a method to the TAO package for unconstrained minimization.

   Synopsis:
   TaoRegister(char *name_solver,char *path,char *name_Create,int (*routine_Create)(Tao))

   Not collective

   Input Parameters:
+  sname - name of a new user-defined solver
-  func - routine to Create method context

   Notes:
   TaoRegister() may be called multiple times to add several user-defined solvers.

   Sample usage:
.vb
   TaoRegister("my_solver",MySolverCreate);
.ve

   Then, your solver can be chosen with the procedural interface via
$     TaoSetType(tao,"my_solver")
   or at runtime via the option
$     -tao_type my_solver

   Level: advanced

.seealso: TaoRegisterAll(), TaoRegisterDestroy()
M*/
PetscErrorCode TaoRegister(const char sname[], PetscErrorCode (*func)(Tao))
{
  PetscErrorCode ierr;

  PetscFunctionBegin;
  /* Add the (name, constructor) pair to the global list consulted by TaoSetType(). */
  ierr = PetscFunctionListAdd(&TaoList,sname, (void (*)(void))func);CHKERRQ(ierr);
  PetscFunctionReturn(0);
}

#undef __FUNCT__
#define __FUNCT__ "TaoRegisterDestroy"
/*@C
   TaoRegisterDestroy - Frees the list of minimization solvers that were
   registered by TaoRegisterDynamic().

   Not Collective

   Level: advanced

.seealso: TaoRegisterAll(), TaoRegister()
@*/
PetscErrorCode TaoRegisterDestroy(void)
{
  PetscErrorCode ierr;
  PetscFunctionBegin;
  ierr = PetscFunctionListDestroy(&TaoList);CHKERRQ(ierr);
  /* Allow TaoRegisterAll() to repopulate the list if the package is used again. */
  TaoRegisterAllCalled = PETSC_FALSE;
  PetscFunctionReturn(0);
}

#undef __FUNCT__
#define __FUNCT__ "TaoGetIterationNumber"
/*@
   TaoGetIterationNumber - Gets the number of Tao iterations completed
   at this time.
2152 2153 Not Collective 2154 2155 Input Parameter: 2156 . tao - Tao context 2157 2158 Output Parameter: 2159 . iter - iteration number 2160 2161 Notes: 2162 For example, during the computation of iteration 2 this would return 1. 2163 2164 2165 Level: intermediate 2166 2167 .keywords: Tao, nonlinear, get, iteration, number, 2168 2169 .seealso: TaoGetLinearSolveIterations() 2170 @*/ 2171 PetscErrorCode TaoGetIterationNumber(Tao tao,PetscInt *iter) 2172 { 2173 PetscFunctionBegin; 2174 PetscValidHeaderSpecific(tao,TAO_CLASSID,1); 2175 PetscValidIntPointer(iter,2); 2176 *iter = tao->niter; 2177 PetscFunctionReturn(0); 2178 } 2179 2180 #undef __FUNCT__ 2181 #define __FUNCT__ "TaoSetIterationNumber" 2182 /*@ 2183 TaoSetIterationNumber - Sets the current iteration number. 2184 2185 Not Collective 2186 2187 Input Parameter: 2188 . tao - Tao context 2189 . iter - iteration number 2190 2191 Level: developer 2192 2193 .keywords: Tao, nonlinear, set, iteration, number, 2194 2195 .seealso: TaoGetLinearSolveIterations() 2196 @*/ 2197 PetscErrorCode TaoSetIterationNumber(Tao tao,PetscInt iter) 2198 { 2199 PetscErrorCode ierr; 2200 2201 PetscFunctionBegin; 2202 PetscValidHeaderSpecific(tao,TAO_CLASSID,1); 2203 ierr = PetscObjectSAWsTakeAccess((PetscObject)tao);CHKERRQ(ierr); 2204 tao->niter = iter; 2205 ierr = PetscObjectSAWsGrantAccess((PetscObject)tao);CHKERRQ(ierr); 2206 PetscFunctionReturn(0); 2207 } 2208 2209 #undef __FUNCT__ 2210 #define __FUNCT__ "TaoGetTotalIterationNumber" 2211 /*@ 2212 TaoGetTotalIterationNumber - Gets the total number of Tao iterations 2213 completed. This number keeps accumulating if multiple solves 2214 are called with the Tao object. 2215 2216 Not Collective 2217 2218 Input Parameter: 2219 . tao - Tao context 2220 2221 Output Parameter: 2222 . iter - iteration number 2223 2224 Notes: 2225 The total iteration count is updated after each solve, if there is a current 2226 TaoSolve() in progress then those iterations are not yet counted. 
2227 2228 Level: intermediate 2229 2230 .keywords: Tao, nonlinear, get, iteration, number, 2231 2232 .seealso: TaoGetLinearSolveIterations() 2233 @*/ 2234 PetscErrorCode TaoGetTotalIterationNumber(Tao tao,PetscInt *iter) 2235 { 2236 PetscFunctionBegin; 2237 PetscValidHeaderSpecific(tao,TAO_CLASSID,1); 2238 PetscValidIntPointer(iter,2); 2239 *iter = tao->ntotalits; 2240 PetscFunctionReturn(0); 2241 } 2242 2243 #undef __FUNCT__ 2244 #define __FUNCT__ "TaoSetTotalIterationNumber" 2245 /*@ 2246 TaoSetTotalIterationNumber - Sets the current total iteration number. 2247 2248 Not Collective 2249 2250 Input Parameter: 2251 . tao - Tao context 2252 . iter - iteration number 2253 2254 Level: developer 2255 2256 .keywords: Tao, nonlinear, set, iteration, number, 2257 2258 .seealso: TaoGetLinearSolveIterations() 2259 @*/ 2260 PetscErrorCode TaoSetTotalIterationNumber(Tao tao,PetscInt iter) 2261 { 2262 PetscErrorCode ierr; 2263 2264 PetscFunctionBegin; 2265 PetscValidHeaderSpecific(tao,TAO_CLASSID,1); 2266 ierr = PetscObjectSAWsTakeAccess((PetscObject)tao);CHKERRQ(ierr); 2267 tao->ntotalits = iter; 2268 ierr = PetscObjectSAWsGrantAccess((PetscObject)tao);CHKERRQ(ierr); 2269 PetscFunctionReturn(0); 2270 } 2271 2272 #undef __FUNCT__ 2273 #define __FUNCT__ "TaoSetConvergedReason" 2274 /*@ 2275 TaoSetConvergedReason - Sets the termination flag on a Tao object 2276 2277 Logically Collective on Tao 2278 2279 Input Parameters: 2280 + tao - the Tao context 2281 - reason - one of 2282 $ TAO_CONVERGED_ATOL (2), 2283 $ TAO_CONVERGED_RTOL (3), 2284 $ TAO_CONVERGED_STEPTOL (4), 2285 $ TAO_CONVERGED_MINF (5), 2286 $ TAO_CONVERGED_USER (6), 2287 $ TAO_DIVERGED_MAXITS (-2), 2288 $ TAO_DIVERGED_NAN (-4), 2289 $ TAO_DIVERGED_MAXFCN (-5), 2290 $ TAO_DIVERGED_LS_FAILURE (-6), 2291 $ TAO_DIVERGED_TR_REDUCTION (-7), 2292 $ TAO_DIVERGED_USER (-8), 2293 $ TAO_CONTINUE_ITERATING (0) 2294 2295 Level: intermediate 2296 2297 @*/ 2298 PetscErrorCode TaoSetConvergedReason(Tao tao, TaoConvergedReason reason) 
2299 { 2300 PetscValidHeaderSpecific(tao,TAO_CLASSID,1); 2301 PetscFunctionBegin; 2302 tao->reason = reason; 2303 PetscFunctionReturn(0); 2304 } 2305 2306 #undef __FUNCT__ 2307 #define __FUNCT__ "TaoGetConvergedReason" 2308 /*@ 2309 TaoGetConvergedReason - Gets the reason the Tao iteration was stopped. 2310 2311 Not Collective 2312 2313 Input Parameter: 2314 . tao - the Tao solver context 2315 2316 Output Parameter: 2317 . reason - one of 2318 $ TAO_CONVERGED_GATOL (3) ||g(X)|| < gatol 2319 $ TAO_CONVERGED_GRTOL (4) ||g(X)|| / f(X) < grtol 2320 $ TAO_CONVERGED_GTTOL (5) ||g(X)|| / ||g(X0)|| < gttol 2321 $ TAO_CONVERGED_STEPTOL (6) step size small 2322 $ TAO_CONVERGED_MINF (7) F < F_min 2323 $ TAO_CONVERGED_USER (8) User defined 2324 $ TAO_DIVERGED_MAXITS (-2) its > maxits 2325 $ TAO_DIVERGED_NAN (-4) Numerical problems 2326 $ TAO_DIVERGED_MAXFCN (-5) fevals > max_funcsals 2327 $ TAO_DIVERGED_LS_FAILURE (-6) line search failure 2328 $ TAO_DIVERGED_TR_REDUCTION (-7) trust region failure 2329 $ TAO_DIVERGED_USER(-8) (user defined) 2330 $ TAO_CONTINUE_ITERATING (0) 2331 2332 where 2333 + X - current solution 2334 . X0 - initial guess 2335 . f(X) - current function value 2336 . f(X*) - true solution (estimated) 2337 . g(X) - current gradient 2338 . its - current iterate number 2339 . maxits - maximum number of iterates 2340 . 
fevals - number of function evaluations 2341 - max_funcsals - maximum number of function evaluations 2342 2343 Level: intermediate 2344 2345 .seealso: TaoSetConvergenceTest(), TaoSetTolerances() 2346 2347 @*/ 2348 PetscErrorCode TaoGetConvergedReason(Tao tao, TaoConvergedReason *reason) 2349 { 2350 PetscFunctionBegin; 2351 PetscValidHeaderSpecific(tao,TAO_CLASSID,1); 2352 PetscValidPointer(reason,2); 2353 *reason = tao->reason; 2354 PetscFunctionReturn(0); 2355 } 2356 2357 #undef __FUNCT__ 2358 #define __FUNCT__ "TaoGetSolutionStatus" 2359 /*@ 2360 TaoGetSolutionStatus - Get the current iterate, objective value, 2361 residual, infeasibility, and termination 2362 2363 Not Collective 2364 2365 Input Parameters: 2366 . tao - the Tao context 2367 2368 Output Parameters: 2369 + iterate - the current iterate number (>=0) 2370 . f - the current function value 2371 . gnorm - the square of the gradient norm, duality gap, or other measure indicating distance from optimality. 2372 . cnorm - the infeasibility of the current solution with regard to the constraints. 2373 . xdiff - the step length or trust region radius of the most recent iterate. 2374 - reason - The termination reason, which can equal TAO_CONTINUE_ITERATING 2375 2376 Level: intermediate 2377 2378 Note: 2379 TAO returns the values set by the solvers in the routine TaoMonitor(). 2380 2381 Note: 2382 If any of the output arguments are set to NULL, no corresponding value will be returned. 
2383 2384 .seealso: TaoMonitor(), TaoGetConvergedReason() 2385 @*/ 2386 PetscErrorCode TaoGetSolutionStatus(Tao tao, PetscInt *its, PetscReal *f, PetscReal *gnorm, PetscReal *cnorm, PetscReal *xdiff, TaoConvergedReason *reason) 2387 { 2388 PetscFunctionBegin; 2389 if (its) *its=tao->niter; 2390 if (f) *f=tao->fc; 2391 if (gnorm) *gnorm=tao->residual; 2392 if (cnorm) *cnorm=tao->cnorm; 2393 if (reason) *reason=tao->reason; 2394 if (xdiff) *xdiff=tao->step; 2395 PetscFunctionReturn(0); 2396 } 2397 2398 #undef __FUNCT__ 2399 #define __FUNCT__ "TaoGetType" 2400 /*@C 2401 TaoGetType - Gets the current Tao algorithm. 2402 2403 Not Collective 2404 2405 Input Parameter: 2406 . tao - the Tao solver context 2407 2408 Output Parameter: 2409 . type - Tao method 2410 2411 Level: intermediate 2412 2413 @*/ 2414 PetscErrorCode TaoGetType(Tao tao, const TaoType *type) 2415 { 2416 PetscFunctionBegin; 2417 PetscValidHeaderSpecific(tao,TAO_CLASSID,1); 2418 PetscValidPointer(type,2); 2419 *type=((PetscObject)tao)->type_name; 2420 PetscFunctionReturn(0); 2421 } 2422 2423 #undef __FUNCT__ 2424 #define __FUNCT__ "TaoMonitor" 2425 /*@C 2426 TaoMonitor - Monitor the solver and the current solution. This 2427 routine will record the iteration number and residual statistics, 2428 call any monitors specified by the user, and calls the convergence-check routine. 2429 2430 Input Parameters: 2431 + tao - the Tao context 2432 . its - the current iterate number (>=0) 2433 . f - the current objective function value 2434 . res - the gradient norm, square root of the duality gap, or other measure indicating distince from optimality. This measure will be recorded and 2435 used for some termination tests. 2436 . cnorm - the infeasibility of the current solution with regard to the constraints. 2437 - steplength - multiple of the step direction added to the previous iterate. 2438 2439 Output Parameters: 2440 . 
reason - The termination reason, which can equal TAO_CONTINUE_ITERATING 2441 2442 Options Database Key: 2443 . -tao_monitor - Use the default monitor, which prints statistics to standard output 2444 2445 .seealso TaoGetConvergedReason(), TaoDefaultMonitor(), TaoSetMonitor() 2446 2447 Level: developer 2448 2449 @*/ 2450 PetscErrorCode TaoMonitor(Tao tao, PetscInt its, PetscReal f, PetscReal res, PetscReal cnorm, PetscReal steplength, TaoConvergedReason *reason) 2451 { 2452 PetscErrorCode ierr; 2453 PetscInt i; 2454 2455 PetscFunctionBegin; 2456 PetscValidHeaderSpecific(tao,TAO_CLASSID,1); 2457 tao->fc = f; 2458 tao->residual = res; 2459 tao->cnorm = cnorm; 2460 tao->step = steplength; 2461 if (!its) { 2462 tao->cnorm0 = cnorm; tao->gnorm0 = res; 2463 } 2464 TaoLogConvergenceHistory(tao,f,res,cnorm,tao->ksp_its); 2465 if (PetscIsInfOrNanReal(f) || PetscIsInfOrNanReal(res)) SETERRQ(PETSC_COMM_SELF,1, "User provided compute function generated Inf or NaN"); 2466 if (tao->ops->convergencetest) { 2467 ierr = (*tao->ops->convergencetest)(tao,tao->cnvP);CHKERRQ(ierr); 2468 } 2469 for (i=0;i<tao->numbermonitors;i++) { 2470 ierr = (*tao->monitor[i])(tao,tao->monitorcontext[i]);CHKERRQ(ierr); 2471 } 2472 *reason = tao->reason; 2473 PetscFunctionReturn(0); 2474 } 2475 2476 #undef __FUNCT__ 2477 #define __FUNCT__ "TaoSetConvergenceHistory" 2478 /*@ 2479 TaoSetConvergenceHistory - Sets the array used to hold the convergence history. 2480 2481 Logically Collective on Tao 2482 2483 Input Parameters: 2484 + tao - the Tao solver context 2485 . obj - array to hold objective value history 2486 . resid - array to hold residual history 2487 . cnorm - array to hold constraint violation history 2488 . lits - integer array holds the number of linear iterations for each Tao iteration 2489 . 
na - size of obj, resid, and cnorm 2490 - reset - PetscTrue indicates each new minimization resets the history counter to zero, 2491 else it continues storing new values for new minimizations after the old ones 2492 2493 Notes: 2494 If set, TAO will fill the given arrays with the indicated 2495 information at each iteration. If 'obj','resid','cnorm','lits' are 2496 *all* NULL then space (using size na, or 1000 if na is PETSC_DECIDE or 2497 PETSC_DEFAULT) is allocated for the history. 2498 If not all are NULL, then only the non-NULL information categories 2499 will be stored, the others will be ignored. 2500 2501 Any convergence information after iteration number 'na' will not be stored. 2502 2503 This routine is useful, e.g., when running a code for purposes 2504 of accurate performance monitoring, when no I/O should be done 2505 during the section of code that is being timed. 2506 2507 Level: intermediate 2508 2509 .seealso: TaoGetConvergenceHistory() 2510 2511 @*/ 2512 PetscErrorCode TaoSetConvergenceHistory(Tao tao, PetscReal *obj, PetscReal *resid, PetscReal *cnorm, PetscInt *lits, PetscInt na,PetscBool reset) 2513 { 2514 PetscErrorCode ierr; 2515 2516 PetscFunctionBegin; 2517 PetscValidHeaderSpecific(tao,TAO_CLASSID,1); 2518 if (obj) PetscValidScalarPointer(obj,2); 2519 if (resid) PetscValidScalarPointer(resid,3); 2520 if (cnorm) PetscValidScalarPointer(cnorm,4); 2521 if (lits) PetscValidIntPointer(lits,5); 2522 2523 if (na == PETSC_DECIDE || na == PETSC_DEFAULT) na = 1000; 2524 if (!obj && !resid && !cnorm && !lits) { 2525 ierr = PetscCalloc1(na,&obj);CHKERRQ(ierr); 2526 ierr = PetscCalloc1(na,&resid);CHKERRQ(ierr); 2527 ierr = PetscCalloc1(na,&cnorm);CHKERRQ(ierr); 2528 ierr = PetscCalloc1(na,&lits);CHKERRQ(ierr); 2529 tao->hist_malloc=PETSC_TRUE; 2530 } 2531 2532 tao->hist_obj = obj; 2533 tao->hist_resid = resid; 2534 tao->hist_cnorm = cnorm; 2535 tao->hist_lits = lits; 2536 tao->hist_max = na; 2537 tao->hist_reset = reset; 2538 tao->hist_len = 0; 2539 
PetscFunctionReturn(0); 2540 } 2541 2542 #undef __FUNCT__ 2543 #define __FUNCT__ "TaoGetConvergenceHistory" 2544 /*@C 2545 TaoGetConvergenceHistory - Gets the arrays used to hold the convergence history. 2546 2547 Collective on Tao 2548 2549 Input Parameter: 2550 . tao - the Tao context 2551 2552 Output Parameters: 2553 + obj - array used to hold objective value history 2554 . resid - array used to hold residual history 2555 . cnorm - array used to hold constraint violation history 2556 . lits - integer array used to hold linear solver iteration count 2557 - nhist - size of obj, resid, cnorm, and lits (will be less than or equal to na given in TaoSetHistory) 2558 2559 Notes: 2560 This routine must be preceded by calls to TaoSetConvergenceHistory() 2561 and TaoSolve(), otherwise it returns useless information. 2562 2563 The calling sequence for this routine in Fortran is 2564 $ call TaoGetConvergenceHistory(Tao tao, PetscInt nhist, PetscErrorCode ierr) 2565 2566 This routine is useful, e.g., when running a code for purposes 2567 of accurate performance monitoring, when no I/O should be done 2568 during the section of code that is being timed. 2569 2570 Level: advanced 2571 2572 .seealso: TaoSetConvergenceHistory() 2573 2574 @*/ 2575 PetscErrorCode TaoGetConvergenceHistory(Tao tao, PetscReal **obj, PetscReal **resid, PetscReal **cnorm, PetscInt **lits, PetscInt *nhist) 2576 { 2577 PetscFunctionBegin; 2578 PetscValidHeaderSpecific(tao,TAO_CLASSID,1); 2579 if (obj) *obj = tao->hist_obj; 2580 if (cnorm) *cnorm = tao->hist_cnorm; 2581 if (resid) *resid = tao->hist_resid; 2582 if (nhist) *nhist = tao->hist_len; 2583 PetscFunctionReturn(0); 2584 } 2585 2586 #undef __FUNCT__ 2587 #define __FUNCT__ "TaoSetApplicationContext" 2588 /*@ 2589 TaoSetApplicationContext - Sets the optional user-defined context for 2590 a solver. 
2591 2592 Logically Collective on Tao 2593 2594 Input Parameters: 2595 + tao - the Tao context 2596 - usrP - optional user context 2597 2598 Level: intermediate 2599 2600 .seealso: TaoGetApplicationContext(), TaoSetApplicationContext() 2601 @*/ 2602 PetscErrorCode TaoSetApplicationContext(Tao tao,void *usrP) 2603 { 2604 PetscFunctionBegin; 2605 PetscValidHeaderSpecific(tao,TAO_CLASSID,1); 2606 tao->user = usrP; 2607 PetscFunctionReturn(0); 2608 } 2609 2610 #undef __FUNCT__ 2611 #define __FUNCT__ "TaoGetApplicationContext" 2612 /*@ 2613 TaoGetApplicationContext - Gets the user-defined context for a 2614 TAO solvers. 2615 2616 Not Collective 2617 2618 Input Parameter: 2619 . tao - Tao context 2620 2621 Output Parameter: 2622 . usrP - user context 2623 2624 Level: intermediate 2625 2626 .seealso: TaoSetApplicationContext() 2627 @*/ 2628 PetscErrorCode TaoGetApplicationContext(Tao tao,void *usrP) 2629 { 2630 PetscFunctionBegin; 2631 PetscValidHeaderSpecific(tao,TAO_CLASSID,1); 2632 *(void**)usrP = tao->user; 2633 PetscFunctionReturn(0); 2634 } 2635 2636 #undef __FUNCT__ 2637 #define __FUNCT__ "TaoSetGradientNorm" 2638 /*@ 2639 TaoSetGradientNorm - Sets the matrix used to define the inner product that measures the size of the gradient. 
2640 2641 Collective on tao 2642 2643 Input Parameters: 2644 + tao - the Tao context 2645 - M - gradient norm 2646 2647 Level: beginner 2648 2649 .seealso: TaoGetGradientNorm(), TaoGradientNorm() 2650 @*/ 2651 PetscErrorCode TaoSetGradientNorm(Tao tao, Mat M) 2652 { 2653 PetscErrorCode ierr; 2654 2655 PetscFunctionBegin; 2656 PetscValidHeaderSpecific(tao,TAO_CLASSID,1); 2657 2658 if (tao->gradient_norm) { 2659 ierr = PetscObjectDereference((PetscObject)tao->gradient_norm);CHKERRQ(ierr); 2660 ierr = VecDestroy(&tao->gradient_norm_tmp);CHKERRQ(ierr); 2661 } 2662 2663 ierr = PetscObjectReference((PetscObject)M);CHKERRQ(ierr); 2664 tao->gradient_norm = M; 2665 ierr = MatCreateVecs(M, NULL, &tao->gradient_norm_tmp);CHKERRQ(ierr); 2666 PetscFunctionReturn(0); 2667 } 2668 2669 #undef __FUNCT__ 2670 #define __FUNCT__ "TaoGetGradientNorm" 2671 /*@ 2672 TaoGetGradientNorm - Returns the matrix used to define the inner product for measuring the size of the gradient. 2673 2674 Not Collective 2675 2676 Input Parameter: 2677 . tao - Tao context 2678 2679 Output Parameter: 2680 . M - gradient norm 2681 2682 Level: beginner 2683 2684 .seealso: TaoSetGradientNorm(), TaoGradientNorm() 2685 @*/ 2686 PetscErrorCode TaoGetGradientNorm(Tao tao, Mat *M) 2687 { 2688 PetscFunctionBegin; 2689 PetscValidHeaderSpecific(tao,TAO_CLASSID,1); 2690 *M = tao->gradient_norm; 2691 PetscFunctionReturn(0); 2692 } 2693 2694 #undef __FUNCT__ 2695 #define __FUNCT__ "TaoGradientNorm" 2696 /*c 2697 TaoGradientNorm - Compute the norm with respect to the inner product the user has set. 2698 2699 Collective on tao 2700 2701 Input Parameter: 2702 . tao - the Tao context 2703 . gradient - the gradient to be computed 2704 . norm - the norm type 2705 2706 Output Parameter: 2707 . 
gnorm    - the gradient norm

   Level: developer

.seealso: TaoSetGradientNorm(), TaoGetGradientNorm()
@*/
PetscErrorCode TaoGradientNorm(Tao tao, Vec gradient, NormType type, PetscReal *gnorm)
{
  PetscErrorCode ierr;

  PetscFunctionBegin;
  /* Bug fix: the original validated 'gradient' as argument 1 (it is argument 2)
     and never validated 'tao', so error messages pointed at the wrong argument. */
  PetscValidHeaderSpecific(tao,TAO_CLASSID,1);
  PetscValidHeaderSpecific(gradient,VEC_CLASSID,2);
  PetscValidPointer(gnorm,4);

  if (tao->gradient_norm) {
    PetscScalar gnorms;

    /* With a user-set metric M the only supported norm is ||g||_M = sqrt(g'Mg). */
    if (type != NORM_2) SETERRQ(PetscObjectComm((PetscObject)gradient), PETSC_ERR_ARG_WRONGSTATE, "Norm type must be NORM_2 if an inner product for the gradient norm is set.");
    ierr = MatMult(tao->gradient_norm, gradient, tao->gradient_norm_tmp);CHKERRQ(ierr);
    ierr = VecDot(gradient, tao->gradient_norm_tmp, &gnorms);CHKERRQ(ierr);
    *gnorm = PetscRealPart(PetscSqrtScalar(gnorms));
  } else {
    /* No metric set: fall back to the ordinary vector norm of the requested type. */
    ierr = VecNorm(gradient, type, gnorm);CHKERRQ(ierr);
  }
  PetscFunctionReturn(0);
}