#include <petsc/private/taoimpl.h> /*I "petsctao.h" I*/

/*@
  TaoSetSolution - Sets the vector holding the initial guess for the solve

  Logically collective on tao

  Input Parameters:
+ tao - the Tao context
- x0 - the initial guess

  Level: beginner

.seealso: `Tao`, `TaoCreate()`, `TaoSolve()`, `TaoGetSolution()`
@*/
PetscErrorCode TaoSetSolution(Tao tao, Vec x0)
{
  PetscFunctionBegin;
  PetscValidHeaderSpecific(tao, TAO_CLASSID, 1);
  if (x0) PetscValidHeaderSpecific(x0, VEC_CLASSID, 2);
  /* reference x0 before destroying the old solution so the call is safe even when x0 == tao->solution */
  PetscCall(PetscObjectReference((PetscObject)x0));
  PetscCall(VecDestroy(&tao->solution));
  tao->solution = x0;
  PetscFunctionReturn(0);
}

/* Compares the hand-coded gradient g1 at x against a finite-difference approximation and
   prints norms of the difference; activated by the -tao_test_gradient options. */
PetscErrorCode TaoTestGradient(Tao tao, Vec x, Vec g1)
{
  Vec               g2, g3;
  PetscBool         complete_print = PETSC_FALSE, test = PETSC_FALSE;
  PetscReal         hcnorm, fdnorm, hcmax, fdmax, diffmax, diffnorm;
  PetscScalar       dot;
  MPI_Comm          comm;
  PetscViewer       viewer, mviewer;
  PetscViewerFormat format;
  PetscInt          tabs;
  static PetscBool  directionsprinted = PETSC_FALSE; /* explanatory banner is printed only once per run */

  PetscFunctionBegin;
  PetscObjectOptionsBegin((PetscObject)tao);
  PetscCall(PetscOptionsName("-tao_test_gradient", "Compare hand-coded and finite difference Gradients", "None", &test));
  PetscCall(PetscOptionsViewer("-tao_test_gradient_view", "View difference between hand-coded and finite difference Gradients element entries", "None", &mviewer, &format, &complete_print));
  PetscOptionsEnd();
  if (!test) {
    /* the options query created mviewer; release it when testing is not requested */
    if (complete_print) PetscCall(PetscViewerDestroy(&mviewer));
    PetscFunctionReturn(0);
  }

  PetscCall(PetscObjectGetComm((PetscObject)tao, &comm));
  PetscCall(PetscViewerASCIIGetStdout(comm, &viewer));
  /* remember the current tab level so it can be restored before returning */
  PetscCall(PetscViewerASCIIGetTab(viewer, &tabs));
  PetscCall(PetscViewerASCIISetTab(viewer, ((PetscObject)tao)->tablevel));
  PetscCall(PetscViewerASCIIPrintf(viewer, " ---------- Testing Gradient -------------\n"));
  if (!complete_print && !directionsprinted) {
    PetscCall(PetscViewerASCIIPrintf(viewer, " Run with -tao_test_gradient_view and optionally -tao_test_gradient <threshold> to show difference\n"));
    PetscCall(PetscViewerASCIIPrintf(viewer, " of hand-coded and finite difference gradient entries greater than <threshold>.\n"));
  }
  if (!directionsprinted) {
    PetscCall(PetscViewerASCIIPrintf(viewer, " Testing hand-coded Gradient, if (for double precision runs) ||G - Gfd||/||G|| is\n"));
    PetscCall(PetscViewerASCIIPrintf(viewer, " O(1.e-8), the hand-coded Gradient is probably correct.\n"));
    directionsprinted = PETSC_TRUE;
  }
  if (complete_print) PetscCall(PetscViewerPushFormat(mviewer, format));

  PetscCall(VecDuplicate(x, &g2));
  PetscCall(VecDuplicate(x, &g3));

  /* Compute finite difference gradient, assume the gradient is already computed by TaoComputeGradient() and put into g1 */
  PetscCall(TaoDefaultComputeGradient(tao, x, g2, NULL));

  /* g3 = g1 - g2 holds the hand-coded minus finite-difference discrepancy */
  PetscCall(VecNorm(g2, NORM_2, &fdnorm));
  PetscCall(VecNorm(g1, NORM_2, &hcnorm));
  PetscCall(VecNorm(g2, NORM_INFINITY, &fdmax));
  PetscCall(VecNorm(g1, NORM_INFINITY, &hcmax));
  PetscCall(VecDot(g1, g2, &dot));
  PetscCall(VecCopy(g1, g3));
  PetscCall(VecAXPY(g3, -1.0, g2));
  PetscCall(VecNorm(g3, NORM_2, &diffnorm));
  PetscCall(VecNorm(g3, NORM_INFINITY, &diffmax));
  PetscCall(PetscViewerASCIIPrintf(viewer, " ||Gfd|| %g, ||G|| = %g, angle cosine = (Gfd'G)/||Gfd||||G|| = %g\n", (double)fdnorm, (double)hcnorm, (double)(PetscRealPart(dot) / (fdnorm * hcnorm))));
  PetscCall(PetscViewerASCIIPrintf(viewer, " 2-norm ||G - Gfd||/||G|| = %g, ||G - Gfd|| = %g\n", (double)(diffnorm / PetscMax(hcnorm, fdnorm)), (double)diffnorm));
  PetscCall(PetscViewerASCIIPrintf(viewer, " max-norm ||G - Gfd||/||G|| = %g, ||G - Gfd|| = %g\n", (double)(diffmax / PetscMax(hcmax, fdmax)), (double)diffmax));

  if (complete_print) {
    PetscCall(PetscViewerASCIIPrintf(viewer, " Hand-coded gradient ----------\n"));
    PetscCall(VecView(g1, mviewer));
    PetscCall(PetscViewerASCIIPrintf(viewer, " Finite difference gradient ----------\n"));
    PetscCall(VecView(g2, mviewer));
    PetscCall(PetscViewerASCIIPrintf(viewer, " Hand-coded minus finite-difference gradient ----------\n"));
    PetscCall(VecView(g3, mviewer));
  }
  PetscCall(VecDestroy(&g2));
  PetscCall(VecDestroy(&g3));

  if (complete_print) {
    PetscCall(PetscViewerPopFormat(mviewer));
    PetscCall(PetscViewerDestroy(&mviewer));
  }
  /* restore the tab level saved on entry */
  PetscCall(PetscViewerASCIISetTab(viewer, tabs));
  PetscFunctionReturn(0);
}

/*@
  TaoComputeGradient - Computes the gradient of the objective function

  Collective on tao

  Input Parameters:
+ tao - the Tao context
- X - input vector

  Output Parameter:
. G - gradient vector

  Options Database Keys:
+ -tao_test_gradient - compare the user provided gradient with one compute via finite differences to check for errors
- -tao_test_gradient_view - display the user provided gradient, the finite difference gradient and the difference between them to help users detect the location of errors in the user provided gradient

  Note:
  `TaoComputeGradient()` is typically used within the implementation of the optimization method,
  so most users would not generally call this routine themselves.

  Level: developer

.seealso: `TaoComputeObjective()`, `TaoComputeObjectiveAndGradient()`, `TaoSetGradient()`
@*/
PetscErrorCode TaoComputeGradient(Tao tao, Vec X, Vec G)
{
  PetscReal dummy; /* discarded objective value when only the combined callback exists */

  PetscFunctionBegin;
  PetscValidHeaderSpecific(tao, TAO_CLASSID, 1);
  PetscValidHeaderSpecific(X, VEC_CLASSID, 2);
  PetscValidHeaderSpecific(G, VEC_CLASSID, 3);
  PetscCheckSameComm(tao, 1, X, 2);
  PetscCheckSameComm(tao, 1, G, 3);
  /* read-lock X so the user callback cannot modify the iterate */
  PetscCall(VecLockReadPush(X));
  if (tao->ops->computegradient) {
    PetscCall(PetscLogEventBegin(TAO_GradientEval, tao, X, G, NULL));
    PetscCallBack("Tao callback gradient", (*tao->ops->computegradient)(tao, X, G, tao->user_gradP));
    PetscCall(PetscLogEventEnd(TAO_GradientEval, tao, X, G, NULL));
    tao->ngrads++;
  } else if (tao->ops->computeobjectiveandgradient) {
    /* no gradient-only callback; fall back on the combined objective/gradient routine */
    PetscCall(PetscLogEventBegin(TAO_ObjGradEval, tao, X, G, NULL));
    PetscCallBack("Tao callback objective/gradient", (*tao->ops->computeobjectiveandgradient)(tao, X, &dummy, G, tao->user_objgradP));
    PetscCall(PetscLogEventEnd(TAO_ObjGradEval, tao, X, G, NULL));
    tao->nfuncgrads++;
  } else SETERRQ(PetscObjectComm((PetscObject)tao), PETSC_ERR_ARG_WRONGSTATE, "TaoSetGradient() has not been called");
  PetscCall(VecLockReadPop(X));

  /* optionally compare against a finite-difference gradient (-tao_test_gradient) */
  PetscCall(TaoTestGradient(tao, X, G));
  PetscFunctionReturn(0);
}

/*@
  TaoComputeObjective - Computes the objective function value at a given point

  Collective on tao

  Input Parameters:
+ tao - the Tao context
- X - input vector

  Output Parameter:
. f - Objective value at X

  Note:
  `TaoComputeObjective()` is typically used within the implementation of the optimization algorithm
  so most users would not generally call this routine themselves.

  Level: developer

.seealso: `Tao`, `TaoComputeGradient()`, `TaoComputeObjectiveAndGradient()`, `TaoSetObjective()`
@*/
PetscErrorCode TaoComputeObjective(Tao tao, Vec X, PetscReal *f)
{
  Vec temp;

  PetscFunctionBegin;
  PetscValidHeaderSpecific(tao, TAO_CLASSID, 1);
  PetscValidHeaderSpecific(X, VEC_CLASSID, 2);
  PetscCheckSameComm(tao, 1, X, 2);
  /* read-lock X so the user callback cannot modify the iterate */
  PetscCall(VecLockReadPush(X));
  if (tao->ops->computeobjective) {
    PetscCall(PetscLogEventBegin(TAO_ObjectiveEval, tao, X, NULL, NULL));
    PetscCallBack("Tao callback objective", (*tao->ops->computeobjective)(tao, X, f, tao->user_objP));
    PetscCall(PetscLogEventEnd(TAO_ObjectiveEval, tao, X, NULL, NULL));
    tao->nfuncs++;
  } else if (tao->ops->computeobjectiveandgradient) {
    /* only the combined callback exists: a scratch vector receives the (discarded) gradient */
    PetscCall(PetscInfo(tao, "Duplicating variable vector in order to call func/grad routine\n"));
    PetscCall(VecDuplicate(X, &temp));
    PetscCall(PetscLogEventBegin(TAO_ObjGradEval, tao, X, NULL, NULL));
    PetscCallBack("Tao callback objective/gradient", (*tao->ops->computeobjectiveandgradient)(tao, X, f, temp, tao->user_objgradP));
    PetscCall(PetscLogEventEnd(TAO_ObjGradEval, tao, X, NULL, NULL));
    PetscCall(VecDestroy(&temp));
    tao->nfuncgrads++;
  } else SETERRQ(PetscObjectComm((PetscObject)tao), PETSC_ERR_ARG_WRONGSTATE, "TaoSetObjective() has not been called");
  PetscCall(PetscInfo(tao, "TAO Function evaluation: %20.19e\n", (double)(*f)));
  PetscCall(VecLockReadPop(X));
  PetscFunctionReturn(0);
}

/*@
  TaoComputeObjectiveAndGradient - Computes the objective function value and its gradient at a given point

  Collective on tao

  Input Parameters:
+ tao - the Tao context
- X - input vector

  Output Parameters:
+ f - Objective value at X
- g - Gradient vector at X

  Note:
  `TaoComputeObjectiveAndGradient()` is typically used within the implementation of the optimization algorithm,
  so most users would not generally call
 this routine themselves.

  Level: developer

.seealso: `TaoComputeGradient()`, `TaoComputeObjectiveAndGradient()`, `TaoSetObjective()`
@*/
PetscErrorCode TaoComputeObjectiveAndGradient(Tao tao, Vec X, PetscReal *f, Vec G)
{
  PetscFunctionBegin;
  PetscValidHeaderSpecific(tao, TAO_CLASSID, 1);
  PetscValidHeaderSpecific(X, VEC_CLASSID, 2);
  PetscValidHeaderSpecific(G, VEC_CLASSID, 4);
  PetscCheckSameComm(tao, 1, X, 2);
  PetscCheckSameComm(tao, 1, G, 4);
  /* read-lock X so the user callbacks cannot modify the iterate */
  PetscCall(VecLockReadPush(X));
  if (tao->ops->computeobjectiveandgradient) {
    PetscCall(PetscLogEventBegin(TAO_ObjGradEval, tao, X, G, NULL));
    if (tao->ops->computegradient == TaoDefaultComputeGradient) {
      /* the gradient is the finite-difference default: evaluate the objective and
         difference it directly instead of calling the combined user routine */
      PetscCall(TaoComputeObjective(tao, X, f));
      PetscCall(TaoDefaultComputeGradient(tao, X, G, NULL));
    } else PetscCallBack("Tao callback objective/gradient", (*tao->ops->computeobjectiveandgradient)(tao, X, f, G, tao->user_objgradP));
    PetscCall(PetscLogEventEnd(TAO_ObjGradEval, tao, X, G, NULL));
    tao->nfuncgrads++;
  } else if (tao->ops->computeobjective && tao->ops->computegradient) {
    /* no combined callback: evaluate objective and gradient separately */
    PetscCall(PetscLogEventBegin(TAO_ObjectiveEval, tao, X, NULL, NULL));
    PetscCallBack("Tao callback objective", (*tao->ops->computeobjective)(tao, X, f, tao->user_objP));
    PetscCall(PetscLogEventEnd(TAO_ObjectiveEval, tao, X, NULL, NULL));
    tao->nfuncs++;
    PetscCall(PetscLogEventBegin(TAO_GradientEval, tao, X, G, NULL));
    PetscCallBack("Tao callback gradient", (*tao->ops->computegradient)(tao, X, G, tao->user_gradP));
    PetscCall(PetscLogEventEnd(TAO_GradientEval, tao, X, G, NULL));
    tao->ngrads++;
  } else SETERRQ(PetscObjectComm((PetscObject)tao), PETSC_ERR_ARG_WRONGSTATE, "TaoSetObjective() or TaoSetGradient() not set");
  PetscCall(PetscInfo(tao, "TAO Function evaluation: %20.19e\n", (double)(*f)));
  PetscCall(VecLockReadPop(X));

  /* optionally compare against a finite-difference gradient (-tao_test_gradient) */
  PetscCall(TaoTestGradient(tao, X, G));
  PetscFunctionReturn(0);
}

/*@C
  TaoSetObjective - Sets the function evaluation routine for minimization

  Logically collective on tao

  Input Parameters:
+ tao - the Tao context
. func - the objective function
- ctx - [optional] user-defined context for private data for the function evaluation
        routine (may be NULL)

  Calling sequence of func:
$ func (Tao tao, Vec x, PetscReal *f, void *ctx);

+ x - input vector
. f - function value
- ctx - [optional] user-defined function context

  Level: beginner

.seealso: `TaoSetGradient()`, `TaoSetHessian()`, `TaoSetObjectiveAndGradient()`, `TaoGetObjective()`
@*/
PetscErrorCode TaoSetObjective(Tao tao, PetscErrorCode (*func)(Tao, Vec, PetscReal *, void *), void *ctx)
{
  PetscFunctionBegin;
  PetscValidHeaderSpecific(tao, TAO_CLASSID, 1);
  /* NULL arguments leave the currently set context/callback unchanged */
  if (ctx) tao->user_objP = ctx;
  if (func) tao->ops->computeobjective = func;
  PetscFunctionReturn(0);
}

/*@C
  TaoGetObjective - Gets the function evaluation routine for the function to be minimized

  Not collective

  Input Parameter:
. tao - the Tao context

  Output Parameters:
+ func - the objective function
- ctx - the user-defined context for private data for the function evaluation

  Calling sequence of func:
$ func (Tao tao, Vec x, PetscReal *f, void *ctx);

+ x - input vector
.
f - function value 305 - ctx - [optional] user-defined function context 306 307 Level: beginner 308 309 .seealso: `Tao`, `TaoSetGradient()`, `TaoSetHessian()`, `TaoSetObjective()` 310 @*/ 311 PetscErrorCode TaoGetObjective(Tao tao, PetscErrorCode (**func)(Tao, Vec, PetscReal *, void *), void **ctx) 312 { 313 PetscFunctionBegin; 314 PetscValidHeaderSpecific(tao, TAO_CLASSID, 1); 315 if (func) *func = tao->ops->computeobjective; 316 if (ctx) *ctx = tao->user_objP; 317 PetscFunctionReturn(0); 318 } 319 320 /*@C 321 TaoSetResidualRoutine - Sets the residual evaluation routine for least-square applications 322 323 Logically collective on tao 324 325 Input Parameters: 326 + tao - the Tao context 327 . func - the residual evaluation routine 328 - ctx - [optional] user-defined context for private data for the function evaluation 329 routine (may be NULL) 330 331 Calling sequence of func: 332 $ func (Tao tao, Vec x, Vec f, void *ctx); 333 334 + x - input vector 335 . f - function value vector 336 - ctx - [optional] user-defined function context 337 338 Level: beginner 339 340 .seealso: `Tao`, `TaoSetObjective()`, `TaoSetJacobianRoutine()` 341 @*/ 342 PetscErrorCode TaoSetResidualRoutine(Tao tao, Vec res, PetscErrorCode (*func)(Tao, Vec, Vec, void *), void *ctx) 343 { 344 PetscFunctionBegin; 345 PetscValidHeaderSpecific(tao, TAO_CLASSID, 1); 346 PetscValidHeaderSpecific(res, VEC_CLASSID, 2); 347 PetscCall(PetscObjectReference((PetscObject)res)); 348 if (tao->ls_res) PetscCall(VecDestroy(&tao->ls_res)); 349 tao->ls_res = res; 350 tao->user_lsresP = ctx; 351 tao->ops->computeresidual = func; 352 353 PetscFunctionReturn(0); 354 } 355 356 /*@ 357 TaoSetResidualWeights - Give weights for the residual values. A vector can be used if only diagonal terms are used, otherwise a matrix can be give. 358 If this function is not provided, or if sigma_v and sigma_w are both NULL, then the identity matrix will be used for weights. 
359 360 Collective on tao 361 362 Input Parameters: 363 + tao - the Tao context 364 . sigma_v - vector of weights (diagonal terms only) 365 . n - the number of weights (if using off-diagonal) 366 . rows - index list of rows for sigma_w 367 . cols - index list of columns for sigma_w 368 - vals - array of weights 369 370 Note: Either sigma_v or sigma_w (or both) should be NULL 371 372 Level: intermediate 373 374 .seealso: `Tao`, `TaoSetResidualRoutine()` 375 @*/ 376 PetscErrorCode TaoSetResidualWeights(Tao tao, Vec sigma_v, PetscInt n, PetscInt *rows, PetscInt *cols, PetscReal *vals) 377 { 378 PetscInt i; 379 380 PetscFunctionBegin; 381 PetscValidHeaderSpecific(tao, TAO_CLASSID, 1); 382 if (sigma_v) PetscValidHeaderSpecific(sigma_v, VEC_CLASSID, 2); 383 PetscCall(PetscObjectReference((PetscObject)sigma_v)); 384 PetscCall(VecDestroy(&tao->res_weights_v)); 385 tao->res_weights_v = sigma_v; 386 if (vals) { 387 PetscCall(PetscFree(tao->res_weights_rows)); 388 PetscCall(PetscFree(tao->res_weights_cols)); 389 PetscCall(PetscFree(tao->res_weights_w)); 390 PetscCall(PetscMalloc1(n, &tao->res_weights_rows)); 391 PetscCall(PetscMalloc1(n, &tao->res_weights_cols)); 392 PetscCall(PetscMalloc1(n, &tao->res_weights_w)); 393 tao->res_weights_n = n; 394 for (i = 0; i < n; i++) { 395 tao->res_weights_rows[i] = rows[i]; 396 tao->res_weights_cols[i] = cols[i]; 397 tao->res_weights_w[i] = vals[i]; 398 } 399 } else { 400 tao->res_weights_n = 0; 401 tao->res_weights_rows = NULL; 402 tao->res_weights_cols = NULL; 403 } 404 PetscFunctionReturn(0); 405 } 406 407 /*@ 408 TaoComputeResidual - Computes a least-squares residual vector at a given point 409 410 Collective on tao 411 412 Input Parameters: 413 + tao - the Tao context 414 - X - input vector 415 416 Output Parameter: 417 . f - Objective vector at X 418 419 Notes: 420 `TaoComputeResidual()` is typically used within the implementation of the optimization algorithm, 421 so most users would not generally call this routine themselves. 

  Level: advanced

.seealso: `Tao`, `TaoSetResidualRoutine()`
@*/
PetscErrorCode TaoComputeResidual(Tao tao, Vec X, Vec F)
{
  PetscFunctionBegin;
  PetscValidHeaderSpecific(tao, TAO_CLASSID, 1);
  PetscValidHeaderSpecific(X, VEC_CLASSID, 2);
  PetscValidHeaderSpecific(F, VEC_CLASSID, 3);
  PetscCheckSameComm(tao, 1, X, 2);
  PetscCheckSameComm(tao, 1, F, 3);
  if (tao->ops->computeresidual) {
    /* residual evaluations are counted and logged as objective evaluations */
    PetscCall(PetscLogEventBegin(TAO_ObjectiveEval, tao, X, NULL, NULL));
    PetscCallBack("Tao callback least-squares residual", (*tao->ops->computeresidual)(tao, X, F, tao->user_lsresP));
    PetscCall(PetscLogEventEnd(TAO_ObjectiveEval, tao, X, NULL, NULL));
    tao->nfuncs++;
  } else SETERRQ(PetscObjectComm((PetscObject)tao), PETSC_ERR_ARG_WRONGSTATE, "TaoSetResidualRoutine() has not been called");
  PetscCall(PetscInfo(tao, "TAO least-squares residual evaluation.\n"));
  PetscFunctionReturn(0);
}

/*@C
  TaoSetGradient - Sets the gradient evaluation routine for the function to be optimized

  Logically collective on tao

  Input Parameters:
+ tao - the Tao context
. g - [optional] the vector to internally hold the gradient computation
. func - the gradient function
- ctx - [optional] user-defined context for private data for the gradient evaluation
        routine (may be NULL)

  Calling sequence of func:
$ func (Tao tao, Vec x, Vec g, void *ctx);

+ x - input vector
.
 g - gradient value (output)
- ctx - [optional] user-defined function context

  Level: beginner

.seealso: `Tao`, `TaoSolve()`, `TaoSetObjective()`, `TaoSetHessian()`, `TaoSetObjectiveAndGradient()`, `TaoGetGradient()`
@*/
PetscErrorCode TaoSetGradient(Tao tao, Vec g, PetscErrorCode (*func)(Tao, Vec, Vec, void *), void *ctx)
{
  PetscFunctionBegin;
  PetscValidHeaderSpecific(tao, TAO_CLASSID, 1);
  if (g) {
    PetscValidHeaderSpecific(g, VEC_CLASSID, 2);
    PetscCheckSameComm(tao, 1, g, 2);
    /* reference before destroy so passing the currently held vector is safe */
    PetscCall(PetscObjectReference((PetscObject)g));
    PetscCall(VecDestroy(&tao->gradient));
    tao->gradient = g;
  }
  /* NULL arguments leave the currently set callback/context unchanged */
  if (func) tao->ops->computegradient = func;
  if (ctx) tao->user_gradP = ctx;
  PetscFunctionReturn(0);
}

/*@C
  TaoGetGradient - Gets the gradient evaluation routine for the function being optimized

  Not collective

  Input Parameter:
. tao - the Tao context

  Output Parameters:
+ g - the vector to internally hold the gradient computation
. func - the gradient function
- ctx - user-defined context for private data for the gradient evaluation routine

  Calling sequence of func:
$ func (Tao tao, Vec x, Vec g, void *ctx);

+ x - input vector
.
 g - gradient value (output)
- ctx - [optional] user-defined function context

  Level: beginner

.seealso: `Tao`, `TaoSetObjective()`, `TaoSetHessian()`, `TaoSetObjectiveAndGradient()`, `TaoSetGradient()`
@*/
PetscErrorCode TaoGetGradient(Tao tao, Vec *g, PetscErrorCode (**func)(Tao, Vec, Vec, void *), void **ctx)
{
  PetscFunctionBegin;
  PetscValidHeaderSpecific(tao, TAO_CLASSID, 1);
  /* any output may be NULL if the caller does not want it */
  if (g) *g = tao->gradient;
  if (func) *func = tao->ops->computegradient;
  if (ctx) *ctx = tao->user_gradP;
  PetscFunctionReturn(0);
}

/*@C
  TaoSetObjectiveAndGradient - Sets a combined objective function and gradient evaluation routine for the function to be optimized

  Logically collective on tao

  Input Parameters:
+ tao - the Tao context
. g - [optional] the vector to internally hold the gradient computation
. func - the gradient function
- ctx - [optional] user-defined context for private data for the gradient evaluation
        routine (may be NULL)

  Calling sequence of func:
$ func (Tao tao, Vec x, PetscReal *f, Vec g, void *ctx);

+ x - input vector
. f - objective value (output)
. g - gradient value (output)
- ctx - [optional] user-defined function context

  Level: beginner

  Note:
  For some optimization methods using a combined function can be more efficient.

.seealso: `Tao`, `TaoSolve()`, `TaoSetObjective()`, `TaoSetHessian()`, `TaoSetGradient()`, `TaoGetObjectiveAndGradient()`
@*/
PetscErrorCode TaoSetObjectiveAndGradient(Tao tao, Vec g, PetscErrorCode (*func)(Tao, Vec, PetscReal *, Vec, void *), void *ctx)
{
  PetscFunctionBegin;
  PetscValidHeaderSpecific(tao, TAO_CLASSID, 1);
  if (g) {
    PetscValidHeaderSpecific(g, VEC_CLASSID, 2);
    PetscCheckSameComm(tao, 1, g, 2);
    /* reference before destroy so passing the currently held vector is safe */
    PetscCall(PetscObjectReference((PetscObject)g));
    PetscCall(VecDestroy(&tao->gradient));
    tao->gradient = g;
  }
  /* NULL arguments leave the currently set context/callback unchanged */
  if (ctx) tao->user_objgradP = ctx;
  if (func) tao->ops->computeobjectiveandgradient = func;
  PetscFunctionReturn(0);
}

/*@C
  TaoGetObjectiveAndGradient - Gets the combined objective function and gradient evaluation routine for the function to be optimized

  Not collective

  Input Parameter:
. tao - the Tao context

  Output Parameters:
+ g - the vector to internally hold the gradient computation
. func - the gradient function
- ctx - user-defined context for private data for the gradient evaluation routine

  Calling sequence of func:
$ func (Tao tao, Vec x, PetscReal *f, Vec g, void *ctx);

+ x - input vector
. f - objective value (output)
.
g - gradient value (output) 580 - ctx - [optional] user-defined function context 581 582 Level: beginner 583 584 .seealso: `Tao`, `TaoSolve()`, `TaoSetObjective()`, `TaoSetGradient()`, `TaoSetHessian()`, `TaoSetObjectiveAndGradient()` 585 @*/ 586 PetscErrorCode TaoGetObjectiveAndGradient(Tao tao, Vec *g, PetscErrorCode (**func)(Tao, Vec, PetscReal *, Vec, void *), void **ctx) 587 { 588 PetscFunctionBegin; 589 PetscValidHeaderSpecific(tao, TAO_CLASSID, 1); 590 if (g) *g = tao->gradient; 591 if (func) *func = tao->ops->computeobjectiveandgradient; 592 if (ctx) *ctx = tao->user_objgradP; 593 PetscFunctionReturn(0); 594 } 595 596 /*@ 597 TaoIsObjectiveDefined - Checks to see if the user has 598 declared an objective-only routine. Useful for determining when 599 it is appropriate to call `TaoComputeObjective()` or 600 `TaoComputeObjectiveAndGradient()` 601 602 Not collective 603 604 Input Parameter: 605 . tao - the Tao context 606 607 Output Parameter: 608 . flg - `PETSC_TRUE` if function routine is set by user, `PETSC_FALSE` otherwise 609 610 Level: developer 611 612 .seealso: `Tao`, `TaoSetObjective()`, `TaoIsGradientDefined()`, `TaoIsObjectiveAndGradientDefined()` 613 @*/ 614 PetscErrorCode TaoIsObjectiveDefined(Tao tao, PetscBool *flg) 615 { 616 PetscFunctionBegin; 617 PetscValidHeaderSpecific(tao, TAO_CLASSID, 1); 618 if (tao->ops->computeobjective == NULL) *flg = PETSC_FALSE; 619 else *flg = PETSC_TRUE; 620 PetscFunctionReturn(0); 621 } 622 623 /*@ 624 TaoIsGradientDefined - Checks to see if the user has 625 declared an objective-only routine. Useful for determining when 626 it is appropriate to call `TaoComputeGradient()` or 627 `TaoComputeGradientAndGradient()` 628 629 Not Collective 630 631 Input Parameter: 632 . tao - the Tao context 633 634 Output Parameter: 635 . 
flg - `PETSC_TRUE` if function routine is set by user, `PETSC_FALSE` otherwise 636 637 Level: developer 638 639 .seealso: `TaoSetGradient()`, `TaoIsObjectiveDefined()`, `TaoIsObjectiveAndGradientDefined()` 640 @*/ 641 PetscErrorCode TaoIsGradientDefined(Tao tao, PetscBool *flg) 642 { 643 PetscFunctionBegin; 644 PetscValidHeaderSpecific(tao, TAO_CLASSID, 1); 645 if (tao->ops->computegradient == NULL) *flg = PETSC_FALSE; 646 else *flg = PETSC_TRUE; 647 PetscFunctionReturn(0); 648 } 649 650 /*@ 651 TaoIsObjectiveAndGradientDefined - Checks to see if the user has 652 declared a joint objective/gradient routine. Useful for determining when 653 it is appropriate to call `TaoComputeObjective()` or 654 `TaoComputeObjectiveAndGradient()` 655 656 Not Collective 657 658 Input Parameter: 659 . tao - the Tao context 660 661 Output Parameter: 662 . flg - `PETSC_TRUE` if function routine is set by user, `PETSC_FALSE` otherwise 663 664 Level: developer 665 666 .seealso: `TaoSetObjectiveAndGradient()`, `TaoIsObjectiveDefined()`, `TaoIsGradientDefined()` 667 @*/ 668 PetscErrorCode TaoIsObjectiveAndGradientDefined(Tao tao, PetscBool *flg) 669 { 670 PetscFunctionBegin; 671 PetscValidHeaderSpecific(tao, TAO_CLASSID, 1); 672 if (tao->ops->computeobjectiveandgradient == NULL) *flg = PETSC_FALSE; 673 else *flg = PETSC_TRUE; 674 PetscFunctionReturn(0); 675 } 676