xref: /petsc/src/tao/interface/taosolver_fg.c (revision d2522c19e8fa9bca20aaca277941d9a63e71db6a)
1 #include <petsc/private/taoimpl.h> /*I "petsctao.h" I*/
2 
3 /*@
4   TaoSetSolution - Sets the vector holding the initial guess for the solve
5 
6   Logically collective on tao
7 
8   Input Parameters:
9 + tao - the Tao context
10 - x0  - the initial guess
11 
12   Level: beginner
13 .seealso: `Tao`, `TaoCreate()`, `TaoSolve()`, `TaoGetSolution()`
14 @*/
15 PetscErrorCode TaoSetSolution(Tao tao, Vec x0) {
16   PetscFunctionBegin;
17   PetscValidHeaderSpecific(tao, TAO_CLASSID, 1);
18   if (x0) PetscValidHeaderSpecific(x0, VEC_CLASSID, 2);
19   PetscCall(PetscObjectReference((PetscObject)x0));
20   PetscCall(VecDestroy(&tao->solution));
21   tao->solution = x0;
22   PetscFunctionReturn(0);
23 }
24 
25 PetscErrorCode TaoTestGradient(Tao tao, Vec x, Vec g1) {
26   Vec               g2, g3;
27   PetscBool         complete_print = PETSC_FALSE, test = PETSC_FALSE;
28   PetscReal         hcnorm, fdnorm, hcmax, fdmax, diffmax, diffnorm;
29   PetscScalar       dot;
30   MPI_Comm          comm;
31   PetscViewer       viewer, mviewer;
32   PetscViewerFormat format;
33   PetscInt          tabs;
34   static PetscBool  directionsprinted = PETSC_FALSE;
35 
36   PetscFunctionBegin;
37   PetscObjectOptionsBegin((PetscObject)tao);
38   PetscCall(PetscOptionsName("-tao_test_gradient", "Compare hand-coded and finite difference Gradients", "None", &test));
39   PetscCall(PetscOptionsViewer("-tao_test_gradient_view", "View difference between hand-coded and finite difference Gradients element entries", "None", &mviewer, &format, &complete_print));
40   PetscOptionsEnd();
41   if (!test) {
42     if (complete_print) { PetscCall(PetscViewerDestroy(&mviewer)); }
43     PetscFunctionReturn(0);
44   }
45 
46   PetscCall(PetscObjectGetComm((PetscObject)tao, &comm));
47   PetscCall(PetscViewerASCIIGetStdout(comm, &viewer));
48   PetscCall(PetscViewerASCIIGetTab(viewer, &tabs));
49   PetscCall(PetscViewerASCIISetTab(viewer, ((PetscObject)tao)->tablevel));
50   PetscCall(PetscViewerASCIIPrintf(viewer, "  ---------- Testing Gradient -------------\n"));
51   if (!complete_print && !directionsprinted) {
52     PetscCall(PetscViewerASCIIPrintf(viewer, "  Run with -tao_test_gradient_view and optionally -tao_test_gradient <threshold> to show difference\n"));
53     PetscCall(PetscViewerASCIIPrintf(viewer, "    of hand-coded and finite difference gradient entries greater than <threshold>.\n"));
54   }
55   if (!directionsprinted) {
56     PetscCall(PetscViewerASCIIPrintf(viewer, "  Testing hand-coded Gradient, if (for double precision runs) ||G - Gfd||/||G|| is\n"));
57     PetscCall(PetscViewerASCIIPrintf(viewer, "    O(1.e-8), the hand-coded Gradient is probably correct.\n"));
58     directionsprinted = PETSC_TRUE;
59   }
60   if (complete_print) PetscCall(PetscViewerPushFormat(mviewer, format));
61 
62   PetscCall(VecDuplicate(x, &g2));
63   PetscCall(VecDuplicate(x, &g3));
64 
65   /* Compute finite difference gradient, assume the gradient is already computed by TaoComputeGradient() and put into g1 */
66   PetscCall(TaoDefaultComputeGradient(tao, x, g2, NULL));
67 
68   PetscCall(VecNorm(g2, NORM_2, &fdnorm));
69   PetscCall(VecNorm(g1, NORM_2, &hcnorm));
70   PetscCall(VecNorm(g2, NORM_INFINITY, &fdmax));
71   PetscCall(VecNorm(g1, NORM_INFINITY, &hcmax));
72   PetscCall(VecDot(g1, g2, &dot));
73   PetscCall(VecCopy(g1, g3));
74   PetscCall(VecAXPY(g3, -1.0, g2));
75   PetscCall(VecNorm(g3, NORM_2, &diffnorm));
76   PetscCall(VecNorm(g3, NORM_INFINITY, &diffmax));
77   PetscCall(PetscViewerASCIIPrintf(viewer, "  ||Gfd|| %g, ||G|| = %g, angle cosine = (Gfd'G)/||Gfd||||G|| = %g\n", (double)fdnorm, (double)hcnorm, (double)(PetscRealPart(dot) / (fdnorm * hcnorm))));
78   PetscCall(PetscViewerASCIIPrintf(viewer, "  2-norm ||G - Gfd||/||G|| = %g, ||G - Gfd|| = %g\n", (double)(diffnorm / PetscMax(hcnorm, fdnorm)), (double)diffnorm));
79   PetscCall(PetscViewerASCIIPrintf(viewer, "  max-norm ||G - Gfd||/||G|| = %g, ||G - Gfd|| = %g\n", (double)(diffmax / PetscMax(hcmax, fdmax)), (double)diffmax));
80 
81   if (complete_print) {
82     PetscCall(PetscViewerASCIIPrintf(viewer, "  Hand-coded gradient ----------\n"));
83     PetscCall(VecView(g1, mviewer));
84     PetscCall(PetscViewerASCIIPrintf(viewer, "  Finite difference gradient ----------\n"));
85     PetscCall(VecView(g2, mviewer));
86     PetscCall(PetscViewerASCIIPrintf(viewer, "  Hand-coded minus finite-difference gradient ----------\n"));
87     PetscCall(VecView(g3, mviewer));
88   }
89   PetscCall(VecDestroy(&g2));
90   PetscCall(VecDestroy(&g3));
91 
92   if (complete_print) {
93     PetscCall(PetscViewerPopFormat(mviewer));
94     PetscCall(PetscViewerDestroy(&mviewer));
95   }
96   PetscCall(PetscViewerASCIISetTab(viewer, tabs));
97   PetscFunctionReturn(0);
98 }
99 
100 /*@
101   TaoComputeGradient - Computes the gradient of the objective function
102 
103   Collective on tao
104 
105   Input Parameters:
106 + tao - the Tao context
107 - X - input vector
108 
109   Output Parameter:
110 . G - gradient vector
111 
112   Options Database Keys:
113 +    -tao_test_gradient - compare the user provided gradient with one compute via finite differences to check for errors
114 -    -tao_test_gradient_view - display the user provided gradient, the finite difference gradient and the difference between them to help users detect the location of errors in the user provided gradient
115 
116   Note:
117     `TaoComputeGradient()` is typically used within the implementation of the optimization method,
118   so most users would not generally call this routine themselves.
119 
120   Level: developer
121 
122 .seealso: `TaoComputeObjective()`, `TaoComputeObjectiveAndGradient()`, `TaoSetGradient()`
123 @*/
124 PetscErrorCode TaoComputeGradient(Tao tao, Vec X, Vec G) {
125   PetscReal dummy;
126 
127   PetscFunctionBegin;
128   PetscValidHeaderSpecific(tao, TAO_CLASSID, 1);
129   PetscValidHeaderSpecific(X, VEC_CLASSID, 2);
130   PetscValidHeaderSpecific(G, VEC_CLASSID, 3);
131   PetscCheckSameComm(tao, 1, X, 2);
132   PetscCheckSameComm(tao, 1, G, 3);
133   PetscCall(VecLockReadPush(X));
134   if (tao->ops->computegradient) {
135     PetscCall(PetscLogEventBegin(TAO_GradientEval, tao, X, G, NULL));
136     PetscCallBack("Tao callback gradient", (*tao->ops->computegradient)(tao, X, G, tao->user_gradP));
137     PetscCall(PetscLogEventEnd(TAO_GradientEval, tao, X, G, NULL));
138     tao->ngrads++;
139   } else if (tao->ops->computeobjectiveandgradient) {
140     PetscCall(PetscLogEventBegin(TAO_ObjGradEval, tao, X, G, NULL));
141     PetscCallBack("Tao callback objective/gradient", (*tao->ops->computeobjectiveandgradient)(tao, X, &dummy, G, tao->user_objgradP));
142     PetscCall(PetscLogEventEnd(TAO_ObjGradEval, tao, X, G, NULL));
143     tao->nfuncgrads++;
144   } else SETERRQ(PetscObjectComm((PetscObject)tao), PETSC_ERR_ARG_WRONGSTATE, "TaoSetGradient() has not been called");
145   PetscCall(VecLockReadPop(X));
146 
147   PetscCall(TaoTestGradient(tao, X, G));
148   PetscFunctionReturn(0);
149 }
150 
151 /*@
152   TaoComputeObjective - Computes the objective function value at a given point
153 
154   Collective on tao
155 
156   Input Parameters:
157 + tao - the Tao context
158 - X - input vector
159 
160   Output Parameter:
161 . f - Objective value at X
162 
163   Note:
164     `TaoComputeObjective()` is typically used within the implementation of the optimization algorithm
165   so most users would not generally call this routine themselves.
166 
167   Level: developer
168 
169 .seealso: `Tao`, `TaoComputeGradient()`, `TaoComputeObjectiveAndGradient()`, `TaoSetObjective()`
170 @*/
171 PetscErrorCode TaoComputeObjective(Tao tao, Vec X, PetscReal *f) {
172   Vec temp;
173 
174   PetscFunctionBegin;
175   PetscValidHeaderSpecific(tao, TAO_CLASSID, 1);
176   PetscValidHeaderSpecific(X, VEC_CLASSID, 2);
177   PetscCheckSameComm(tao, 1, X, 2);
178   PetscCall(VecLockReadPush(X));
179   if (tao->ops->computeobjective) {
180     PetscCall(PetscLogEventBegin(TAO_ObjectiveEval, tao, X, NULL, NULL));
181     PetscCallBack("Tao callback objective", (*tao->ops->computeobjective)(tao, X, f, tao->user_objP));
182     PetscCall(PetscLogEventEnd(TAO_ObjectiveEval, tao, X, NULL, NULL));
183     tao->nfuncs++;
184   } else if (tao->ops->computeobjectiveandgradient) {
185     PetscCall(PetscInfo(tao, "Duplicating variable vector in order to call func/grad routine\n"));
186     PetscCall(VecDuplicate(X, &temp));
187     PetscCall(PetscLogEventBegin(TAO_ObjGradEval, tao, X, NULL, NULL));
188     PetscCallBack("Tao callback objective/gradient", (*tao->ops->computeobjectiveandgradient)(tao, X, f, temp, tao->user_objgradP));
189     PetscCall(PetscLogEventEnd(TAO_ObjGradEval, tao, X, NULL, NULL));
190     PetscCall(VecDestroy(&temp));
191     tao->nfuncgrads++;
192   } else SETERRQ(PetscObjectComm((PetscObject)tao), PETSC_ERR_ARG_WRONGSTATE, "TaoSetObjective() has not been called");
193   PetscCall(PetscInfo(tao, "TAO Function evaluation: %20.19e\n", (double)(*f)));
194   PetscCall(VecLockReadPop(X));
195   PetscFunctionReturn(0);
196 }
197 
198 /*@
199   TaoComputeObjectiveAndGradient - Computes the objective function value at a given point
200 
201   Collective on tao
202 
203   Input Parameters:
204 + tao - the Tao context
205 - X - input vector
206 
207   Output Parameters:
208 + f - Objective value at X
209 - g - Gradient vector at X
210 
211   Note:
212     `TaoComputeObjectiveAndGradient()` is typically used within the implementation of the optimization algorithm,
213   so most users would not generally call this routine themselves.
214 
215   Level: developer
216 
217 .seealso: `TaoComputeGradient()`, `TaoComputeObjectiveAndGradient()`, `TaoSetObjective()`
218 @*/
219 PetscErrorCode TaoComputeObjectiveAndGradient(Tao tao, Vec X, PetscReal *f, Vec G) {
220   PetscFunctionBegin;
221   PetscValidHeaderSpecific(tao, TAO_CLASSID, 1);
222   PetscValidHeaderSpecific(X, VEC_CLASSID, 2);
223   PetscValidHeaderSpecific(G, VEC_CLASSID, 4);
224   PetscCheckSameComm(tao, 1, X, 2);
225   PetscCheckSameComm(tao, 1, G, 4);
226   PetscCall(VecLockReadPush(X));
227   if (tao->ops->computeobjectiveandgradient) {
228     PetscCall(PetscLogEventBegin(TAO_ObjGradEval, tao, X, G, NULL));
229     if (tao->ops->computegradient == TaoDefaultComputeGradient) {
230       PetscCall(TaoComputeObjective(tao, X, f));
231       PetscCall(TaoDefaultComputeGradient(tao, X, G, NULL));
232     } else {
233       PetscCallBack("Tao callback objective/gradient", (*tao->ops->computeobjectiveandgradient)(tao, X, f, G, tao->user_objgradP));
234     }
235     PetscCall(PetscLogEventEnd(TAO_ObjGradEval, tao, X, G, NULL));
236     tao->nfuncgrads++;
237   } else if (tao->ops->computeobjective && tao->ops->computegradient) {
238     PetscCall(PetscLogEventBegin(TAO_ObjectiveEval, tao, X, NULL, NULL));
239     PetscCallBack("Tao callback objective", (*tao->ops->computeobjective)(tao, X, f, tao->user_objP));
240     PetscCall(PetscLogEventEnd(TAO_ObjectiveEval, tao, X, NULL, NULL));
241     tao->nfuncs++;
242     PetscCall(PetscLogEventBegin(TAO_GradientEval, tao, X, G, NULL));
243     PetscCallBack("Tao callback gradient", (*tao->ops->computegradient)(tao, X, G, tao->user_gradP));
244     PetscCall(PetscLogEventEnd(TAO_GradientEval, tao, X, G, NULL));
245     tao->ngrads++;
246   } else SETERRQ(PetscObjectComm((PetscObject)tao), PETSC_ERR_ARG_WRONGSTATE, "TaoSetObjective() or TaoSetGradient() not set");
247   PetscCall(PetscInfo(tao, "TAO Function evaluation: %20.19e\n", (double)(*f)));
248   PetscCall(VecLockReadPop(X));
249 
250   PetscCall(TaoTestGradient(tao, X, G));
251   PetscFunctionReturn(0);
252 }
253 
254 /*@C
255   TaoSetObjective - Sets the function evaluation routine for minimization
256 
257   Logically collective on tao
258 
259   Input Parameters:
260 + tao - the Tao context
261 . func - the objective function
262 - ctx - [optional] user-defined context for private data for the function evaluation
263         routine (may be NULL)
264 
265   Calling sequence of func:
266 $      func (Tao tao, Vec x, PetscReal *f, void *ctx);
267 
268 + x - input vector
269 . f - function value
270 - ctx - [optional] user-defined function context
271 
272   Level: beginner
273 
274 .seealso: `TaoSetGradient()`, `TaoSetHessian()`, `TaoSetObjectiveAndGradient()`, `TaoGetObjective()`
275 @*/
276 PetscErrorCode TaoSetObjective(Tao tao, PetscErrorCode (*func)(Tao, Vec, PetscReal *, void *), void *ctx) {
277   PetscFunctionBegin;
278   PetscValidHeaderSpecific(tao, TAO_CLASSID, 1);
279   if (ctx) tao->user_objP = ctx;
280   if (func) tao->ops->computeobjective = func;
281   PetscFunctionReturn(0);
282 }
283 
284 /*@C
285   TaoGetObjective - Gets the function evaluation routine for the function to be minimized
286 
287   Not collective
288 
289   Input Parameter:
290 . tao - the Tao context
291 
292   Output Parameters
293 + func - the objective function
294 - ctx - the user-defined context for private data for the function evaluation
295 
296   Calling sequence of func:
297 $      func (Tao tao, Vec x, PetscReal *f, void *ctx);
298 
299 + x - input vector
300 . f - function value
301 - ctx - [optional] user-defined function context
302 
303   Level: beginner
304 
305 .seealso: `Tao`, `TaoSetGradient()`, `TaoSetHessian()`, `TaoSetObjective()`
306 @*/
307 PetscErrorCode TaoGetObjective(Tao tao, PetscErrorCode (**func)(Tao, Vec, PetscReal *, void *), void **ctx) {
308   PetscFunctionBegin;
309   PetscValidHeaderSpecific(tao, TAO_CLASSID, 1);
310   if (func) *func = tao->ops->computeobjective;
311   if (ctx) *ctx = tao->user_objP;
312   PetscFunctionReturn(0);
313 }
314 
315 /*@C
316   TaoSetResidualRoutine - Sets the residual evaluation routine for least-square applications
317 
318   Logically collective on tao
319 
320   Input Parameters:
321 + tao - the Tao context
322 . func - the residual evaluation routine
323 - ctx - [optional] user-defined context for private data for the function evaluation
324         routine (may be NULL)
325 
326   Calling sequence of func:
327 $      func (Tao tao, Vec x, Vec f, void *ctx);
328 
329 + x - input vector
330 . f - function value vector
331 - ctx - [optional] user-defined function context
332 
333   Level: beginner
334 
335 .seealso: `Tao`, `TaoSetObjective()`, `TaoSetJacobianRoutine()`
336 @*/
337 PetscErrorCode TaoSetResidualRoutine(Tao tao, Vec res, PetscErrorCode (*func)(Tao, Vec, Vec, void *), void *ctx) {
338   PetscFunctionBegin;
339   PetscValidHeaderSpecific(tao, TAO_CLASSID, 1);
340   PetscValidHeaderSpecific(res, VEC_CLASSID, 2);
341   PetscCall(PetscObjectReference((PetscObject)res));
342   if (tao->ls_res) { PetscCall(VecDestroy(&tao->ls_res)); }
343   tao->ls_res               = res;
344   tao->user_lsresP          = ctx;
345   tao->ops->computeresidual = func;
346 
347   PetscFunctionReturn(0);
348 }
349 
350 /*@
351   TaoSetResidualWeights - Give weights for the residual values. A vector can be used if only diagonal terms are used, otherwise a matrix can be give.
352    If this function is not provided, or if sigma_v and sigma_w are both NULL, then the identity matrix will be used for weights.
353 
354   Collective on tao
355 
356   Input Parameters:
357 + tao - the Tao context
358 . sigma_v - vector of weights (diagonal terms only)
359 . n       - the number of weights (if using off-diagonal)
360 . rows    - index list of rows for sigma_w
361 . cols    - index list of columns for sigma_w
362 - vals - array of weights
363 
364   Note: Either sigma_v or sigma_w (or both) should be NULL
365 
366   Level: intermediate
367 
368 .seealso: `Tao`, `TaoSetResidualRoutine()`
369 @*/
370 PetscErrorCode TaoSetResidualWeights(Tao tao, Vec sigma_v, PetscInt n, PetscInt *rows, PetscInt *cols, PetscReal *vals) {
371   PetscInt i;
372 
373   PetscFunctionBegin;
374   PetscValidHeaderSpecific(tao, TAO_CLASSID, 1);
375   if (sigma_v) PetscValidHeaderSpecific(sigma_v, VEC_CLASSID, 2);
376   PetscCall(PetscObjectReference((PetscObject)sigma_v));
377   PetscCall(VecDestroy(&tao->res_weights_v));
378   tao->res_weights_v = sigma_v;
379   if (vals) {
380     PetscCall(PetscFree(tao->res_weights_rows));
381     PetscCall(PetscFree(tao->res_weights_cols));
382     PetscCall(PetscFree(tao->res_weights_w));
383     PetscCall(PetscMalloc1(n, &tao->res_weights_rows));
384     PetscCall(PetscMalloc1(n, &tao->res_weights_cols));
385     PetscCall(PetscMalloc1(n, &tao->res_weights_w));
386     tao->res_weights_n = n;
387     for (i = 0; i < n; i++) {
388       tao->res_weights_rows[i] = rows[i];
389       tao->res_weights_cols[i] = cols[i];
390       tao->res_weights_w[i]    = vals[i];
391     }
392   } else {
393     tao->res_weights_n    = 0;
394     tao->res_weights_rows = NULL;
395     tao->res_weights_cols = NULL;
396   }
397   PetscFunctionReturn(0);
398 }
399 
400 /*@
401   TaoComputeResidual - Computes a least-squares residual vector at a given point
402 
403   Collective on tao
404 
405   Input Parameters:
406 + tao - the Tao context
407 - X - input vector
408 
409   Output Parameter:
410 . f - Objective vector at X
411 
412   Notes:
413     `TaoComputeResidual()` is typically used within the implementation of the optimization algorithm,
414   so most users would not generally call this routine themselves.
415 
416   Level: advanced
417 
418 .seealso: `Tao`, `TaoSetResidualRoutine()`
419 @*/
420 PetscErrorCode TaoComputeResidual(Tao tao, Vec X, Vec F) {
421   PetscFunctionBegin;
422   PetscValidHeaderSpecific(tao, TAO_CLASSID, 1);
423   PetscValidHeaderSpecific(X, VEC_CLASSID, 2);
424   PetscValidHeaderSpecific(F, VEC_CLASSID, 3);
425   PetscCheckSameComm(tao, 1, X, 2);
426   PetscCheckSameComm(tao, 1, F, 3);
427   if (tao->ops->computeresidual) {
428     PetscCall(PetscLogEventBegin(TAO_ObjectiveEval, tao, X, NULL, NULL));
429     PetscCallBack("Tao callback least-squares residual", (*tao->ops->computeresidual)(tao, X, F, tao->user_lsresP));
430     PetscCall(PetscLogEventEnd(TAO_ObjectiveEval, tao, X, NULL, NULL));
431     tao->nfuncs++;
432   } else SETERRQ(PetscObjectComm((PetscObject)tao), PETSC_ERR_ARG_WRONGSTATE, "TaoSetResidualRoutine() has not been called");
433   PetscCall(PetscInfo(tao, "TAO least-squares residual evaluation.\n"));
434   PetscFunctionReturn(0);
435 }
436 
437 /*@C
438   TaoSetGradient - Sets the gradient evaluation routine for the function to be optimized
439 
440   Logically collective on tao
441 
442   Input Parameters:
443 + tao - the Tao context
444 . g - [optional] the vector to internally hold the gradient computation
445 . func - the gradient function
446 - ctx - [optional] user-defined context for private data for the gradient evaluation
447         routine (may be NULL)
448 
449   Calling sequence of func:
450 $      func (Tao tao, Vec x, Vec g, void *ctx);
451 
452 + x - input vector
453 . g - gradient value (output)
454 - ctx - [optional] user-defined function context
455 
456   Level: beginner
457 
458 .seealso: `Tao`, `TaoSolve()`, `TaoSetObjective()`, `TaoSetHessian()`, `TaoSetObjectiveAndGradient()`, `TaoGetGradient()`
459 @*/
460 PetscErrorCode TaoSetGradient(Tao tao, Vec g, PetscErrorCode (*func)(Tao, Vec, Vec, void *), void *ctx) {
461   PetscFunctionBegin;
462   PetscValidHeaderSpecific(tao, TAO_CLASSID, 1);
463   if (g) {
464     PetscValidHeaderSpecific(g, VEC_CLASSID, 2);
465     PetscCheckSameComm(tao, 1, g, 2);
466     PetscCall(PetscObjectReference((PetscObject)g));
467     PetscCall(VecDestroy(&tao->gradient));
468     tao->gradient = g;
469   }
470   if (func) tao->ops->computegradient = func;
471   if (ctx) tao->user_gradP = ctx;
472   PetscFunctionReturn(0);
473 }
474 
475 /*@C
476   TaoGetGradient - Gets the gradient evaluation routine for the function being optimized
477 
478   Not collective
479 
480   Input Parameter:
481 . tao - the Tao context
482 
483   Output Parameters:
484 + g - the vector to internally hold the gradient computation
485 . func - the gradient function
486 - ctx - user-defined context for private data for the gradient evaluation routine
487 
488   Calling sequence of func:
489 $      func (Tao tao, Vec x, Vec g, void *ctx);
490 
491 + x - input vector
492 . g - gradient value (output)
493 - ctx - [optional] user-defined function context
494 
495   Level: beginner
496 
497 .seealso: `Tao`, `TaoSetObjective()`, `TaoSetHessian()`, `TaoSetObjectiveAndGradient()`, `TaoSetGradient()`
498 @*/
499 PetscErrorCode TaoGetGradient(Tao tao, Vec *g, PetscErrorCode (**func)(Tao, Vec, Vec, void *), void **ctx) {
500   PetscFunctionBegin;
501   PetscValidHeaderSpecific(tao, TAO_CLASSID, 1);
502   if (g) *g = tao->gradient;
503   if (func) *func = tao->ops->computegradient;
504   if (ctx) *ctx = tao->user_gradP;
505   PetscFunctionReturn(0);
506 }
507 
508 /*@C
509   TaoSetObjectiveAndGradient - Sets a combined objective function and gradient evaluation routine for the function to be optimized
510 
511   Logically collective on tao
512 
513   Input Parameters:
514 + tao - the Tao context
515 . g - [optional] the vector to internally hold the gradient computation
516 . func - the gradient function
517 - ctx - [optional] user-defined context for private data for the gradient evaluation
518         routine (may be NULL)
519 
520   Calling sequence of func:
521 $      func (Tao tao, Vec x, PetscReal *f, Vec g, void *ctx);
522 
523 + x - input vector
524 . f - objective value (output)
525 . g - gradient value (output)
526 - ctx - [optional] user-defined function context
527 
528   Level: beginner
529 
530   Note:
531   For some optimization methods using a combined function can be more eifficient.
532 
533 .seealso: `Tao`, `TaoSolve()`, `TaoSetObjective()`, `TaoSetHessian()`, `TaoSetGradient()`, `TaoGetObjectiveAndGradient()`
534 @*/
535 PetscErrorCode TaoSetObjectiveAndGradient(Tao tao, Vec g, PetscErrorCode (*func)(Tao, Vec, PetscReal *, Vec, void *), void *ctx) {
536   PetscFunctionBegin;
537   PetscValidHeaderSpecific(tao, TAO_CLASSID, 1);
538   if (g) {
539     PetscValidHeaderSpecific(g, VEC_CLASSID, 2);
540     PetscCheckSameComm(tao, 1, g, 2);
541     PetscCall(PetscObjectReference((PetscObject)g));
542     PetscCall(VecDestroy(&tao->gradient));
543     tao->gradient = g;
544   }
545   if (ctx) tao->user_objgradP = ctx;
546   if (func) tao->ops->computeobjectiveandgradient = func;
547   PetscFunctionReturn(0);
548 }
549 
550 /*@C
551   TaoGetObjectiveAndGradient - Gets the combined objective function and gradient evaluation routine for the function to be optimized
552 
553   Not collective
554 
555   Input Parameter:
556 . tao - the Tao context
557 
558   Output Parameters:
559 + g - the vector to internally hold the gradient computation
560 . func - the gradient function
561 - ctx - user-defined context for private data for the gradient evaluation routine
562 
563   Calling sequence of func:
564 $      func (Tao tao, Vec x, PetscReal *f, Vec g, void *ctx);
565 
566 + x - input vector
567 . f - objective value (output)
568 . g - gradient value (output)
569 - ctx - [optional] user-defined function context
570 
571   Level: beginner
572 
573 .seealso: `Tao`, `TaoSolve()`, `TaoSetObjective()`, `TaoSetGradient()`, `TaoSetHessian()`, `TaoSetObjectiveAndGradient()`
574 @*/
575 PetscErrorCode TaoGetObjectiveAndGradient(Tao tao, Vec *g, PetscErrorCode (**func)(Tao, Vec, PetscReal *, Vec, void *), void **ctx) {
576   PetscFunctionBegin;
577   PetscValidHeaderSpecific(tao, TAO_CLASSID, 1);
578   if (g) *g = tao->gradient;
579   if (func) *func = tao->ops->computeobjectiveandgradient;
580   if (ctx) *ctx = tao->user_objgradP;
581   PetscFunctionReturn(0);
582 }
583 
584 /*@
585   TaoIsObjectiveDefined - Checks to see if the user has
586   declared an objective-only routine.  Useful for determining when
587   it is appropriate to call `TaoComputeObjective()` or
588   `TaoComputeObjectiveAndGradient()`
589 
590   Not collective
591 
592   Input Parameter:
593 . tao - the Tao context
594 
595   Output Parameter:
596 . flg - `PETSC_TRUE` if function routine is set by user, `PETSC_FALSE` otherwise
597 
598   Level: developer
599 
600 .seealso: `Tao`, `TaoSetObjective()`, `TaoIsGradientDefined()`, `TaoIsObjectiveAndGradientDefined()`
601 @*/
602 PetscErrorCode TaoIsObjectiveDefined(Tao tao, PetscBool *flg) {
603   PetscFunctionBegin;
604   PetscValidHeaderSpecific(tao, TAO_CLASSID, 1);
605   if (tao->ops->computeobjective == NULL) *flg = PETSC_FALSE;
606   else *flg = PETSC_TRUE;
607   PetscFunctionReturn(0);
608 }
609 
610 /*@
611   TaoIsGradientDefined - Checks to see if the user has
612   declared an objective-only routine.  Useful for determining when
613   it is appropriate to call `TaoComputeGradient()` or
614   `TaoComputeGradientAndGradient()`
615 
616   Not Collective
617 
618   Input Parameter:
619 . tao - the Tao context
620 
621   Output Parameter:
622 . flg - `PETSC_TRUE` if function routine is set by user, `PETSC_FALSE` otherwise
623 
624   Level: developer
625 
626 .seealso: `TaoSetGradient()`, `TaoIsObjectiveDefined()`, `TaoIsObjectiveAndGradientDefined()`
627 @*/
628 PetscErrorCode TaoIsGradientDefined(Tao tao, PetscBool *flg) {
629   PetscFunctionBegin;
630   PetscValidHeaderSpecific(tao, TAO_CLASSID, 1);
631   if (tao->ops->computegradient == NULL) *flg = PETSC_FALSE;
632   else *flg = PETSC_TRUE;
633   PetscFunctionReturn(0);
634 }
635 
636 /*@
637   TaoIsObjectiveAndGradientDefined - Checks to see if the user has
638   declared a joint objective/gradient routine.  Useful for determining when
639   it is appropriate to call `TaoComputeObjective()` or
640   `TaoComputeObjectiveAndGradient()`
641 
642   Not Collective
643 
644   Input Parameter:
645 . tao - the Tao context
646 
647   Output Parameter:
648 . flg - `PETSC_TRUE` if function routine is set by user, `PETSC_FALSE` otherwise
649 
650   Level: developer
651 
652 .seealso: `TaoSetObjectiveAndGradient()`, `TaoIsObjectiveDefined()`, `TaoIsGradientDefined()`
653 @*/
654 PetscErrorCode TaoIsObjectiveAndGradientDefined(Tao tao, PetscBool *flg) {
655   PetscFunctionBegin;
656   PetscValidHeaderSpecific(tao, TAO_CLASSID, 1);
657   if (tao->ops->computeobjectiveandgradient == NULL) *flg = PETSC_FALSE;
658   else *flg = PETSC_TRUE;
659   PetscFunctionReturn(0);
660 }
661