xref: /petsc/src/tao/interface/taosolver_fg.c (revision ef46b1a67e276116c83b5d4ce8efc2932ea4fc0a)
1 #include <petsc/private/taoimpl.h> /*I "petsctao.h" I*/
2 
3 /*@
4   TaoSetSolution - Sets the vector holding the initial guess for the solve
5 
6   Logically collective on Tao
7 
8   Input Parameters:
9 + tao - the Tao context
10 - x0  - the initial guess
11 
12   Level: beginner
13 .seealso: TaoCreate(), TaoSolve(), TaoGetSolution()
14 @*/
PetscErrorCode TaoSetSolution(Tao tao, Vec x0)
{
  PetscFunctionBegin;
  PetscValidHeaderSpecific(tao,TAO_CLASSID,1);
  if (x0) PetscValidHeaderSpecific(x0,VEC_CLASSID,2);
  /* Reference before destroy so this is safe even when x0 == tao->solution */
  PetscCall(PetscObjectReference((PetscObject)x0));
  PetscCall(VecDestroy(&tao->solution));
  tao->solution = x0;
  PetscFunctionReturn(0);
}
25 
/*
  TaoTestGradient - Compares the hand-coded gradient g1 (already computed at x)
  against a finite-difference approximation and prints norms of the difference.

  Enabled by the option -tao_test_gradient; with -tao_test_gradient_view the
  hand-coded, finite-difference, and difference vectors are also viewed.
*/
PetscErrorCode TaoTestGradient(Tao tao,Vec x,Vec g1)
{
  Vec               g2,g3;
  PetscBool         complete_print = PETSC_FALSE,test = PETSC_FALSE;
  PetscReal         hcnorm,fdnorm,hcmax,fdmax,diffmax,diffnorm;
  PetscScalar       dot;
  MPI_Comm          comm;
  PetscViewer       viewer,mviewer;
  PetscViewerFormat format;
  PetscInt          tabs;
  static PetscBool  directionsprinted = PETSC_FALSE;

  PetscFunctionBegin;
  PetscObjectOptionsBegin((PetscObject)tao);
  PetscCall(PetscOptionsName("-tao_test_gradient","Compare hand-coded and finite difference Gradients","None",&test));
  PetscCall(PetscOptionsViewer("-tao_test_gradient_view","View difference between hand-coded and finite difference Gradients element entries","None",&mviewer,&format,&complete_print));
  PetscOptionsEnd();
  if (!test) {
    /* PetscOptionsViewer() created mviewer even though the test itself is disabled */
    if (complete_print) {
      PetscCall(PetscViewerDestroy(&mviewer));
    }
    PetscFunctionReturn(0);
  }

  PetscCall(PetscObjectGetComm((PetscObject)tao,&comm));
  PetscCall(PetscViewerASCIIGetStdout(comm,&viewer));
  /* Temporarily adopt the Tao object's tab level; restored before returning */
  PetscCall(PetscViewerASCIIGetTab(viewer, &tabs));
  PetscCall(PetscViewerASCIISetTab(viewer, ((PetscObject)tao)->tablevel));
  PetscCall(PetscViewerASCIIPrintf(viewer,"  ---------- Testing Gradient -------------\n"));
  /* The explanatory banner is printed only once per process run (static flag) */
  if (!complete_print && !directionsprinted) {
    PetscCall(PetscViewerASCIIPrintf(viewer,"  Run with -tao_test_gradient_view and optionally -tao_test_gradient <threshold> to show difference\n"));
    PetscCall(PetscViewerASCIIPrintf(viewer,"    of hand-coded and finite difference gradient entries greater than <threshold>.\n"));
  }
  if (!directionsprinted) {
    PetscCall(PetscViewerASCIIPrintf(viewer,"  Testing hand-coded Gradient, if (for double precision runs) ||G - Gfd||/||G|| is\n"));
    PetscCall(PetscViewerASCIIPrintf(viewer,"    O(1.e-8), the hand-coded Gradient is probably correct.\n"));
    directionsprinted = PETSC_TRUE;
  }
  if (complete_print) {
    PetscCall(PetscViewerPushFormat(mviewer,format));
  }

  PetscCall(VecDuplicate(x,&g2));
  PetscCall(VecDuplicate(x,&g3));

  /* Compute finite difference gradient, assume the gradient is already computed by TaoComputeGradient() and put into g1 */
  PetscCall(TaoDefaultComputeGradient(tao,x,g2,NULL));

  /* g3 = g1 - g2 is the hand-coded minus finite-difference gradient */
  PetscCall(VecNorm(g2,NORM_2,&fdnorm));
  PetscCall(VecNorm(g1,NORM_2,&hcnorm));
  PetscCall(VecNorm(g2,NORM_INFINITY,&fdmax));
  PetscCall(VecNorm(g1,NORM_INFINITY,&hcmax));
  PetscCall(VecDot(g1,g2,&dot));
  PetscCall(VecCopy(g1,g3));
  PetscCall(VecAXPY(g3,-1.0,g2));
  PetscCall(VecNorm(g3,NORM_2,&diffnorm));
  PetscCall(VecNorm(g3,NORM_INFINITY,&diffmax));
  PetscCall(PetscViewerASCIIPrintf(viewer,"  ||Gfd|| %g, ||G|| = %g, angle cosine = (Gfd'G)/||Gfd||||G|| = %g\n", (double)fdnorm, (double)hcnorm, (double)(PetscRealPart(dot)/(fdnorm*hcnorm))));
  PetscCall(PetscViewerASCIIPrintf(viewer,"  2-norm ||G - Gfd||/||G|| = %g, ||G - Gfd|| = %g\n",(double)(diffnorm/PetscMax(hcnorm,fdnorm)),(double)diffnorm));
  PetscCall(PetscViewerASCIIPrintf(viewer,"  max-norm ||G - Gfd||/||G|| = %g, ||G - Gfd|| = %g\n",(double)(diffmax/PetscMax(hcmax,fdmax)),(double)diffmax));

  if (complete_print) {
    PetscCall(PetscViewerASCIIPrintf(viewer,"  Hand-coded gradient ----------\n"));
    PetscCall(VecView(g1,mviewer));
    PetscCall(PetscViewerASCIIPrintf(viewer,"  Finite difference gradient ----------\n"));
    PetscCall(VecView(g2,mviewer));
    PetscCall(PetscViewerASCIIPrintf(viewer,"  Hand-coded minus finite-difference gradient ----------\n"));
    PetscCall(VecView(g3,mviewer));
  }
  PetscCall(VecDestroy(&g2));
  PetscCall(VecDestroy(&g3));

  if (complete_print) {
    PetscCall(PetscViewerPopFormat(mviewer));
    PetscCall(PetscViewerDestroy(&mviewer));
  }
  /* Restore the viewer's original tab level */
  PetscCall(PetscViewerASCIISetTab(viewer,tabs));
  PetscFunctionReturn(0);
}
105 
106 /*@
107   TaoComputeGradient - Computes the gradient of the objective function
108 
109   Collective on Tao
110 
111   Input Parameters:
112 + tao - the Tao context
113 - X - input vector
114 
115   Output Parameter:
116 . G - gradient vector
117 
118   Options Database Keys:
+    -tao_test_gradient - compare the user provided gradient with one computed via finite differences to check for errors
120 -    -tao_test_gradient_view - display the user provided gradient, the finite difference gradient and the difference between them to help users detect the location of errors in the user provided gradient
121 
122   Notes:
123     TaoComputeGradient() is typically used within minimization implementations,
124   so most users would not generally call this routine themselves.
125 
126   Level: advanced
127 
128 .seealso: TaoComputeObjective(), TaoComputeObjectiveAndGradient(), TaoSetGradient()
129 @*/
PetscErrorCode TaoComputeGradient(Tao tao, Vec X, Vec G)
{
  PetscReal      dummy; /* discarded objective value when only the combined routine exists */

  PetscFunctionBegin;
  PetscValidHeaderSpecific(tao,TAO_CLASSID,1);
  PetscValidHeaderSpecific(X,VEC_CLASSID,2);
  PetscValidHeaderSpecific(G,VEC_CLASSID,3);
  PetscCheckSameComm(tao,1,X,2);
  PetscCheckSameComm(tao,1,G,3);
  /* Guard X against modification by the user callback */
  PetscCall(VecLockReadPush(X));
  if (tao->ops->computegradient) {
    PetscCall(PetscLogEventBegin(TAO_GradientEval,tao,X,G,NULL));
    PetscStackPush("Tao user gradient evaluation routine");
    PetscCall((*tao->ops->computegradient)(tao,X,G,tao->user_gradP));
    PetscStackPop;
    PetscCall(PetscLogEventEnd(TAO_GradientEval,tao,X,G,NULL));
    tao->ngrads++;
  } else if (tao->ops->computeobjectiveandgradient) {
    /* Fall back to the combined objective/gradient routine, ignoring the objective value */
    PetscCall(PetscLogEventBegin(TAO_ObjGradEval,tao,X,G,NULL));
    PetscStackPush("Tao user objective/gradient evaluation routine");
    PetscCall((*tao->ops->computeobjectiveandgradient)(tao,X,&dummy,G,tao->user_objgradP));
    PetscStackPop;
    PetscCall(PetscLogEventEnd(TAO_ObjGradEval,tao,X,G,NULL));
    tao->nfuncgrads++;
  } else SETERRQ(PetscObjectComm((PetscObject)tao),PETSC_ERR_ARG_WRONGSTATE,"TaoSetGradient() has not been called");
  PetscCall(VecLockReadPop(X));

  /* Optionally compare against a finite-difference gradient (-tao_test_gradient) */
  PetscCall(TaoTestGradient(tao,X,G));
  PetscFunctionReturn(0);
}
161 
162 /*@
163   TaoComputeObjective - Computes the objective function value at a given point
164 
165   Collective on Tao
166 
167   Input Parameters:
168 + tao - the Tao context
169 - X - input vector
170 
171   Output Parameter:
172 . f - Objective value at X
173 
174   Notes:
175     TaoComputeObjective() is typically used within minimization implementations,
176   so most users would not generally call this routine themselves.
177 
178   Level: advanced
179 
180 .seealso: TaoComputeGradient(), TaoComputeObjectiveAndGradient(), TaoSetObjective()
181 @*/
PetscErrorCode TaoComputeObjective(Tao tao, Vec X, PetscReal *f)
{
  Vec            temp;

  PetscFunctionBegin;
  PetscValidHeaderSpecific(tao,TAO_CLASSID,1);
  PetscValidHeaderSpecific(X,VEC_CLASSID,2);
  PetscCheckSameComm(tao,1,X,2);
  /* Guard X against modification by the user callback */
  PetscCall(VecLockReadPush(X));
  if (tao->ops->computeobjective) {
    PetscCall(PetscLogEventBegin(TAO_ObjectiveEval,tao,X,NULL,NULL));
    PetscStackPush("Tao user objective evaluation routine");
    PetscCall((*tao->ops->computeobjective)(tao,X,f,tao->user_objP));
    PetscStackPop;
    PetscCall(PetscLogEventEnd(TAO_ObjectiveEval,tao,X,NULL,NULL));
    tao->nfuncs++;
  } else if (tao->ops->computeobjectiveandgradient) {
    /* Only a combined objective/gradient routine exists: compute the gradient
       into a scratch vector and discard it */
    PetscCall(PetscInfo(tao,"Duplicating variable vector in order to call func/grad routine\n"));
    PetscCall(VecDuplicate(X,&temp));
    PetscCall(PetscLogEventBegin(TAO_ObjGradEval,tao,X,NULL,NULL));
    PetscStackPush("Tao user objective/gradient evaluation routine");
    PetscCall((*tao->ops->computeobjectiveandgradient)(tao,X,f,temp,tao->user_objgradP));
    PetscStackPop;
    PetscCall(PetscLogEventEnd(TAO_ObjGradEval,tao,X,NULL,NULL));
    PetscCall(VecDestroy(&temp));
    tao->nfuncgrads++;
  } else SETERRQ(PetscObjectComm((PetscObject)tao),PETSC_ERR_ARG_WRONGSTATE,"TaoSetObjective() has not been called");
  PetscCall(PetscInfo(tao,"TAO Function evaluation: %20.19e\n",(double)(*f)));
  PetscCall(VecLockReadPop(X));
  PetscFunctionReturn(0);
}
213 
214 /*@
  TaoComputeObjectiveAndGradient - Computes the objective function value and its gradient at a given point
216 
217   Collective on Tao
218 
219   Input Parameters:
220 + tao - the Tao context
221 - X - input vector
222 
223   Output Parameters:
224 + f - Objective value at X
225 - g - Gradient vector at X
226 
227   Notes:
228     TaoComputeObjectiveAndGradient() is typically used within minimization implementations,
229   so most users would not generally call this routine themselves.
230 
231   Level: advanced
232 
.seealso: TaoComputeGradient(), TaoComputeObjective(), TaoSetObjectiveAndGradient()
234 @*/
PetscErrorCode TaoComputeObjectiveAndGradient(Tao tao, Vec X, PetscReal *f, Vec G)
{
  PetscFunctionBegin;
  PetscValidHeaderSpecific(tao,TAO_CLASSID,1);
  PetscValidHeaderSpecific(X,VEC_CLASSID,2);
  PetscValidHeaderSpecific(G,VEC_CLASSID,4);
  PetscCheckSameComm(tao,1,X,2);
  PetscCheckSameComm(tao,1,G,4);
  /* Guard X against modification by the user callbacks */
  PetscCall(VecLockReadPush(X));
  if (tao->ops->computeobjectiveandgradient) {
    PetscCall(PetscLogEventBegin(TAO_ObjGradEval,tao,X,G,NULL));
    if (tao->ops->computegradient == TaoDefaultComputeGradient) {
      /* Gradient is the built-in finite-difference one: evaluate the objective
         via the user routine, then approximate the gradient by differencing */
      PetscCall(TaoComputeObjective(tao,X,f));
      PetscCall(TaoDefaultComputeGradient(tao,X,G,NULL));
    } else {
      PetscStackPush("Tao user objective/gradient evaluation routine");
      PetscCall((*tao->ops->computeobjectiveandgradient)(tao,X,f,G,tao->user_objgradP));
      PetscStackPop;
    }
    PetscCall(PetscLogEventEnd(TAO_ObjGradEval,tao,X,G,NULL));
    tao->nfuncgrads++;
  } else if (tao->ops->computeobjective && tao->ops->computegradient) {
    /* No combined routine: call the separate objective and gradient callbacks */
    PetscCall(PetscLogEventBegin(TAO_ObjectiveEval,tao,X,NULL,NULL));
    PetscStackPush("Tao user objective evaluation routine");
    PetscCall((*tao->ops->computeobjective)(tao,X,f,tao->user_objP));
    PetscStackPop;
    PetscCall(PetscLogEventEnd(TAO_ObjectiveEval,tao,X,NULL,NULL));
    tao->nfuncs++;
    PetscCall(PetscLogEventBegin(TAO_GradientEval,tao,X,G,NULL));
    PetscStackPush("Tao user gradient evaluation routine");
    PetscCall((*tao->ops->computegradient)(tao,X,G,tao->user_gradP));
    PetscStackPop;
    PetscCall(PetscLogEventEnd(TAO_GradientEval,tao,X,G,NULL));
    tao->ngrads++;
  } else SETERRQ(PetscObjectComm((PetscObject)tao),PETSC_ERR_ARG_WRONGSTATE,"TaoSetObjective() or TaoSetGradient() not set");
  PetscCall(PetscInfo(tao,"TAO Function evaluation: %20.19e\n",(double)(*f)));
  PetscCall(VecLockReadPop(X));

  /* Optionally compare against a finite-difference gradient (-tao_test_gradient) */
  PetscCall(TaoTestGradient(tao,X,G));
  PetscFunctionReturn(0);
}
276 
277 /*@C
278   TaoSetObjective - Sets the function evaluation routine for minimization
279 
280   Logically collective on Tao
281 
282   Input Parameters:
283 + tao - the Tao context
284 . func - the objective function
285 - ctx - [optional] user-defined context for private data for the function evaluation
286         routine (may be NULL)
287 
288   Calling sequence of func:
289 $      func (Tao tao, Vec x, PetscReal *f, void *ctx);
290 
291 + x - input vector
292 . f - function value
293 - ctx - [optional] user-defined function context
294 
295   Level: beginner
296 
297 .seealso: TaoSetGradient(), TaoSetHessian(), TaoSetObjectiveAndGradient(), TaoGetObjective()
298 @*/
299 PetscErrorCode TaoSetObjective(Tao tao, PetscErrorCode (*func)(Tao, Vec, PetscReal*,void*),void *ctx)
300 {
301   PetscFunctionBegin;
302   PetscValidHeaderSpecific(tao,TAO_CLASSID,1);
303   if (ctx) tao->user_objP = ctx;
304   if (func) tao->ops->computeobjective = func;
305   PetscFunctionReturn(0);
306 }
307 
308 /*@C
309   TaoGetObjective - Gets the function evaluation routine for minimization
310 
311   Not collective
312 
313   Input Parameter:
314 . tao - the Tao context
315 
  Output Parameters:
317 + func - the objective function
318 - ctx - the user-defined context for private data for the function evaluation
319 
320   Calling sequence of func:
321 $      func (Tao tao, Vec x, PetscReal *f, void *ctx);
322 
323 + x - input vector
324 . f - function value
325 - ctx - [optional] user-defined function context
326 
327   Level: beginner
328 
329 .seealso: TaoSetGradient(), TaoSetHessian(), TaoSetObjective()
330 @*/
331 PetscErrorCode TaoGetObjective(Tao tao, PetscErrorCode (**func)(Tao, Vec, PetscReal*,void*),void **ctx)
332 {
333   PetscFunctionBegin;
334   PetscValidHeaderSpecific(tao,TAO_CLASSID,1);
335   if (func) *func = tao->ops->computeobjective;
336   if (ctx) *ctx = tao->user_objP;
337   PetscFunctionReturn(0);
338 }
339 
340 /*@C
341   TaoSetResidualRoutine - Sets the residual evaluation routine for least-square applications
342 
343   Logically collective on Tao
344 
  Input Parameters:
+ tao - the Tao context
. res - the vector that will internally hold the residual computation
. func - the residual evaluation routine
- ctx - [optional] user-defined context for private data for the function evaluation
        routine (may be NULL)
350 
351   Calling sequence of func:
352 $      func (Tao tao, Vec x, Vec f, void *ctx);
353 
354 + x - input vector
355 . f - function value vector
356 - ctx - [optional] user-defined function context
357 
358   Level: beginner
359 
360 .seealso: TaoSetObjective(), TaoSetJacobianRoutine()
361 @*/
362 PetscErrorCode TaoSetResidualRoutine(Tao tao, Vec res, PetscErrorCode (*func)(Tao, Vec, Vec, void*),void *ctx)
363 {
364   PetscFunctionBegin;
365   PetscValidHeaderSpecific(tao,TAO_CLASSID,1);
366   PetscValidHeaderSpecific(res,VEC_CLASSID,2);
367   PetscCall(PetscObjectReference((PetscObject)res));
368   if (tao->ls_res) {
369     PetscCall(VecDestroy(&tao->ls_res));
370   }
371   tao->ls_res = res;
372   tao->user_lsresP = ctx;
373   tao->ops->computeresidual = func;
374 
375   PetscFunctionReturn(0);
376 }
377 
378 /*@
379   TaoSetResidualWeights - Give weights for the residual values. A vector can be used if only diagonal terms are used, otherwise a matrix can be give. If this function is not used, or if sigma_v and sigma_w are both NULL, then the default identity matrix will be used for weights.
380 
381   Collective on Tao
382 
383   Input Parameters:
384 + tao - the Tao context
385 . sigma_v - vector of weights (diagonal terms only)
386 . n       - the number of weights (if using off-diagonal)
387 . rows    - index list of rows for sigma_w
388 . cols    - index list of columns for sigma_w
389 - vals - array of weights
390 
391   Note: Either sigma_v or sigma_w (or both) should be NULL
392 
393   Level: intermediate
394 
395 .seealso: TaoSetResidualRoutine()
396 @*/
397 PetscErrorCode TaoSetResidualWeights(Tao tao, Vec sigma_v, PetscInt n, PetscInt *rows, PetscInt *cols, PetscReal *vals)
398 {
399   PetscInt       i;
400 
401   PetscFunctionBegin;
402   PetscValidHeaderSpecific(tao,TAO_CLASSID,1);
403   if (sigma_v) PetscValidHeaderSpecific(sigma_v,VEC_CLASSID,2);
404   PetscCall(PetscObjectReference((PetscObject)sigma_v));
405   PetscCall(VecDestroy(&tao->res_weights_v));
406   tao->res_weights_v = sigma_v;
407   if (vals) {
408     PetscCall(PetscFree(tao->res_weights_rows));
409     PetscCall(PetscFree(tao->res_weights_cols));
410     PetscCall(PetscFree(tao->res_weights_w));
411     PetscCall(PetscMalloc1(n,&tao->res_weights_rows));
412     PetscCall(PetscMalloc1(n,&tao->res_weights_cols));
413     PetscCall(PetscMalloc1(n,&tao->res_weights_w));
414     tao->res_weights_n = n;
415     for (i=0;i<n;i++) {
416       tao->res_weights_rows[i] = rows[i];
417       tao->res_weights_cols[i] = cols[i];
418       tao->res_weights_w[i] = vals[i];
419     }
420   } else {
421     tao->res_weights_n = 0;
422     tao->res_weights_rows = NULL;
423     tao->res_weights_cols = NULL;
424   }
425   PetscFunctionReturn(0);
426 }
427 
428 /*@
429   TaoComputeResidual - Computes a least-squares residual vector at a given point
430 
431   Collective on Tao
432 
433   Input Parameters:
434 + tao - the Tao context
435 - X - input vector
436 
437   Output Parameter:
438 . f - Objective vector at X
439 
440   Notes:
441     TaoComputeResidual() is typically used within minimization implementations,
442   so most users would not generally call this routine themselves.
443 
444   Level: advanced
445 
446 .seealso: TaoSetResidualRoutine()
447 @*/
PetscErrorCode TaoComputeResidual(Tao tao, Vec X, Vec F)
{
  PetscFunctionBegin;
  PetscValidHeaderSpecific(tao,TAO_CLASSID,1);
  PetscValidHeaderSpecific(X,VEC_CLASSID,2);
  PetscValidHeaderSpecific(F,VEC_CLASSID,3);
  PetscCheckSameComm(tao,1,X,2);
  PetscCheckSameComm(tao,1,F,3);
  if (tao->ops->computeresidual) {
    /* Residual evaluation is logged and counted as an objective evaluation */
    PetscCall(PetscLogEventBegin(TAO_ObjectiveEval,tao,X,NULL,NULL));
    PetscStackPush("Tao user least-squares residual evaluation routine");
    PetscCall((*tao->ops->computeresidual)(tao,X,F,tao->user_lsresP));
    PetscStackPop;
    PetscCall(PetscLogEventEnd(TAO_ObjectiveEval,tao,X,NULL,NULL));
    tao->nfuncs++;
  } else SETERRQ(PetscObjectComm((PetscObject)tao),PETSC_ERR_ARG_WRONGSTATE,"TaoSetResidualRoutine() has not been called");
  PetscCall(PetscInfo(tao,"TAO least-squares residual evaluation.\n"));
  PetscFunctionReturn(0);
}
467 
468 /*@C
469   TaoSetGradient - Sets the gradient evaluation routine for minimization
470 
471   Logically collective on Tao
472 
473   Input Parameters:
474 + tao - the Tao context
475 . g - [optional] the vector to internally hold the gradient computation
476 . func - the gradient function
477 - ctx - [optional] user-defined context for private data for the gradient evaluation
478         routine (may be NULL)
479 
480   Calling sequence of func:
481 $      func (Tao tao, Vec x, Vec g, void *ctx);
482 
483 + x - input vector
484 . g - gradient value (output)
485 - ctx - [optional] user-defined function context
486 
487   Level: beginner
488 
489 .seealso: TaoSetObjective(), TaoSetHessian(), TaoSetObjectiveAndGradient(), TaoGetGradient()
490 @*/
491 PetscErrorCode TaoSetGradient(Tao tao, Vec g, PetscErrorCode (*func)(Tao, Vec, Vec, void*),void *ctx)
492 {
493   PetscFunctionBegin;
494   PetscValidHeaderSpecific(tao,TAO_CLASSID,1);
495   if (g) {
496     PetscValidHeaderSpecific(g,VEC_CLASSID,2);
497     PetscCheckSameComm(tao,1,g,2);
498     PetscCall(PetscObjectReference((PetscObject)g));
499     PetscCall(VecDestroy(&tao->gradient));
500     tao->gradient = g;
501   }
502   if (func) tao->ops->computegradient = func;
503   if (ctx) tao->user_gradP = ctx;
504   PetscFunctionReturn(0);
505 }
506 
507 /*@C
508   TaoGetGradient - Gets the gradient evaluation routine for minimization
509 
510   Not collective
511 
512   Input Parameter:
513 . tao - the Tao context
514 
515   Output Parameters:
516 + g - the vector to internally hold the gradient computation
517 . func - the gradient function
518 - ctx - user-defined context for private data for the gradient evaluation routine
519 
520   Calling sequence of func:
521 $      func (Tao tao, Vec x, Vec g, void *ctx);
522 
523 + x - input vector
524 . g - gradient value (output)
525 - ctx - [optional] user-defined function context
526 
527   Level: beginner
528 
529 .seealso: TaoSetObjective(), TaoSetHessian(), TaoSetObjectiveAndGradient(), TaoSetGradient()
530 @*/
531 PetscErrorCode TaoGetGradient(Tao tao, Vec *g, PetscErrorCode (**func)(Tao, Vec, Vec, void*),void **ctx)
532 {
533   PetscFunctionBegin;
534   PetscValidHeaderSpecific(tao,TAO_CLASSID,1);
535   if (g) *g = tao->gradient;
536   if (func) *func = tao->ops->computegradient;
537   if (ctx) *ctx = tao->user_gradP;
538   PetscFunctionReturn(0);
539 }
540 
541 /*@C
542   TaoSetObjectiveAndGradient - Sets a combined objective function and gradient evaluation routine for minimization
543 
544   Logically collective on Tao
545 
546   Input Parameters:
547 + tao - the Tao context
548 . g - [optional] the vector to internally hold the gradient computation
549 . func - the gradient function
550 - ctx - [optional] user-defined context for private data for the gradient evaluation
551         routine (may be NULL)
552 
553   Calling sequence of func:
554 $      func (Tao tao, Vec x, PetscReal *f, Vec g, void *ctx);
555 
556 + x - input vector
557 . f - objective value (output)
558 . g - gradient value (output)
559 - ctx - [optional] user-defined function context
560 
561   Level: beginner
562 
563 .seealso: TaoSetObjective(), TaoSetHessian(), TaoSetGradient(), TaoGetObjectiveAndGradient()
564 @*/
565 PetscErrorCode TaoSetObjectiveAndGradient(Tao tao, Vec g, PetscErrorCode (*func)(Tao, Vec, PetscReal*, Vec, void*), void *ctx)
566 {
567   PetscFunctionBegin;
568   PetscValidHeaderSpecific(tao,TAO_CLASSID,1);
569   if (g) {
570     PetscValidHeaderSpecific(g,VEC_CLASSID,2);
571     PetscCheckSameComm(tao,1,g,2);
572     PetscCall(PetscObjectReference((PetscObject)g));
573     PetscCall(VecDestroy(&tao->gradient));
574     tao->gradient = g;
575   }
576   if (ctx) tao->user_objgradP = ctx;
577   if (func) tao->ops->computeobjectiveandgradient = func;
578   PetscFunctionReturn(0);
579 }
580 
581 /*@C
582   TaoGetObjectiveAndGradient - Gets a combined objective function and gradient evaluation routine for minimization
583 
584   Not collective
585 
586   Input Parameter:
587 . tao - the Tao context
588 
589   Output Parameters:
590 + g - the vector to internally hold the gradient computation
591 . func - the gradient function
592 - ctx - user-defined context for private data for the gradient evaluation routine
593 
594   Calling sequence of func:
595 $      func (Tao tao, Vec x, PetscReal *f, Vec g, void *ctx);
596 
597 + x - input vector
598 . f - objective value (output)
599 . g - gradient value (output)
600 - ctx - [optional] user-defined function context
601 
602   Level: beginner
603 
604 .seealso: TaoSetObjective(), TaoSetGradient(), TaoSetHessian(), TaoSetObjectiveAndGradient()
605 @*/
606 PetscErrorCode TaoGetObjectiveAndGradient(Tao tao, Vec *g, PetscErrorCode (**func)(Tao, Vec, PetscReal*, Vec, void*), void **ctx)
607 {
608   PetscFunctionBegin;
609   PetscValidHeaderSpecific(tao,TAO_CLASSID,1);
610   if (g) *g = tao->gradient;
611   if (func) *func = tao->ops->computeobjectiveandgradient;
612   if (ctx) *ctx = tao->user_objgradP;
613   PetscFunctionReturn(0);
614 }
615 
616 /*@
617   TaoIsObjectiveDefined - Checks to see if the user has
618   declared an objective-only routine.  Useful for determining when
619   it is appropriate to call TaoComputeObjective() or
620   TaoComputeObjectiveAndGradient()
621 
622   Not collective
623 
624   Input Parameter:
625 . tao - the Tao context
626 
627   Output Parameter:
628 . flg - PETSC_TRUE if function routine is set by user, PETSC_FALSE otherwise
629 
630   Level: developer
631 
632 .seealso: TaoSetObjective(), TaoIsGradientDefined(), TaoIsObjectiveAndGradientDefined()
633 @*/
634 PetscErrorCode TaoIsObjectiveDefined(Tao tao, PetscBool *flg)
635 {
636   PetscFunctionBegin;
637   PetscValidHeaderSpecific(tao,TAO_CLASSID,1);
638   if (tao->ops->computeobjective == NULL) *flg = PETSC_FALSE;
639   else *flg = PETSC_TRUE;
640   PetscFunctionReturn(0);
641 }
642 
643 /*@
644   TaoIsGradientDefined - Checks to see if the user has
  declared a gradient-only routine.  Useful for determining when
  it is appropriate to call TaoComputeGradient() or
  TaoComputeObjectiveAndGradient()
648 
649   Not Collective
650 
651   Input Parameter:
652 . tao - the Tao context
653 
654   Output Parameter:
655 . flg - PETSC_TRUE if function routine is set by user, PETSC_FALSE otherwise
656 
657   Level: developer
658 
659 .seealso: TaoSetGradient(), TaoIsObjectiveDefined(), TaoIsObjectiveAndGradientDefined()
660 @*/
661 PetscErrorCode TaoIsGradientDefined(Tao tao, PetscBool *flg)
662 {
663   PetscFunctionBegin;
664   PetscValidHeaderSpecific(tao,TAO_CLASSID,1);
665   if (tao->ops->computegradient == NULL) *flg = PETSC_FALSE;
666   else *flg = PETSC_TRUE;
667   PetscFunctionReturn(0);
668 }
669 
670 /*@
671   TaoIsObjectiveAndGradientDefined - Checks to see if the user has
672   declared a joint objective/gradient routine.  Useful for determining when
673   it is appropriate to call TaoComputeObjective() or
674   TaoComputeObjectiveAndGradient()
675 
676   Not Collective
677 
678   Input Parameter:
679 . tao - the Tao context
680 
681   Output Parameter:
682 . flg - PETSC_TRUE if function routine is set by user, PETSC_FALSE otherwise
683 
684   Level: developer
685 
686 .seealso: TaoSetObjectiveAndGradient(), TaoIsObjectiveDefined(), TaoIsGradientDefined()
687 @*/
688 PetscErrorCode TaoIsObjectiveAndGradientDefined(Tao tao, PetscBool *flg)
689 {
690   PetscFunctionBegin;
691   PetscValidHeaderSpecific(tao,TAO_CLASSID,1);
692   if (tao->ops->computeobjectiveandgradient == NULL) *flg = PETSC_FALSE;
693   else *flg = PETSC_TRUE;
694   PetscFunctionReturn(0);
695 }
696