xref: /petsc/src/tao/interface/taosolver_fg.c (revision 503c0ea9b45bcfbcebbb1ea5341243bbc69f0bea)
1 #include <petsc/private/taoimpl.h> /*I "petsctao.h" I*/
2 
/*@
  TaoSetSolution - Sets the vector holding the initial guess for the solve

  Logically collective on Tao

  Input Parameters:
+ tao - the Tao context
- x0  - the initial guess (may be NULL, which clears any previously set solution vector)

  Level: beginner

.seealso: TaoCreate(), TaoSolve(), TaoGetSolution()
@*/
PetscErrorCode TaoSetSolution(Tao tao, Vec x0)
{
  PetscFunctionBegin;
  PetscValidHeaderSpecific(tao,TAO_CLASSID,1);
  if (x0) PetscValidHeaderSpecific(x0,VEC_CLASSID,2);
  /* Take the new reference before destroying the old one so that
     TaoSetSolution(tao,tao->solution) is safe (self-assignment) */
  PetscCall(PetscObjectReference((PetscObject)x0));
  PetscCall(VecDestroy(&tao->solution));
  tao->solution = x0;
  PetscFunctionReturn(0);
}
25 
/*
  TaoTestGradient - Compares the hand-coded gradient g1 (assumed to have been
  computed at x already by the caller) against a finite-difference
  approximation, and prints norms of the difference.

  Activated by the -tao_test_gradient option; -tao_test_gradient_view
  additionally displays both gradients and their entrywise difference through
  the supplied viewer.  Called from TaoComputeGradient() and
  TaoComputeObjectiveAndGradient().
*/
PetscErrorCode TaoTestGradient(Tao tao,Vec x,Vec g1)
{
  Vec               g2,g3;
  PetscBool         complete_print = PETSC_FALSE,test = PETSC_FALSE;
  PetscReal         hcnorm,fdnorm,hcmax,fdmax,diffmax,diffnorm;
  PetscScalar       dot;
  MPI_Comm          comm;
  PetscViewer       viewer,mviewer;
  PetscViewerFormat format;
  PetscInt          tabs;
  static PetscBool  directionsprinted = PETSC_FALSE;
  PetscErrorCode    ierr;

  PetscFunctionBegin;
  /* explicit ierr assignment kept for the options begin/end pair; presumably
     these macros cannot be nested inside PetscCall() -- confirm against the
     PETSc macro definitions before changing */
  ierr = PetscObjectOptionsBegin((PetscObject)tao);PetscCall(ierr);
  PetscCall(PetscOptionsName("-tao_test_gradient","Compare hand-coded and finite difference Gradients","None",&test));
  PetscCall(PetscOptionsViewer("-tao_test_gradient_view","View difference between hand-coded and finite difference Gradients element entries","None",&mviewer,&format,&complete_print));
  ierr = PetscOptionsEnd();PetscCall(ierr);
  if (!test) {
    if (complete_print) {
      /* the viewer created by PetscOptionsViewer() must be released even when unused */
      PetscCall(PetscViewerDestroy(&mviewer));
    }
    PetscFunctionReturn(0);
  }

  PetscCall(PetscObjectGetComm((PetscObject)tao,&comm));
  PetscCall(PetscViewerASCIIGetStdout(comm,&viewer));
  /* save the current tab level; it is restored at the end of this function */
  PetscCall(PetscViewerASCIIGetTab(viewer, &tabs));
  PetscCall(PetscViewerASCIISetTab(viewer, ((PetscObject)tao)->tablevel));
  PetscCall(PetscViewerASCIIPrintf(viewer,"  ---------- Testing Gradient -------------\n"));
  if (!complete_print && !directionsprinted) {
    PetscCall(PetscViewerASCIIPrintf(viewer,"  Run with -tao_test_gradient_view and optionally -tao_test_gradient <threshold> to show difference\n"));
    PetscCall(PetscViewerASCIIPrintf(viewer,"    of hand-coded and finite difference gradient entries greater than <threshold>.\n"));
  }
  if (!directionsprinted) {
    /* static flag: this explanation is printed at most once per program run */
    PetscCall(PetscViewerASCIIPrintf(viewer,"  Testing hand-coded Gradient, if (for double precision runs) ||G - Gfd||/||G|| is\n"));
    PetscCall(PetscViewerASCIIPrintf(viewer,"    O(1.e-8), the hand-coded Gradient is probably correct.\n"));
    directionsprinted = PETSC_TRUE;
  }
  if (complete_print) {
    PetscCall(PetscViewerPushFormat(mviewer,format));
  }

  /* g2: finite-difference gradient; g3: difference g1 - g2 */
  PetscCall(VecDuplicate(x,&g2));
  PetscCall(VecDuplicate(x,&g3));

  /* Compute finite difference gradient, assume the gradient is already computed by TaoComputeGradient() and put into g1 */
  PetscCall(TaoDefaultComputeGradient(tao,x,g2,NULL));

  PetscCall(VecNorm(g2,NORM_2,&fdnorm));
  PetscCall(VecNorm(g1,NORM_2,&hcnorm));
  PetscCall(VecNorm(g2,NORM_INFINITY,&fdmax));
  PetscCall(VecNorm(g1,NORM_INFINITY,&hcmax));
  PetscCall(VecDot(g1,g2,&dot));
  PetscCall(VecCopy(g1,g3));
  PetscCall(VecAXPY(g3,-1.0,g2));
  PetscCall(VecNorm(g3,NORM_2,&diffnorm));
  PetscCall(VecNorm(g3,NORM_INFINITY,&diffmax));
  PetscCall(PetscViewerASCIIPrintf(viewer,"  ||Gfd|| %g, ||G|| = %g, angle cosine = (Gfd'G)/||Gfd||||G|| = %g\n", (double)fdnorm, (double)hcnorm, (double)(PetscRealPart(dot)/(fdnorm*hcnorm))));
  PetscCall(PetscViewerASCIIPrintf(viewer,"  2-norm ||G - Gfd||/||G|| = %g, ||G - Gfd|| = %g\n",(double)(diffnorm/PetscMax(hcnorm,fdnorm)),(double)diffnorm));
  PetscCall(PetscViewerASCIIPrintf(viewer,"  max-norm ||G - Gfd||/||G|| = %g, ||G - Gfd|| = %g\n",(double)(diffmax/PetscMax(hcmax,fdmax)),(double)diffmax));

  if (complete_print) {
    PetscCall(PetscViewerASCIIPrintf(viewer,"  Hand-coded gradient ----------\n"));
    PetscCall(VecView(g1,mviewer));
    PetscCall(PetscViewerASCIIPrintf(viewer,"  Finite difference gradient ----------\n"));
    PetscCall(VecView(g2,mviewer));
    PetscCall(PetscViewerASCIIPrintf(viewer,"  Hand-coded minus finite-difference gradient ----------\n"));
    PetscCall(VecView(g3,mviewer));
  }
  PetscCall(VecDestroy(&g2));
  PetscCall(VecDestroy(&g3));

  if (complete_print) {
    PetscCall(PetscViewerPopFormat(mviewer));
    PetscCall(PetscViewerDestroy(&mviewer));
  }
  /* restore the tab level saved above */
  PetscCall(PetscViewerASCIISetTab(viewer,tabs));
  PetscFunctionReturn(0);
}
106 
/*@
  TaoComputeGradient - Computes the gradient of the objective function

  Collective on Tao

  Input Parameters:
+ tao - the Tao context
- X - input vector

  Output Parameter:
. G - gradient vector

  Options Database Keys:
+    -tao_test_gradient - compare the user provided gradient with one compute via finite differences to check for errors
-    -tao_test_gradient_view - display the user provided gradient, the finite difference gradient and the difference between them to help users detect the location of errors in the user provided gradient

  Notes:
    TaoComputeGradient() is typically used within minimization implementations,
  so most users would not generally call this routine themselves.

  Level: advanced

.seealso: TaoComputeObjective(), TaoComputeObjectiveAndGradient(), TaoSetGradient()
@*/
PetscErrorCode TaoComputeGradient(Tao tao, Vec X, Vec G)
{
  PetscReal      dummy;  /* discarded objective value when only a combined routine exists */

  PetscFunctionBegin;
  PetscValidHeaderSpecific(tao,TAO_CLASSID,1);
  PetscValidHeaderSpecific(X,VEC_CLASSID,2);
  PetscValidHeaderSpecific(G,VEC_CLASSID,3);
  PetscCheckSameComm(tao,1,X,2);
  PetscCheckSameComm(tao,1,G,3);
  /* mark X read-only so a buggy user callback cannot silently modify it */
  PetscCall(VecLockReadPush(X));
  if (tao->ops->computegradient) {
    /* preferred path: a dedicated gradient routine was set */
    PetscCall(PetscLogEventBegin(TAO_GradientEval,tao,X,G,NULL));
    PetscStackPush("Tao user gradient evaluation routine");
    PetscCall((*tao->ops->computegradient)(tao,X,G,tao->user_gradP));
    PetscStackPop;
    PetscCall(PetscLogEventEnd(TAO_GradientEval,tao,X,G,NULL));
    tao->ngrads++;
  } else if (tao->ops->computeobjectiveandgradient) {
    /* fall back to the combined objective/gradient routine, objective value discarded */
    PetscCall(PetscLogEventBegin(TAO_ObjGradEval,tao,X,G,NULL));
    PetscStackPush("Tao user objective/gradient evaluation routine");
    PetscCall((*tao->ops->computeobjectiveandgradient)(tao,X,&dummy,G,tao->user_objgradP));
    PetscStackPop;
    PetscCall(PetscLogEventEnd(TAO_ObjGradEval,tao,X,G,NULL));
    tao->nfuncgrads++;
  } else SETERRQ(PetscObjectComm((PetscObject)tao),PETSC_ERR_ARG_WRONGSTATE,"TaoSetGradient() has not been called");
  PetscCall(VecLockReadPop(X));

  /* optionally compare the result against a finite-difference gradient (-tao_test_gradient) */
  PetscCall(TaoTestGradient(tao,X,G));
  PetscFunctionReturn(0);
}
162 
/*@
  TaoComputeObjective - Computes the objective function value at a given point

  Collective on Tao

  Input Parameters:
+ tao - the Tao context
- X - input vector

  Output Parameter:
. f - Objective value at X

  Notes:
    TaoComputeObjective() is typically used within minimization implementations,
  so most users would not generally call this routine themselves.

  Level: advanced

.seealso: TaoComputeGradient(), TaoComputeObjectiveAndGradient(), TaoSetObjective()
@*/
PetscErrorCode TaoComputeObjective(Tao tao, Vec X, PetscReal *f)
{
  Vec            temp;  /* scratch gradient, needed only on the combined-routine path */

  PetscFunctionBegin;
  PetscValidHeaderSpecific(tao,TAO_CLASSID,1);
  PetscValidHeaderSpecific(X,VEC_CLASSID,2);
  PetscCheckSameComm(tao,1,X,2);
  /* mark X read-only so a buggy user callback cannot silently modify it */
  PetscCall(VecLockReadPush(X));
  if (tao->ops->computeobjective) {
    PetscCall(PetscLogEventBegin(TAO_ObjectiveEval,tao,X,NULL,NULL));
    PetscStackPush("Tao user objective evaluation routine");
    PetscCall((*tao->ops->computeobjective)(tao,X,f,tao->user_objP));
    PetscStackPop;
    PetscCall(PetscLogEventEnd(TAO_ObjectiveEval,tao,X,NULL,NULL));
    tao->nfuncs++;
  } else if (tao->ops->computeobjectiveandgradient) {
    /* no objective-only routine: call the combined routine and throw the gradient away */
    PetscCall(PetscInfo(tao,"Duplicating variable vector in order to call func/grad routine\n"));
    PetscCall(VecDuplicate(X,&temp));
    PetscCall(PetscLogEventBegin(TAO_ObjGradEval,tao,X,NULL,NULL));
    PetscStackPush("Tao user objective/gradient evaluation routine");
    PetscCall((*tao->ops->computeobjectiveandgradient)(tao,X,f,temp,tao->user_objgradP));
    PetscStackPop;
    PetscCall(PetscLogEventEnd(TAO_ObjGradEval,tao,X,NULL,NULL));
    PetscCall(VecDestroy(&temp));
    tao->nfuncgrads++;
  } else SETERRQ(PetscObjectComm((PetscObject)tao),PETSC_ERR_ARG_WRONGSTATE,"TaoSetObjective() has not been called");
  PetscCall(PetscInfo(tao,"TAO Function evaluation: %20.19e\n",(double)(*f)));
  PetscCall(VecLockReadPop(X));
  PetscFunctionReturn(0);
}
214 
/*@
  TaoComputeObjectiveAndGradient - Computes the objective function value and its gradient at a given point

  Collective on Tao

  Input Parameters:
+ tao - the Tao context
- X - input vector

  Output Parameters:
+ f - Objective value at X
- g - Gradient vector at X

  Notes:
    TaoComputeObjectiveAndGradient() is typically used within minimization implementations,
  so most users would not generally call this routine themselves.

  Level: advanced

.seealso: TaoComputeGradient(), TaoComputeObjective(), TaoSetObjectiveAndGradient()
@*/
PetscErrorCode TaoComputeObjectiveAndGradient(Tao tao, Vec X, PetscReal *f, Vec G)
{
  PetscFunctionBegin;
  PetscValidHeaderSpecific(tao,TAO_CLASSID,1);
  PetscValidHeaderSpecific(X,VEC_CLASSID,2);
  PetscValidHeaderSpecific(G,VEC_CLASSID,4);
  PetscCheckSameComm(tao,1,X,2);
  PetscCheckSameComm(tao,1,G,4);
  /* mark X read-only so a buggy user callback cannot silently modify it */
  PetscCall(VecLockReadPush(X));
  if (tao->ops->computeobjectiveandgradient) {
    PetscCall(PetscLogEventBegin(TAO_ObjGradEval,tao,X,G,NULL));
    if (tao->ops->computegradient == TaoDefaultComputeGradient) {
      /* gradient is computed by finite differences: evaluate the objective
         separately, then build the FD gradient from objective evaluations
         (presumably avoids nested event logging -- see TaoDefaultComputeGradient) */
      PetscCall(TaoComputeObjective(tao,X,f));
      PetscCall(TaoDefaultComputeGradient(tao,X,G,NULL));
    } else {
      PetscStackPush("Tao user objective/gradient evaluation routine");
      PetscCall((*tao->ops->computeobjectiveandgradient)(tao,X,f,G,tao->user_objgradP));
      PetscStackPop;
    }
    PetscCall(PetscLogEventEnd(TAO_ObjGradEval,tao,X,G,NULL));
    tao->nfuncgrads++;
  } else if (tao->ops->computeobjective && tao->ops->computegradient) {
    /* no combined routine: evaluate objective and gradient separately */
    PetscCall(PetscLogEventBegin(TAO_ObjectiveEval,tao,X,NULL,NULL));
    PetscStackPush("Tao user objective evaluation routine");
    PetscCall((*tao->ops->computeobjective)(tao,X,f,tao->user_objP));
    PetscStackPop;
    PetscCall(PetscLogEventEnd(TAO_ObjectiveEval,tao,X,NULL,NULL));
    tao->nfuncs++;
    PetscCall(PetscLogEventBegin(TAO_GradientEval,tao,X,G,NULL));
    PetscStackPush("Tao user gradient evaluation routine");
    PetscCall((*tao->ops->computegradient)(tao,X,G,tao->user_gradP));
    PetscStackPop;
    PetscCall(PetscLogEventEnd(TAO_GradientEval,tao,X,G,NULL));
    tao->ngrads++;
  } else SETERRQ(PetscObjectComm((PetscObject)tao),PETSC_ERR_ARG_WRONGSTATE,"TaoSetObjective() or TaoSetGradient() not set");
  PetscCall(PetscInfo(tao,"TAO Function evaluation: %20.19e\n",(double)(*f)));
  PetscCall(VecLockReadPop(X));

  /* optionally compare the result against a finite-difference gradient (-tao_test_gradient) */
  PetscCall(TaoTestGradient(tao,X,G));
  PetscFunctionReturn(0);
}
277 
278 /*@C
279   TaoSetObjective - Sets the function evaluation routine for minimization
280 
281   Logically collective on Tao
282 
283   Input Parameters:
284 + tao - the Tao context
285 . func - the objective function
286 - ctx - [optional] user-defined context for private data for the function evaluation
287         routine (may be NULL)
288 
289   Calling sequence of func:
290 $      func (Tao tao, Vec x, PetscReal *f, void *ctx);
291 
292 + x - input vector
293 . f - function value
294 - ctx - [optional] user-defined function context
295 
296   Level: beginner
297 
298 .seealso: TaoSetGradient(), TaoSetHessian(), TaoSetObjectiveAndGradient(), TaoGetObjective()
299 @*/
300 PetscErrorCode TaoSetObjective(Tao tao, PetscErrorCode (*func)(Tao, Vec, PetscReal*,void*),void *ctx)
301 {
302   PetscFunctionBegin;
303   PetscValidHeaderSpecific(tao,TAO_CLASSID,1);
304   if (ctx) tao->user_objP = ctx;
305   if (func) tao->ops->computeobjective = func;
306   PetscFunctionReturn(0);
307 }
308 
309 /*@C
310   TaoGetObjective - Gets the function evaluation routine for minimization
311 
312   Not collective
313 
314   Input Parameter:
315 . tao - the Tao context
316 
317   Output Parameters
318 + func - the objective function
319 - ctx - the user-defined context for private data for the function evaluation
320 
321   Calling sequence of func:
322 $      func (Tao tao, Vec x, PetscReal *f, void *ctx);
323 
324 + x - input vector
325 . f - function value
326 - ctx - [optional] user-defined function context
327 
328   Level: beginner
329 
330 .seealso: TaoSetGradient(), TaoSetHessian(), TaoSetObjective()
331 @*/
332 PetscErrorCode TaoGetObjective(Tao tao, PetscErrorCode (**func)(Tao, Vec, PetscReal*,void*),void **ctx)
333 {
334   PetscFunctionBegin;
335   PetscValidHeaderSpecific(tao,TAO_CLASSID,1);
336   if (func) *func = tao->ops->computeobjective;
337   if (ctx) *ctx = tao->user_objP;
338   PetscFunctionReturn(0);
339 }
340 
341 /*@C
342   TaoSetResidualRoutine - Sets the residual evaluation routine for least-square applications
343 
344   Logically collective on Tao
345 
346   Input Parameters:
347 + tao - the Tao context
348 . func - the residual evaluation routine
349 - ctx - [optional] user-defined context for private data for the function evaluation
350         routine (may be NULL)
351 
352   Calling sequence of func:
353 $      func (Tao tao, Vec x, Vec f, void *ctx);
354 
355 + x - input vector
356 . f - function value vector
357 - ctx - [optional] user-defined function context
358 
359   Level: beginner
360 
361 .seealso: TaoSetObjective(), TaoSetJacobianRoutine()
362 @*/
363 PetscErrorCode TaoSetResidualRoutine(Tao tao, Vec res, PetscErrorCode (*func)(Tao, Vec, Vec, void*),void *ctx)
364 {
365   PetscFunctionBegin;
366   PetscValidHeaderSpecific(tao,TAO_CLASSID,1);
367   PetscValidHeaderSpecific(res,VEC_CLASSID,2);
368   PetscCall(PetscObjectReference((PetscObject)res));
369   if (tao->ls_res) {
370     PetscCall(VecDestroy(&tao->ls_res));
371   }
372   tao->ls_res = res;
373   tao->user_lsresP = ctx;
374   tao->ops->computeresidual = func;
375 
376   PetscFunctionReturn(0);
377 }
378 
379 /*@
380   TaoSetResidualWeights - Give weights for the residual values. A vector can be used if only diagonal terms are used, otherwise a matrix can be give. If this function is not used, or if sigma_v and sigma_w are both NULL, then the default identity matrix will be used for weights.
381 
382   Collective on Tao
383 
384   Input Parameters:
385 + tao - the Tao context
386 . sigma_v - vector of weights (diagonal terms only)
387 . n       - the number of weights (if using off-diagonal)
388 . rows    - index list of rows for sigma_w
389 . cols    - index list of columns for sigma_w
390 - vals - array of weights
391 
392   Note: Either sigma_v or sigma_w (or both) should be NULL
393 
394   Level: intermediate
395 
396 .seealso: TaoSetResidualRoutine()
397 @*/
398 PetscErrorCode TaoSetResidualWeights(Tao tao, Vec sigma_v, PetscInt n, PetscInt *rows, PetscInt *cols, PetscReal *vals)
399 {
400   PetscInt       i;
401 
402   PetscFunctionBegin;
403   PetscValidHeaderSpecific(tao,TAO_CLASSID,1);
404   if (sigma_v) PetscValidHeaderSpecific(sigma_v,VEC_CLASSID,2);
405   PetscCall(PetscObjectReference((PetscObject)sigma_v));
406   PetscCall(VecDestroy(&tao->res_weights_v));
407   tao->res_weights_v = sigma_v;
408   if (vals) {
409     PetscCall(PetscFree(tao->res_weights_rows));
410     PetscCall(PetscFree(tao->res_weights_cols));
411     PetscCall(PetscFree(tao->res_weights_w));
412     PetscCall(PetscMalloc1(n,&tao->res_weights_rows));
413     PetscCall(PetscMalloc1(n,&tao->res_weights_cols));
414     PetscCall(PetscMalloc1(n,&tao->res_weights_w));
415     tao->res_weights_n = n;
416     for (i=0;i<n;i++) {
417       tao->res_weights_rows[i] = rows[i];
418       tao->res_weights_cols[i] = cols[i];
419       tao->res_weights_w[i] = vals[i];
420     }
421   } else {
422     tao->res_weights_n = 0;
423     tao->res_weights_rows = NULL;
424     tao->res_weights_cols = NULL;
425   }
426   PetscFunctionReturn(0);
427 }
428 
/*@
  TaoComputeResidual - Computes a least-squares residual vector at a given point

  Collective on Tao

  Input Parameters:
+ tao - the Tao context
- X - input vector

  Output Parameter:
. f - Objective vector at X

  Notes:
    TaoComputeResidual() is typically used within minimization implementations,
  so most users would not generally call this routine themselves.

  Level: advanced

.seealso: TaoSetResidualRoutine()
@*/
PetscErrorCode TaoComputeResidual(Tao tao, Vec X, Vec F)
{
  PetscFunctionBegin;
  PetscValidHeaderSpecific(tao,TAO_CLASSID,1);
  PetscValidHeaderSpecific(X,VEC_CLASSID,2);
  PetscValidHeaderSpecific(F,VEC_CLASSID,3);
  PetscCheckSameComm(tao,1,X,2);
  PetscCheckSameComm(tao,1,F,3);
  if (tao->ops->computeresidual) {
    /* residual evaluations are logged under the objective-evaluation event
       and counted in tao->nfuncs */
    PetscCall(PetscLogEventBegin(TAO_ObjectiveEval,tao,X,NULL,NULL));
    PetscStackPush("Tao user least-squares residual evaluation routine");
    PetscCall((*tao->ops->computeresidual)(tao,X,F,tao->user_lsresP));
    PetscStackPop;
    PetscCall(PetscLogEventEnd(TAO_ObjectiveEval,tao,X,NULL,NULL));
    tao->nfuncs++;
  } else SETERRQ(PetscObjectComm((PetscObject)tao),PETSC_ERR_ARG_WRONGSTATE,"TaoSetResidualRoutine() has not been called");
  PetscCall(PetscInfo(tao,"TAO least-squares residual evaluation.\n"));
  PetscFunctionReturn(0);
}
468 
469 /*@C
470   TaoSetGradient - Sets the gradient evaluation routine for minimization
471 
472   Logically collective on Tao
473 
474   Input Parameters:
475 + tao - the Tao context
476 . g - [optional] the vector to internally hold the gradient computation
477 . func - the gradient function
478 - ctx - [optional] user-defined context for private data for the gradient evaluation
479         routine (may be NULL)
480 
481   Calling sequence of func:
482 $      func (Tao tao, Vec x, Vec g, void *ctx);
483 
484 + x - input vector
485 . g - gradient value (output)
486 - ctx - [optional] user-defined function context
487 
488   Level: beginner
489 
490 .seealso: TaoSetObjective(), TaoSetHessian(), TaoSetObjectiveAndGradient(), TaoGetGradient()
491 @*/
492 PetscErrorCode TaoSetGradient(Tao tao, Vec g, PetscErrorCode (*func)(Tao, Vec, Vec, void*),void *ctx)
493 {
494   PetscFunctionBegin;
495   PetscValidHeaderSpecific(tao,TAO_CLASSID,1);
496   if (g) {
497     PetscValidHeaderSpecific(g,VEC_CLASSID,2);
498     PetscCheckSameComm(tao,1,g,2);
499     PetscCall(PetscObjectReference((PetscObject)g));
500     PetscCall(VecDestroy(&tao->gradient));
501     tao->gradient = g;
502   }
503   if (func) tao->ops->computegradient = func;
504   if (ctx) tao->user_gradP = ctx;
505   PetscFunctionReturn(0);
506 }
507 
508 /*@C
509   TaoGetGradient - Gets the gradient evaluation routine for minimization
510 
511   Not collective
512 
513   Input Parameter:
514 . tao - the Tao context
515 
516   Output Parameters:
517 + g - the vector to internally hold the gradient computation
518 . func - the gradient function
519 - ctx - user-defined context for private data for the gradient evaluation routine
520 
521   Calling sequence of func:
522 $      func (Tao tao, Vec x, Vec g, void *ctx);
523 
524 + x - input vector
525 . g - gradient value (output)
526 - ctx - [optional] user-defined function context
527 
528   Level: beginner
529 
530 .seealso: TaoSetObjective(), TaoSetHessian(), TaoSetObjectiveAndGradient(), TaoSetGradient()
531 @*/
532 PetscErrorCode TaoGetGradient(Tao tao, Vec *g, PetscErrorCode (**func)(Tao, Vec, Vec, void*),void **ctx)
533 {
534   PetscFunctionBegin;
535   PetscValidHeaderSpecific(tao,TAO_CLASSID,1);
536   if (g) *g = tao->gradient;
537   if (func) *func = tao->ops->computegradient;
538   if (ctx) *ctx = tao->user_gradP;
539   PetscFunctionReturn(0);
540 }
541 
542 /*@C
543   TaoSetObjectiveAndGradient - Sets a combined objective function and gradient evaluation routine for minimization
544 
545   Logically collective on Tao
546 
547   Input Parameters:
548 + tao - the Tao context
549 . g - [optional] the vector to internally hold the gradient computation
550 . func - the gradient function
551 - ctx - [optional] user-defined context for private data for the gradient evaluation
552         routine (may be NULL)
553 
554   Calling sequence of func:
555 $      func (Tao tao, Vec x, PetscReal *f, Vec g, void *ctx);
556 
557 + x - input vector
558 . f - objective value (output)
559 . g - gradient value (output)
560 - ctx - [optional] user-defined function context
561 
562   Level: beginner
563 
564 .seealso: TaoSetObjective(), TaoSetHessian(), TaoSetGradient(), TaoGetObjectiveAndGradient()
565 @*/
566 PetscErrorCode TaoSetObjectiveAndGradient(Tao tao, Vec g, PetscErrorCode (*func)(Tao, Vec, PetscReal*, Vec, void*), void *ctx)
567 {
568   PetscFunctionBegin;
569   PetscValidHeaderSpecific(tao,TAO_CLASSID,1);
570   if (g) {
571     PetscValidHeaderSpecific(g,VEC_CLASSID,2);
572     PetscCheckSameComm(tao,1,g,2);
573     PetscCall(PetscObjectReference((PetscObject)g));
574     PetscCall(VecDestroy(&tao->gradient));
575     tao->gradient = g;
576   }
577   if (ctx) tao->user_objgradP = ctx;
578   if (func) tao->ops->computeobjectiveandgradient = func;
579   PetscFunctionReturn(0);
580 }
581 
582 /*@C
583   TaoGetObjectiveAndGradient - Gets a combined objective function and gradient evaluation routine for minimization
584 
585   Not collective
586 
587   Input Parameter:
588 . tao - the Tao context
589 
590   Output Parameters:
591 + g - the vector to internally hold the gradient computation
592 . func - the gradient function
593 - ctx - user-defined context for private data for the gradient evaluation routine
594 
595   Calling sequence of func:
596 $      func (Tao tao, Vec x, PetscReal *f, Vec g, void *ctx);
597 
598 + x - input vector
599 . f - objective value (output)
600 . g - gradient value (output)
601 - ctx - [optional] user-defined function context
602 
603   Level: beginner
604 
605 .seealso: TaoSetObjective(), TaoSetGradient(), TaoSetHessian(), TaoSetObjectiveAndGradient()
606 @*/
607 PetscErrorCode TaoGetObjectiveAndGradient(Tao tao, Vec *g, PetscErrorCode (**func)(Tao, Vec, PetscReal*, Vec, void*), void **ctx)
608 {
609   PetscFunctionBegin;
610   PetscValidHeaderSpecific(tao,TAO_CLASSID,1);
611   if (g) *g = tao->gradient;
612   if (func) *func = tao->ops->computeobjectiveandgradient;
613   if (ctx) *ctx = tao->user_objgradP;
614   PetscFunctionReturn(0);
615 }
616 
617 /*@
618   TaoIsObjectiveDefined - Checks to see if the user has
619   declared an objective-only routine.  Useful for determining when
620   it is appropriate to call TaoComputeObjective() or
621   TaoComputeObjectiveAndGradient()
622 
623   Not collective
624 
625   Input Parameter:
626 . tao - the Tao context
627 
628   Output Parameter:
629 . flg - PETSC_TRUE if function routine is set by user, PETSC_FALSE otherwise
630 
631   Level: developer
632 
633 .seealso: TaoSetObjective(), TaoIsGradientDefined(), TaoIsObjectiveAndGradientDefined()
634 @*/
635 PetscErrorCode TaoIsObjectiveDefined(Tao tao, PetscBool *flg)
636 {
637   PetscFunctionBegin;
638   PetscValidHeaderSpecific(tao,TAO_CLASSID,1);
639   if (tao->ops->computeobjective == NULL) *flg = PETSC_FALSE;
640   else *flg = PETSC_TRUE;
641   PetscFunctionReturn(0);
642 }
643 
644 /*@
645   TaoIsGradientDefined - Checks to see if the user has
646   declared an objective-only routine.  Useful for determining when
647   it is appropriate to call TaoComputeGradient() or
648   TaoComputeGradientAndGradient()
649 
650   Not Collective
651 
652   Input Parameter:
653 . tao - the Tao context
654 
655   Output Parameter:
656 . flg - PETSC_TRUE if function routine is set by user, PETSC_FALSE otherwise
657 
658   Level: developer
659 
660 .seealso: TaoSetGradient(), TaoIsObjectiveDefined(), TaoIsObjectiveAndGradientDefined()
661 @*/
662 PetscErrorCode TaoIsGradientDefined(Tao tao, PetscBool *flg)
663 {
664   PetscFunctionBegin;
665   PetscValidHeaderSpecific(tao,TAO_CLASSID,1);
666   if (tao->ops->computegradient == NULL) *flg = PETSC_FALSE;
667   else *flg = PETSC_TRUE;
668   PetscFunctionReturn(0);
669 }
670 
671 /*@
672   TaoIsObjectiveAndGradientDefined - Checks to see if the user has
673   declared a joint objective/gradient routine.  Useful for determining when
674   it is appropriate to call TaoComputeObjective() or
675   TaoComputeObjectiveAndGradient()
676 
677   Not Collective
678 
679   Input Parameter:
680 . tao - the Tao context
681 
682   Output Parameter:
683 . flg - PETSC_TRUE if function routine is set by user, PETSC_FALSE otherwise
684 
685   Level: developer
686 
687 .seealso: TaoSetObjectiveAndGradient(), TaoIsObjectiveDefined(), TaoIsGradientDefined()
688 @*/
689 PetscErrorCode TaoIsObjectiveAndGradientDefined(Tao tao, PetscBool *flg)
690 {
691   PetscFunctionBegin;
692   PetscValidHeaderSpecific(tao,TAO_CLASSID,1);
693   if (tao->ops->computeobjectiveandgradient == NULL) *flg = PETSC_FALSE;
694   else *flg = PETSC_TRUE;
695   PetscFunctionReturn(0);
696 }
697