xref: /petsc/src/tao/interface/taosolver_fg.c (revision 2e0aa6f095b7ddcaec754bc09276df3f6c1ffe42)
1 #include <petsc/private/taoimpl.h> /*I "petsctao.h" I*/
2 
3 /*@
4   TaoSetInitialVector - Sets the initial guess for the solve
5 
6   Logically collective on Tao
7 
8   Input Parameters:
9 + tao - the Tao context
10 - x0  - the initial guess
11 
12   Level: beginner
13 .seealso: TaoCreate(), TaoSolve()
14 @*/
15 
16 PetscErrorCode TaoSetInitialVector(Tao tao, Vec x0)
17 {
18   PetscErrorCode ierr;
19 
20   PetscFunctionBegin;
21   PetscValidHeaderSpecific(tao,TAO_CLASSID,1);
22   if (x0) {
23     PetscValidHeaderSpecific(x0,VEC_CLASSID,2);
24     PetscObjectReference((PetscObject)x0);
25   }
26   ierr = VecDestroy(&tao->solution);CHKERRQ(ierr);
27   tao->solution = x0;
28   PetscFunctionReturn(0);
29 }
30 
31 PetscErrorCode TaoTestGradient(Tao tao,Vec x,Vec g1)
32 {
33   Vec               g2,g3;
34   PetscBool         complete_print = PETSC_FALSE,test = PETSC_FALSE;
35   PetscReal         hcnorm,fdnorm,hcmax,fdmax,diffmax,diffnorm;
36   PetscScalar       dot;
37   MPI_Comm          comm;
38   PetscViewer       viewer,mviewer;
39   PetscViewerFormat format;
40   PetscInt          tabs;
41   static PetscBool  directionsprinted = PETSC_FALSE;
42   PetscErrorCode    ierr;
43 
44   PetscFunctionBegin;
45   ierr = PetscObjectOptionsBegin((PetscObject)tao);CHKERRQ(ierr);
46   ierr = PetscOptionsName("-tao_test_gradient","Compare hand-coded and finite difference Gradients","None",&test);CHKERRQ(ierr);
47   ierr = PetscOptionsViewer("-tao_test_gradient_view","View difference between hand-coded and finite difference Gradients element entries","None",&mviewer,&format,&complete_print);CHKERRQ(ierr);
48   ierr = PetscOptionsEnd();CHKERRQ(ierr);
49   if (!test) PetscFunctionReturn(0);
50 
51   ierr = PetscObjectGetComm((PetscObject)tao,&comm);CHKERRQ(ierr);
52   ierr = PetscViewerASCIIGetStdout(comm,&viewer);CHKERRQ(ierr);
53   ierr = PetscViewerASCIIGetTab(viewer, &tabs);CHKERRQ(ierr);
54   ierr = PetscViewerASCIISetTab(viewer, ((PetscObject)tao)->tablevel);CHKERRQ(ierr);
55   ierr = PetscViewerASCIIPrintf(viewer,"  ---------- Testing Gradient -------------\n");CHKERRQ(ierr);
56   if (!complete_print && !directionsprinted) {
57     ierr = PetscViewerASCIIPrintf(viewer,"  Run with -tao_test_gradient_view and optionally -tao_test_gradient <threshold> to show difference\n");CHKERRQ(ierr);
58     ierr = PetscViewerASCIIPrintf(viewer,"    of hand-coded and finite difference gradient entries greater than <threshold>.\n");CHKERRQ(ierr);
59   }
60   if (!directionsprinted) {
61     ierr = PetscViewerASCIIPrintf(viewer,"  Testing hand-coded Gradient, if (for double precision runs) ||G - Gfd||_F/||G||_F is\n");CHKERRQ(ierr);
62     ierr = PetscViewerASCIIPrintf(viewer,"    O(1.e-8), the hand-coded Hessian is probably correct.\n");CHKERRQ(ierr);
63     directionsprinted = PETSC_TRUE;
64   }
65   if (complete_print) {
66     ierr = PetscViewerPushFormat(mviewer,format);CHKERRQ(ierr);
67   }
68 
69   ierr = VecDuplicate(x,&g2);CHKERRQ(ierr);
70   ierr = VecDuplicate(x,&g3);CHKERRQ(ierr);
71 
72   /* Compute finite difference gradient, assume the gradient is already computed by TaoComputeGradient() and put into g1 */
73   ierr = TaoDefaultComputeGradient(tao,x,g2,NULL);CHKERRQ(ierr);
74 
75   ierr = VecNorm(g2,NORM_2,&fdnorm);CHKERRQ(ierr);
76   ierr = VecNorm(g1,NORM_2,&hcnorm);CHKERRQ(ierr);
77   ierr = VecNorm(g2,NORM_INFINITY,&fdmax);CHKERRQ(ierr);
78   ierr = VecNorm(g1,NORM_INFINITY,&hcmax);CHKERRQ(ierr);
79   ierr = VecDot(g1,g2,&dot);CHKERRQ(ierr);
80   ierr = VecCopy(g1,g3);CHKERRQ(ierr);
81   ierr = VecAXPY(g3,-1.0,g2);CHKERRQ(ierr);
82   ierr = VecNorm(g3,NORM_2,&diffnorm);CHKERRQ(ierr);
83   ierr = VecNorm(g3,NORM_INFINITY,&diffmax);CHKERRQ(ierr);
84   ierr = PetscViewerASCIIPrintf(viewer,"  ||Gfd|| %g, ||G|| = %g, angle cosine = (Gfd'G)/||Gfd||||G|| = %g\n", (double)fdnorm, (double)hcnorm, (double)(PetscRealPart(dot)/(fdnorm*hcnorm)));CHKERRQ(ierr);
85   ierr = PetscViewerASCIIPrintf(viewer,"  2-norm ||G - Gfd||/||G|| = %g, ||G - Gfd|| = %g\n",(double)(diffnorm/PetscMax(hcnorm,fdnorm)),(double)diffnorm);CHKERRQ(ierr);
86   ierr = PetscViewerASCIIPrintf(viewer,"  max-norm ||G - Gfd||/||G|| = %g, ||G - Gfd|| = %g\n",(double)(diffmax/PetscMax(hcmax,fdmax)),(double)diffmax);CHKERRQ(ierr);
87 
88   if (complete_print) {
89     ierr = PetscViewerASCIIPrintf(viewer,"  Hand-coded gradient ----------\n");CHKERRQ(ierr);
90     ierr = VecView(g1,mviewer);CHKERRQ(ierr);
91     ierr = PetscViewerASCIIPrintf(viewer,"  Finite difference gradient ----------\n");CHKERRQ(ierr);
92     ierr = VecView(g2,mviewer);CHKERRQ(ierr);
93     ierr = PetscViewerASCIIPrintf(viewer,"  Hand-coded minus finite-difference gradient ----------\n");CHKERRQ(ierr);
94     ierr = VecView(g3,mviewer);CHKERRQ(ierr);
95   }
96   ierr = VecDestroy(&g2);CHKERRQ(ierr);
97   ierr = VecDestroy(&g3);CHKERRQ(ierr);
98 
99   if (complete_print) {
100     ierr = PetscViewerPopFormat(mviewer);CHKERRQ(ierr);
101   }
102   ierr = PetscViewerASCIISetTab(viewer,tabs);CHKERRQ(ierr);
103   PetscFunctionReturn(0);
104 }
105 
106 /*@
107   TaoComputeGradient - Computes the gradient of the objective function
108 
109   Collective on Tao
110 
111   Input Parameters:
112 + tao - the Tao context
113 - X - input vector
114 
115   Output Parameter:
116 . G - gradient vector
117 
118   Options Database Keys:
119 +    -tao_test_gradient - compare the user provided gradient with one compute via finite differences to check for errors
120 -    -tao_test_gradient_view - display the user provided gradient, the finite difference gradient and the difference between them to help users detect the location of errors in the user provided gradient
121 
122   Notes:
123     TaoComputeGradient() is typically used within minimization implementations,
124   so most users would not generally call this routine themselves.
125 
126   Level: advanced
127 
128 .seealso: TaoComputeObjective(), TaoComputeObjectiveAndGradient(), TaoSetGradientRoutine()
129 @*/
130 PetscErrorCode TaoComputeGradient(Tao tao, Vec X, Vec G)
131 {
132   PetscErrorCode ierr;
133   PetscReal      dummy;
134 
135   PetscFunctionBegin;
136   PetscValidHeaderSpecific(tao,TAO_CLASSID,1);
137   PetscValidHeaderSpecific(X,VEC_CLASSID,2);
138   PetscValidHeaderSpecific(G,VEC_CLASSID,2);
139   PetscCheckSameComm(tao,1,X,2);
140   PetscCheckSameComm(tao,1,G,3);
141   ierr = VecLockPush(X);CHKERRQ(ierr);
142   if (tao->ops->computegradient) {
143     ierr = PetscLogEventBegin(TAO_GradientEval,tao,X,G,NULL);CHKERRQ(ierr);
144     PetscStackPush("Tao user gradient evaluation routine");
145     ierr = (*tao->ops->computegradient)(tao,X,G,tao->user_gradP);CHKERRQ(ierr);
146     PetscStackPop;
147     ierr = PetscLogEventEnd(TAO_GradientEval,tao,X,G,NULL);CHKERRQ(ierr);
148     tao->ngrads++;
149   } else if (tao->ops->computeobjectiveandgradient) {
150     ierr = PetscLogEventBegin(TAO_ObjGradEval,tao,X,G,NULL);CHKERRQ(ierr);
151     PetscStackPush("Tao user objective/gradient evaluation routine");
152     ierr = (*tao->ops->computeobjectiveandgradient)(tao,X,&dummy,G,tao->user_objgradP);CHKERRQ(ierr);
153     PetscStackPop;
154     ierr = PetscLogEventEnd(TAO_ObjGradEval,tao,X,G,NULL);CHKERRQ(ierr);
155     tao->nfuncgrads++;
156   } else SETERRQ(PETSC_COMM_SELF,PETSC_ERR_ARG_WRONGSTATE,"TaoSetGradientRoutine() has not been called");
157   ierr = VecLockPop(X);CHKERRQ(ierr);
158 
159   ierr = TaoTestGradient(tao,X,G);CHKERRQ(ierr);
160   PetscFunctionReturn(0);
161 }
162 
/*@
  TaoComputeObjective - Computes the objective function value at a given point

  Collective on Tao

  Input Parameters:
+ tao - the Tao context
- X - input vector

  Output Parameter:
. f - Objective value at X

  Notes:
    TaoComputeObjective() is typically used within minimization implementations,
  so most users would not generally call this routine themselves.

  Level: advanced

.seealso: TaoComputeGradient(), TaoComputeObjectiveAndGradient(), TaoSetObjectiveRoutine()
@*/
PetscErrorCode TaoComputeObjective(Tao tao, Vec X, PetscReal *f)
{
  PetscErrorCode ierr;
  Vec            temp;

  PetscFunctionBegin;
  PetscValidHeaderSpecific(tao,TAO_CLASSID,1);
  PetscValidHeaderSpecific(X,VEC_CLASSID,2);
  PetscCheckSameComm(tao,1,X,2);
  /* lock X read-only so the user callback cannot modify the iterate */
  ierr = VecLockPush(X);CHKERRQ(ierr);
  if (tao->ops->computeobjective) {
    ierr = PetscLogEventBegin(TAO_ObjectiveEval,tao,X,NULL,NULL);CHKERRQ(ierr);
    PetscStackPush("Tao user objective evaluation routine");
    ierr = (*tao->ops->computeobjective)(tao,X,f,tao->user_objP);CHKERRQ(ierr);
    PetscStackPop;
    ierr = PetscLogEventEnd(TAO_ObjectiveEval,tao,X,NULL,NULL);CHKERRQ(ierr);
    tao->nfuncs++;
  } else if (tao->ops->computeobjectiveandgradient) {
    /* only a combined objective/gradient callback exists: evaluate it into a
       throwaway gradient vector and keep just the function value */
    ierr = PetscInfo(tao,"Duplicating variable vector in order to call func/grad routine\n");CHKERRQ(ierr);
    ierr = VecDuplicate(X,&temp);CHKERRQ(ierr);
    ierr = PetscLogEventBegin(TAO_ObjGradEval,tao,X,NULL,NULL);CHKERRQ(ierr);
    PetscStackPush("Tao user objective/gradient evaluation routine");
    ierr = (*tao->ops->computeobjectiveandgradient)(tao,X,f,temp,tao->user_objgradP);CHKERRQ(ierr);
    PetscStackPop;
    ierr = PetscLogEventEnd(TAO_ObjGradEval,tao,X,NULL,NULL);CHKERRQ(ierr);
    ierr = VecDestroy(&temp);CHKERRQ(ierr);
    tao->nfuncgrads++;
  }  else SETERRQ(PETSC_COMM_SELF,PETSC_ERR_ARG_WRONGSTATE,"TaoSetObjectiveRoutine() has not been called");
  ierr = PetscInfo1(tao,"TAO Function evaluation: %20.19e\n",(double)(*f));CHKERRQ(ierr);
  ierr = VecLockPop(X);CHKERRQ(ierr);
  PetscFunctionReturn(0);
}
215 
/*@
  TaoComputeObjectiveAndGradient - Computes the objective function value and its gradient at a given point

  Collective on Tao

  Input Parameters:
+ tao - the Tao context
- X - input vector

  Output Parameters:
+ f - Objective value at X
- g - Gradient vector at X

  Notes:
    TaoComputeObjectiveAndGradient() is typically used within minimization implementations,
  so most users would not generally call this routine themselves.

  Level: advanced

.seealso: TaoComputeGradient(), TaoComputeObjectiveAndGradient(), TaoSetObjectiveRoutine()
@*/
PetscErrorCode TaoComputeObjectiveAndGradient(Tao tao, Vec X, PetscReal *f, Vec G)
{
  PetscErrorCode ierr;

  PetscFunctionBegin;
  PetscValidHeaderSpecific(tao,TAO_CLASSID,1);
  PetscValidHeaderSpecific(X,VEC_CLASSID,2);
  PetscValidHeaderSpecific(G,VEC_CLASSID,4);
  PetscCheckSameComm(tao,1,X,2);
  PetscCheckSameComm(tao,1,G,4);
  /* lock X read-only so the user callbacks cannot modify the iterate */
  ierr = VecLockPush(X);CHKERRQ(ierr);
  if (tao->ops->computeobjectiveandgradient) {
    ierr = PetscLogEventBegin(TAO_ObjGradEval,tao,X,G,NULL);CHKERRQ(ierr);
    if (tao->ops->computegradient == TaoDefaultComputeGradient) {
      /* gradient is the finite-difference default: evaluate the objective and the
         FD gradient separately rather than going through the combined callback */
      ierr = TaoComputeObjective(tao,X,f);CHKERRQ(ierr);
      ierr = TaoDefaultComputeGradient(tao,X,G,NULL);CHKERRQ(ierr);
    } else {
      PetscStackPush("Tao user objective/gradient evaluation routine");
      ierr = (*tao->ops->computeobjectiveandgradient)(tao,X,f,G,tao->user_objgradP);CHKERRQ(ierr);
      PetscStackPop;
    }
    ierr = PetscLogEventEnd(TAO_ObjGradEval,tao,X,G,NULL);CHKERRQ(ierr);
    tao->nfuncgrads++;
  } else if (tao->ops->computeobjective && tao->ops->computegradient) {
    /* no combined callback: call the separate objective and gradient routines,
       logging each under its own event */
    ierr = PetscLogEventBegin(TAO_ObjectiveEval,tao,X,NULL,NULL);CHKERRQ(ierr);
    PetscStackPush("Tao user objective evaluation routine");
    ierr = (*tao->ops->computeobjective)(tao,X,f,tao->user_objP);CHKERRQ(ierr);
    PetscStackPop;
    ierr = PetscLogEventEnd(TAO_ObjectiveEval,tao,X,NULL,NULL);CHKERRQ(ierr);
    tao->nfuncs++;
    ierr = PetscLogEventBegin(TAO_GradientEval,tao,X,G,NULL);CHKERRQ(ierr);
    PetscStackPush("Tao user gradient evaluation routine");
    ierr = (*tao->ops->computegradient)(tao,X,G,tao->user_gradP);CHKERRQ(ierr);
    PetscStackPop;
    ierr = PetscLogEventEnd(TAO_GradientEval,tao,X,G,NULL);CHKERRQ(ierr);
    tao->ngrads++;
  } else SETERRQ(PETSC_COMM_SELF,PETSC_ERR_ARG_WRONGSTATE,"TaoSetObjectiveRoutine() or TaoSetGradientRoutine() not set");
  ierr = PetscInfo1(tao,"TAO Function evaluation: %20.19e\n",(double)(*f));CHKERRQ(ierr);
  ierr = VecLockPop(X);CHKERRQ(ierr);

  /* optionally compare the gradient against a finite-difference approximation
     (active only with -tao_test_gradient) */
  ierr = TaoTestGradient(tao,X,G);CHKERRQ(ierr);
  PetscFunctionReturn(0);
}
280 
281 /*@C
282   TaoSetObjectiveRoutine - Sets the function evaluation routine for minimization
283 
284   Logically collective on Tao
285 
286   Input Parameter:
287 + tao - the Tao context
288 . func - the objective function
289 - ctx - [optional] user-defined context for private data for the function evaluation
290         routine (may be NULL)
291 
292   Calling sequence of func:
293 $      func (Tao tao, Vec x, PetscReal *f, void *ctx);
294 
295 + x - input vector
296 . f - function value
297 - ctx - [optional] user-defined function context
298 
299   Level: beginner
300 
301 .seealso: TaoSetGradientRoutine(), TaoSetHessianRoutine() TaoSetObjectiveAndGradientRoutine()
302 @*/
303 PetscErrorCode TaoSetObjectiveRoutine(Tao tao, PetscErrorCode (*func)(Tao, Vec, PetscReal*,void*),void *ctx)
304 {
305   PetscFunctionBegin;
306   PetscValidHeaderSpecific(tao,TAO_CLASSID,1);
307   tao->user_objP = ctx;
308   tao->ops->computeobjective = func;
309   PetscFunctionReturn(0);
310 }
311 
312 /*@C
313   TaoSetResidualRoutine - Sets the residual evaluation routine for least-square applications
314 
315   Logically collective on Tao
316 
317   Input Parameter:
318 + tao - the Tao context
319 . func - the residual evaluation routine
320 - ctx - [optional] user-defined context for private data for the function evaluation
321         routine (may be NULL)
322 
323   Calling sequence of func:
324 $      func (Tao tao, Vec x, Vec f, void *ctx);
325 
326 + x - input vector
327 . f - function value vector
328 - ctx - [optional] user-defined function context
329 
330   Level: beginner
331 
332 .seealso: TaoSetObjectiveRoutine(), TaoSetJacobianRoutine()
333 @*/
334 PetscErrorCode TaoSetResidualRoutine(Tao tao, Vec res, PetscErrorCode (*func)(Tao, Vec, Vec, void*),void *ctx)
335 {
336   PetscErrorCode ierr;
337 
338   PetscFunctionBegin;
339   PetscValidHeaderSpecific(tao,TAO_CLASSID,1);
340   PetscValidHeaderSpecific(res,VEC_CLASSID,2);
341   tao->user_lsresP = ctx;
342   if (tao->ls_res) {
343     ierr = VecDestroy(&tao->ls_res);CHKERRQ(ierr);
344   }
345   ierr = PetscObjectReference((PetscObject)res);CHKERRQ(ierr);
346   tao->ls_res = res;
347   tao->ops->computeresidual = func;
348 
349   PetscFunctionReturn(0);
350 }
351 
352 /*@
353   TaoSetResidualWeights - Give weights for the residual values. A vector can be used if only diagonal terms are used, otherwise a matrix can be give. If this function is not used, or if sigma_v and sigma_w are both NULL, then the default identity matrix will be used for weights.
354 
355   Collective on Tao
356 
357   Input Parameters:
358 + tao - the Tao context
359 . sigma_v - vector of weights (diagonal terms only)
360 . n       - the number of weights (if using off-diagonal)
361 . rows    - index list of rows for sigma_w
362 . cols    - index list of columns for sigma_w
363 - vals - array of weights
364 
365 
366 
367   Note: Either sigma_v or sigma_w (or both) should be NULL
368 
369   Level: intermediate
370 
371 .seealso: TaoSetResidualRoutine()
372 @*/
373 PetscErrorCode TaoSetResidualWeights(Tao tao, Vec sigma_v, PetscInt n, PetscInt *rows, PetscInt *cols, PetscReal *vals)
374 {
375   PetscErrorCode ierr;
376   PetscInt       i;
377   PetscFunctionBegin;
378   PetscValidHeaderSpecific(tao,TAO_CLASSID,1);
379   ierr = VecDestroy(&tao->res_weights_v);CHKERRQ(ierr);
380   tao->res_weights_v=sigma_v;
381   if (sigma_v) {
382     ierr = PetscObjectReference((PetscObject)sigma_v);CHKERRQ(ierr);
383   }
384   if (vals) {
385     if (tao->res_weights_n) {
386       ierr = PetscFree(tao->res_weights_rows);CHKERRQ(ierr);
387       ierr = PetscFree(tao->res_weights_cols);CHKERRQ(ierr);
388       ierr = PetscFree(tao->res_weights_w);CHKERRQ(ierr);
389     }
390     ierr = PetscMalloc1(n,&tao->res_weights_rows);CHKERRQ(ierr);
391     ierr = PetscMalloc1(n,&tao->res_weights_cols);CHKERRQ(ierr);
392     ierr = PetscMalloc1(n,&tao->res_weights_w);CHKERRQ(ierr);
393     tao->res_weights_n=n;
394     for (i=0;i<n;i++) {
395       tao->res_weights_rows[i]=rows[i];
396       tao->res_weights_cols[i]=cols[i];
397       tao->res_weights_w[i]=vals[i];
398     }
399   } else {
400     tao->res_weights_n=0;
401     tao->res_weights_rows=0;
402     tao->res_weights_cols=0;
403   }
404   PetscFunctionReturn(0);
405 }
406 
/*@
  TaoComputeResidual - Computes a least-squares residual vector at a given point

  Collective on Tao

  Input Parameters:
+ tao - the Tao context
- X - input vector

  Output Parameter:
. F - Objective vector at X

  Notes:
    TaoComputeResidual() is typically used within minimization implementations,
  so most users would not generally call this routine themselves.

  Level: advanced

.seealso: TaoSetResidualRoutine()
@*/
PetscErrorCode TaoComputeResidual(Tao tao, Vec X, Vec F)
{
  PetscErrorCode ierr;

  PetscFunctionBegin;
  PetscValidHeaderSpecific(tao,TAO_CLASSID,1);
  PetscValidHeaderSpecific(X,VEC_CLASSID,2);
  PetscValidHeaderSpecific(F,VEC_CLASSID,3);
  PetscCheckSameComm(tao,1,X,2);
  PetscCheckSameComm(tao,1,F,3);
  if (tao->ops->computeresidual) {
    /* NOTE(review): residual evaluations are logged under TAO_ObjectiveEval and
       counted in nfuncs -- there is no separate residual event here */
    ierr = PetscLogEventBegin(TAO_ObjectiveEval,tao,X,NULL,NULL);CHKERRQ(ierr);
    PetscStackPush("Tao user least-squares residual evaluation routine");
    ierr = (*tao->ops->computeresidual)(tao,X,F,tao->user_lsresP);CHKERRQ(ierr);
    PetscStackPop;
    ierr = PetscLogEventEnd(TAO_ObjectiveEval,tao,X,NULL,NULL);CHKERRQ(ierr);
    tao->nfuncs++;
  } else SETERRQ(PETSC_COMM_SELF,PETSC_ERR_ARG_WRONGSTATE,"TaoSetResidualRoutine() has not been called");
  ierr = PetscInfo(tao,"TAO least-squares residual evaluation.\n");CHKERRQ(ierr);
  PetscFunctionReturn(0);
}
448 
449 /*@C
450   TaoSetGradientRoutine - Sets the gradient evaluation routine for minimization
451 
452   Logically collective on Tao
453 
454   Input Parameter:
455 + tao - the Tao context
456 . func - the gradient function
457 - ctx - [optional] user-defined context for private data for the gradient evaluation
458         routine (may be NULL)
459 
460   Calling sequence of func:
461 $      func (Tao tao, Vec x, Vec g, void *ctx);
462 
463 + x - input vector
464 . g - gradient value (output)
465 - ctx - [optional] user-defined function context
466 
467   Level: beginner
468 
469 .seealso: TaoSetObjectiveRoutine(), TaoSetHessianRoutine() TaoSetObjectiveAndGradientRoutine()
470 @*/
471 PetscErrorCode TaoSetGradientRoutine(Tao tao, PetscErrorCode (*func)(Tao, Vec, Vec, void*),void *ctx)
472 {
473   PetscFunctionBegin;
474   PetscValidHeaderSpecific(tao,TAO_CLASSID,1);
475   tao->user_gradP = ctx;
476   tao->ops->computegradient = func;
477   PetscFunctionReturn(0);
478 }
479 
480 /*@C
481   TaoSetObjectiveAndGradientRoutine - Sets a combined objective function and gradient evaluation routine for minimization
482 
483   Logically collective on Tao
484 
485   Input Parameter:
486 + tao - the Tao context
487 . func - the gradient function
488 - ctx - [optional] user-defined context for private data for the gradient evaluation
489         routine (may be NULL)
490 
491   Calling sequence of func:
492 $      func (Tao tao, Vec x, PetscReal *f, Vec g, void *ctx);
493 
494 + x - input vector
495 . f - objective value (output)
496 . g - gradient value (output)
497 - ctx - [optional] user-defined function context
498 
499   Level: beginner
500 
501 .seealso: TaoSetObjectiveRoutine(), TaoSetHessianRoutine() TaoSetObjectiveAndGradientRoutine()
502 @*/
503 PetscErrorCode TaoSetObjectiveAndGradientRoutine(Tao tao, PetscErrorCode (*func)(Tao, Vec, PetscReal *, Vec, void*), void *ctx)
504 {
505   PetscFunctionBegin;
506   PetscValidHeaderSpecific(tao,TAO_CLASSID,1);
507   tao->user_objgradP = ctx;
508   tao->ops->computeobjectiveandgradient = func;
509   PetscFunctionReturn(0);
510 }
511 
512 /*@
513   TaoIsObjectiveDefined -- Checks to see if the user has
514   declared an objective-only routine.  Useful for determining when
515   it is appropriate to call TaoComputeObjective() or
516   TaoComputeObjectiveAndGradient()
517 
518   Collective on Tao
519 
520   Input Parameter:
521 + tao - the Tao context
522 - ctx - PETSC_TRUE if objective function routine is set by user,
523         PETSC_FALSE otherwise
524   Level: developer
525 
526 .seealso: TaoSetObjectiveRoutine(), TaoIsGradientDefined(), TaoIsObjectiveAndGradientDefined()
527 @*/
528 PetscErrorCode TaoIsObjectiveDefined(Tao tao, PetscBool *flg)
529 {
530   PetscFunctionBegin;
531   PetscValidHeaderSpecific(tao,TAO_CLASSID,1);
532   if (tao->ops->computeobjective == 0) *flg = PETSC_FALSE;
533   else *flg = PETSC_TRUE;
534   PetscFunctionReturn(0);
535 }
536 
537 /*@
538   TaoIsGradientDefined -- Checks to see if the user has
539   declared an objective-only routine.  Useful for determining when
540   it is appropriate to call TaoComputeGradient() or
541   TaoComputeGradientAndGradient()
542 
543   Not Collective
544 
545   Input Parameter:
546 + tao - the Tao context
547 - ctx - PETSC_TRUE if gradient routine is set by user, PETSC_FALSE otherwise
548   Level: developer
549 
550 .seealso: TaoSetGradientRoutine(), TaoIsObjectiveDefined(), TaoIsObjectiveAndGradientDefined()
551 @*/
552 PetscErrorCode TaoIsGradientDefined(Tao tao, PetscBool *flg)
553 {
554   PetscFunctionBegin;
555   PetscValidHeaderSpecific(tao,TAO_CLASSID,1);
556   if (tao->ops->computegradient == 0) *flg = PETSC_FALSE;
557   else *flg = PETSC_TRUE;
558   PetscFunctionReturn(0);
559 }
560 
561 /*@
562   TaoIsObjectiveAndGradientDefined -- Checks to see if the user has
563   declared a joint objective/gradient routine.  Useful for determining when
564   it is appropriate to call TaoComputeObjective() or
565   TaoComputeObjectiveAndGradient()
566 
567   Not Collective
568 
569   Input Parameter:
570 + tao - the Tao context
571 - ctx - PETSC_TRUE if objective/gradient routine is set by user, PETSC_FALSE otherwise
572   Level: developer
573 
574 .seealso: TaoSetObjectiveAndGradientRoutine(), TaoIsObjectiveDefined(), TaoIsGradientDefined()
575 @*/
576 PetscErrorCode TaoIsObjectiveAndGradientDefined(Tao tao, PetscBool *flg)
577 {
578   PetscFunctionBegin;
579   PetscValidHeaderSpecific(tao,TAO_CLASSID,1);
580   if (tao->ops->computeobjectiveandgradient == 0) *flg = PETSC_FALSE;
581   else *flg = PETSC_TRUE;
582   PetscFunctionReturn(0);
583 }
584