xref: /petsc/src/tao/interface/taosolver_fg.c (revision b45e3bf4ff73d80a20c3202b6cd9f79d2f2d3efe)
1 #include <petsc/private/taoimpl.h> /*I "petsctao.h" I*/
2 
/*@
  TaoSetSolution - Sets the vector holding the initial guess for the solve

  Logically collective on Tao

  Input Parameters:
+ tao - the Tao context
- x0  - the initial guess

  Level: beginner

.seealso: TaoCreate(), TaoSolve(), TaoGetSolution()
@*/
PetscErrorCode TaoSetSolution(Tao tao, Vec x0)
{
  PetscErrorCode ierr;

  PetscFunctionBegin;
  PetscValidHeaderSpecific(tao,TAO_CLASSID,1);
  if (x0) PetscValidHeaderSpecific(x0,VEC_CLASSID,2);
  /* Take a reference on x0 BEFORE destroying the old solution: this makes the
     call safe even when x0 == tao->solution (destroy-first would free it).
     x0 may be NULL; PetscObjectReference() and VecDestroy() both accept NULL. */
  ierr = PetscObjectReference((PetscObject)x0);CHKERRQ(ierr);
  ierr = VecDestroy(&tao->solution);CHKERRQ(ierr);
  tao->solution = x0;
  PetscFunctionReturn(0);
}
27 
/*
  TaoTestGradient - Compares the hand-coded gradient g1 against a finite
  difference approximation, activated by the -tao_test_gradient options
  database key. Returns immediately, producing no output, when that option
  is not set.

  Collective on Tao

  Input Parameters:
+ tao - the Tao context
. x   - the point at which the gradient was evaluated
- g1  - the hand-coded gradient at x (assumed already computed)

  With -tao_test_gradient_view the hand-coded gradient, the finite
  difference gradient, and their difference are also viewed entry by entry.
*/
PetscErrorCode TaoTestGradient(Tao tao,Vec x,Vec g1)
{
  Vec               g2,g3;
  PetscBool         complete_print = PETSC_FALSE,test = PETSC_FALSE;
  PetscReal         hcnorm,fdnorm,hcmax,fdmax,diffmax,diffnorm;
  PetscScalar       dot;
  MPI_Comm          comm;
  PetscViewer       viewer,mviewer;
  PetscViewerFormat format;
  PetscInt          tabs;
  /* usage hints below are printed at most once per program run */
  static PetscBool  directionsprinted = PETSC_FALSE;
  PetscErrorCode    ierr;

  PetscFunctionBegin;
  ierr = PetscObjectOptionsBegin((PetscObject)tao);CHKERRQ(ierr);
  ierr = PetscOptionsName("-tao_test_gradient","Compare hand-coded and finite difference Gradients","None",&test);CHKERRQ(ierr);
  ierr = PetscOptionsViewer("-tao_test_gradient_view","View difference between hand-coded and finite difference Gradients element entries","None",&mviewer,&format,&complete_print);CHKERRQ(ierr);
  ierr = PetscOptionsEnd();CHKERRQ(ierr);
  if (!test) {
    /* mviewer was created by PetscOptionsViewer() only when the view option
       was given (complete_print set), so it must be released here */
    if (complete_print) {
      ierr = PetscViewerDestroy(&mviewer);CHKERRQ(ierr);
    }
    PetscFunctionReturn(0);
  }

  ierr = PetscObjectGetComm((PetscObject)tao,&comm);CHKERRQ(ierr);
  ierr = PetscViewerASCIIGetStdout(comm,&viewer);CHKERRQ(ierr);
  /* temporarily match the stdout viewer's indentation to this solver's tab
     level; restored before returning */
  ierr = PetscViewerASCIIGetTab(viewer, &tabs);CHKERRQ(ierr);
  ierr = PetscViewerASCIISetTab(viewer, ((PetscObject)tao)->tablevel);CHKERRQ(ierr);
  ierr = PetscViewerASCIIPrintf(viewer,"  ---------- Testing Gradient -------------\n");CHKERRQ(ierr);
  if (!complete_print && !directionsprinted) {
    ierr = PetscViewerASCIIPrintf(viewer,"  Run with -tao_test_gradient_view and optionally -tao_test_gradient <threshold> to show difference\n");CHKERRQ(ierr);
    ierr = PetscViewerASCIIPrintf(viewer,"    of hand-coded and finite difference gradient entries greater than <threshold>.\n");CHKERRQ(ierr);
  }
  if (!directionsprinted) {
    ierr = PetscViewerASCIIPrintf(viewer,"  Testing hand-coded Gradient, if (for double precision runs) ||G - Gfd||/||G|| is\n");CHKERRQ(ierr);
    ierr = PetscViewerASCIIPrintf(viewer,"    O(1.e-8), the hand-coded Gradient is probably correct.\n");CHKERRQ(ierr);
    directionsprinted = PETSC_TRUE;
  }
  if (complete_print) {
    ierr = PetscViewerPushFormat(mviewer,format);CHKERRQ(ierr);
  }

  /* g2 receives the finite difference gradient, g3 the difference g1 - g2 */
  ierr = VecDuplicate(x,&g2);CHKERRQ(ierr);
  ierr = VecDuplicate(x,&g3);CHKERRQ(ierr);

  /* Compute finite difference gradient, assume the gradient is already computed by TaoComputeGradient() and put into g1 */
  ierr = TaoDefaultComputeGradient(tao,x,g2,NULL);CHKERRQ(ierr);

  ierr = VecNorm(g2,NORM_2,&fdnorm);CHKERRQ(ierr);
  ierr = VecNorm(g1,NORM_2,&hcnorm);CHKERRQ(ierr);
  ierr = VecNorm(g2,NORM_INFINITY,&fdmax);CHKERRQ(ierr);
  ierr = VecNorm(g1,NORM_INFINITY,&hcmax);CHKERRQ(ierr);
  ierr = VecDot(g1,g2,&dot);CHKERRQ(ierr);
  ierr = VecCopy(g1,g3);CHKERRQ(ierr);
  ierr = VecAXPY(g3,-1.0,g2);CHKERRQ(ierr);
  ierr = VecNorm(g3,NORM_2,&diffnorm);CHKERRQ(ierr);
  ierr = VecNorm(g3,NORM_INFINITY,&diffmax);CHKERRQ(ierr);
  ierr = PetscViewerASCIIPrintf(viewer,"  ||Gfd|| %g, ||G|| = %g, angle cosine = (Gfd'G)/||Gfd||||G|| = %g\n", (double)fdnorm, (double)hcnorm, (double)(PetscRealPart(dot)/(fdnorm*hcnorm)));CHKERRQ(ierr);
  ierr = PetscViewerASCIIPrintf(viewer,"  2-norm ||G - Gfd||/||G|| = %g, ||G - Gfd|| = %g\n",(double)(diffnorm/PetscMax(hcnorm,fdnorm)),(double)diffnorm);CHKERRQ(ierr);
  ierr = PetscViewerASCIIPrintf(viewer,"  max-norm ||G - Gfd||/||G|| = %g, ||G - Gfd|| = %g\n",(double)(diffmax/PetscMax(hcmax,fdmax)),(double)diffmax);CHKERRQ(ierr);

  if (complete_print) {
    ierr = PetscViewerASCIIPrintf(viewer,"  Hand-coded gradient ----------\n");CHKERRQ(ierr);
    ierr = VecView(g1,mviewer);CHKERRQ(ierr);
    ierr = PetscViewerASCIIPrintf(viewer,"  Finite difference gradient ----------\n");CHKERRQ(ierr);
    ierr = VecView(g2,mviewer);CHKERRQ(ierr);
    ierr = PetscViewerASCIIPrintf(viewer,"  Hand-coded minus finite-difference gradient ----------\n");CHKERRQ(ierr);
    ierr = VecView(g3,mviewer);CHKERRQ(ierr);
  }
  ierr = VecDestroy(&g2);CHKERRQ(ierr);
  ierr = VecDestroy(&g3);CHKERRQ(ierr);

  if (complete_print) {
    ierr = PetscViewerPopFormat(mviewer);CHKERRQ(ierr);
    ierr = PetscViewerDestroy(&mviewer);CHKERRQ(ierr);
  }
  /* restore the caller's indentation level on the shared stdout viewer */
  ierr = PetscViewerASCIISetTab(viewer,tabs);CHKERRQ(ierr);
  PetscFunctionReturn(0);
}
108 
/*@
  TaoComputeGradient - Computes the gradient of the objective function

  Collective on Tao

  Input Parameters:
+ tao - the Tao context
- X - input vector

  Output Parameter:
. G - gradient vector

  Options Database Keys:
+    -tao_test_gradient - compare the user provided gradient with one compute via finite differences to check for errors
-    -tao_test_gradient_view - display the user provided gradient, the finite difference gradient and the difference between them to help users detect the location of errors in the user provided gradient

  Notes:
    TaoComputeGradient() is typically used within minimization implementations,
  so most users would not generally call this routine themselves.

    If no gradient-only routine has been set, falls back on the combined
  objective/gradient routine and discards the objective value.

  Level: advanced

.seealso: TaoComputeObjective(), TaoComputeObjectiveAndGradient(), TaoSetGradient()
@*/
PetscErrorCode TaoComputeGradient(Tao tao, Vec X, Vec G)
{
  PetscErrorCode ierr;
  PetscReal      dummy;  /* receives the discarded objective value in the fallback path */

  PetscFunctionBegin;
  PetscValidHeaderSpecific(tao,TAO_CLASSID,1);
  PetscValidHeaderSpecific(X,VEC_CLASSID,2);
  PetscValidHeaderSpecific(G,VEC_CLASSID,3);
  PetscCheckSameComm(tao,1,X,2);
  PetscCheckSameComm(tao,1,G,3);
  /* guard X against accidental modification by the user callback */
  ierr = VecLockReadPush(X);CHKERRQ(ierr);
  if (tao->ops->computegradient) {
    ierr = PetscLogEventBegin(TAO_GradientEval,tao,X,G,NULL);CHKERRQ(ierr);
    PetscStackPush("Tao user gradient evaluation routine");
    ierr = (*tao->ops->computegradient)(tao,X,G,tao->user_gradP);CHKERRQ(ierr);
    PetscStackPop;
    ierr = PetscLogEventEnd(TAO_GradientEval,tao,X,G,NULL);CHKERRQ(ierr);
    tao->ngrads++;
  } else if (tao->ops->computeobjectiveandgradient) {
    ierr = PetscLogEventBegin(TAO_ObjGradEval,tao,X,G,NULL);CHKERRQ(ierr);
    PetscStackPush("Tao user objective/gradient evaluation routine");
    ierr = (*tao->ops->computeobjectiveandgradient)(tao,X,&dummy,G,tao->user_objgradP);CHKERRQ(ierr);
    PetscStackPop;
    ierr = PetscLogEventEnd(TAO_ObjGradEval,tao,X,G,NULL);CHKERRQ(ierr);
    tao->nfuncgrads++;
  } else SETERRQ(PetscObjectComm((PetscObject)tao),PETSC_ERR_ARG_WRONGSTATE,"TaoSetGradient() has not been called");
  ierr = VecLockReadPop(X);CHKERRQ(ierr);

  /* optionally compare against a finite difference gradient (-tao_test_gradient) */
  ierr = TaoTestGradient(tao,X,G);CHKERRQ(ierr);
  PetscFunctionReturn(0);
}
165 
/*@
  TaoComputeObjective - Computes the objective function value at a given point

  Collective on Tao

  Input Parameters:
+ tao - the Tao context
- X - input vector

  Output Parameter:
. f - Objective value at X

  Notes:
    TaoComputeObjective() is typically used within minimization implementations,
  so most users would not generally call this routine themselves.

    If only a combined objective/gradient routine has been set, a work vector
  is duplicated from X to receive the (discarded) gradient.

  Level: advanced

.seealso: TaoComputeGradient(), TaoComputeObjectiveAndGradient(), TaoSetObjective()
@*/
PetscErrorCode TaoComputeObjective(Tao tao, Vec X, PetscReal *f)
{
  PetscErrorCode ierr;
  Vec            temp;  /* scratch gradient for the combined-routine fallback */

  PetscFunctionBegin;
  PetscValidHeaderSpecific(tao,TAO_CLASSID,1);
  PetscValidHeaderSpecific(X,VEC_CLASSID,2);
  PetscCheckSameComm(tao,1,X,2);
  /* guard X against accidental modification by the user callback */
  ierr = VecLockReadPush(X);CHKERRQ(ierr);
  if (tao->ops->computeobjective) {
    ierr = PetscLogEventBegin(TAO_ObjectiveEval,tao,X,NULL,NULL);CHKERRQ(ierr);
    PetscStackPush("Tao user objective evaluation routine");
    ierr = (*tao->ops->computeobjective)(tao,X,f,tao->user_objP);CHKERRQ(ierr);
    PetscStackPop;
    ierr = PetscLogEventEnd(TAO_ObjectiveEval,tao,X,NULL,NULL);CHKERRQ(ierr);
    tao->nfuncs++;
  } else if (tao->ops->computeobjectiveandgradient) {
    /* fall back on the combined routine; the gradient result is thrown away */
    ierr = PetscInfo(tao,"Duplicating variable vector in order to call func/grad routine\n");CHKERRQ(ierr);
    ierr = VecDuplicate(X,&temp);CHKERRQ(ierr);
    ierr = PetscLogEventBegin(TAO_ObjGradEval,tao,X,NULL,NULL);CHKERRQ(ierr);
    PetscStackPush("Tao user objective/gradient evaluation routine");
    ierr = (*tao->ops->computeobjectiveandgradient)(tao,X,f,temp,tao->user_objgradP);CHKERRQ(ierr);
    PetscStackPop;
    ierr = PetscLogEventEnd(TAO_ObjGradEval,tao,X,NULL,NULL);CHKERRQ(ierr);
    ierr = VecDestroy(&temp);CHKERRQ(ierr);
    tao->nfuncgrads++;
  } else SETERRQ(PetscObjectComm((PetscObject)tao),PETSC_ERR_ARG_WRONGSTATE,"TaoSetObjective() has not been called");
  ierr = PetscInfo(tao,"TAO Function evaluation: %20.19e\n",(double)(*f));CHKERRQ(ierr);
  ierr = VecLockReadPop(X);CHKERRQ(ierr);
  PetscFunctionReturn(0);
}
218 
/*@
  TaoComputeObjectiveAndGradient - Computes the objective function value and its gradient at a given point

  Collective on Tao

  Input Parameters:
+ tao - the Tao context
- X - input vector

  Output Parameters:
+ f - Objective value at X
- g - Gradient vector at X

  Notes:
    TaoComputeObjectiveAndGradient() is typically used within minimization implementations,
  so most users would not generally call this routine themselves.

  Level: advanced

.seealso: TaoComputeGradient(), TaoComputeObjective(), TaoSetObjective()
@*/
PetscErrorCode TaoComputeObjectiveAndGradient(Tao tao, Vec X, PetscReal *f, Vec G)
{
  PetscErrorCode ierr;

  PetscFunctionBegin;
  PetscValidHeaderSpecific(tao,TAO_CLASSID,1);
  PetscValidHeaderSpecific(X,VEC_CLASSID,2);
  PetscValidHeaderSpecific(G,VEC_CLASSID,4);
  PetscCheckSameComm(tao,1,X,2);
  PetscCheckSameComm(tao,1,G,4);
  /* guard X against accidental modification by the user callbacks */
  ierr = VecLockReadPush(X);CHKERRQ(ierr);
  if (tao->ops->computeobjectiveandgradient) {
    ierr = PetscLogEventBegin(TAO_ObjGradEval,tao,X,G,NULL);CHKERRQ(ierr);
    if (tao->ops->computegradient == TaoDefaultComputeGradient) {
      /* The gradient is being approximated by finite differences: evaluate
         the objective directly and difference it, instead of calling the
         combined routine.  NOTE(review): presumably this avoids re-entering
         the combined wrapper from within the differencing loop -- confirm
         against TaoDefaultComputeGradient(). */
      ierr = TaoComputeObjective(tao,X,f);CHKERRQ(ierr);
      ierr = TaoDefaultComputeGradient(tao,X,G,NULL);CHKERRQ(ierr);
    } else {
      PetscStackPush("Tao user objective/gradient evaluation routine");
      ierr = (*tao->ops->computeobjectiveandgradient)(tao,X,f,G,tao->user_objgradP);CHKERRQ(ierr);
      PetscStackPop;
    }
    ierr = PetscLogEventEnd(TAO_ObjGradEval,tao,X,G,NULL);CHKERRQ(ierr);
    tao->nfuncgrads++;
  } else if (tao->ops->computeobjective && tao->ops->computegradient) {
    /* no combined routine: evaluate objective and gradient separately */
    ierr = PetscLogEventBegin(TAO_ObjectiveEval,tao,X,NULL,NULL);CHKERRQ(ierr);
    PetscStackPush("Tao user objective evaluation routine");
    ierr = (*tao->ops->computeobjective)(tao,X,f,tao->user_objP);CHKERRQ(ierr);
    PetscStackPop;
    ierr = PetscLogEventEnd(TAO_ObjectiveEval,tao,X,NULL,NULL);CHKERRQ(ierr);
    tao->nfuncs++;
    ierr = PetscLogEventBegin(TAO_GradientEval,tao,X,G,NULL);CHKERRQ(ierr);
    PetscStackPush("Tao user gradient evaluation routine");
    ierr = (*tao->ops->computegradient)(tao,X,G,tao->user_gradP);CHKERRQ(ierr);
    PetscStackPop;
    ierr = PetscLogEventEnd(TAO_GradientEval,tao,X,G,NULL);CHKERRQ(ierr);
    tao->ngrads++;
  } else SETERRQ(PetscObjectComm((PetscObject)tao),PETSC_ERR_ARG_WRONGSTATE,"TaoSetObjective() or TaoSetGradient() not set");
  ierr = PetscInfo(tao,"TAO Function evaluation: %20.19e\n",(double)(*f));CHKERRQ(ierr);
  ierr = VecLockReadPop(X);CHKERRQ(ierr);

  /* optionally compare against a finite difference gradient (-tao_test_gradient) */
  ierr = TaoTestGradient(tao,X,G);CHKERRQ(ierr);
  PetscFunctionReturn(0);
}
283 
284 /*@C
285   TaoSetObjective - Sets the function evaluation routine for minimization
286 
287   Logically collective on Tao
288 
289   Input Parameters:
290 + tao - the Tao context
291 . func - the objective function
292 - ctx - [optional] user-defined context for private data for the function evaluation
293         routine (may be NULL)
294 
295   Calling sequence of func:
296 $      func (Tao tao, Vec x, PetscReal *f, void *ctx);
297 
298 + x - input vector
299 . f - function value
300 - ctx - [optional] user-defined function context
301 
302   Level: beginner
303 
304 .seealso: TaoSetGradient(), TaoSetHessian(), TaoSetObjectiveAndGradient(), TaoGetObjective()
305 @*/
306 PetscErrorCode TaoSetObjective(Tao tao, PetscErrorCode (*func)(Tao, Vec, PetscReal*,void*),void *ctx)
307 {
308   PetscFunctionBegin;
309   PetscValidHeaderSpecific(tao,TAO_CLASSID,1);
310   if (ctx) tao->user_objP = ctx;
311   if (func) tao->ops->computeobjective = func;
312   PetscFunctionReturn(0);
313 }
314 
315 /*@C
316   TaoGetObjective - Gets the function evaluation routine for minimization
317 
318   Not collective
319 
320   Input Parameter:
321 . tao - the Tao context
322 
323   Output Parameters
324 + func - the objective function
325 - ctx - the user-defined context for private data for the function evaluation
326 
327   Calling sequence of func:
328 $      func (Tao tao, Vec x, PetscReal *f, void *ctx);
329 
330 + x - input vector
331 . f - function value
332 - ctx - [optional] user-defined function context
333 
334   Level: beginner
335 
336 .seealso: TaoSetGradient(), TaoSetHessian(), TaoSetObjective()
337 @*/
338 PetscErrorCode TaoGetObjective(Tao tao, PetscErrorCode (**func)(Tao, Vec, PetscReal*,void*),void **ctx)
339 {
340   PetscFunctionBegin;
341   PetscValidHeaderSpecific(tao,TAO_CLASSID,1);
342   if (func) *func = tao->ops->computeobjective;
343   if (ctx) *ctx = tao->user_objP;
344   PetscFunctionReturn(0);
345 }
346 
347 /*@C
348   TaoSetResidualRoutine - Sets the residual evaluation routine for least-square applications
349 
350   Logically collective on Tao
351 
352   Input Parameters:
353 + tao - the Tao context
354 . func - the residual evaluation routine
355 - ctx - [optional] user-defined context for private data for the function evaluation
356         routine (may be NULL)
357 
358   Calling sequence of func:
359 $      func (Tao tao, Vec x, Vec f, void *ctx);
360 
361 + x - input vector
362 . f - function value vector
363 - ctx - [optional] user-defined function context
364 
365   Level: beginner
366 
367 .seealso: TaoSetObjective(), TaoSetJacobianRoutine()
368 @*/
369 PetscErrorCode TaoSetResidualRoutine(Tao tao, Vec res, PetscErrorCode (*func)(Tao, Vec, Vec, void*),void *ctx)
370 {
371   PetscErrorCode ierr;
372 
373   PetscFunctionBegin;
374   PetscValidHeaderSpecific(tao,TAO_CLASSID,1);
375   PetscValidHeaderSpecific(res,VEC_CLASSID,2);
376   ierr = PetscObjectReference((PetscObject)res);CHKERRQ(ierr);
377   if (tao->ls_res) {
378     ierr = VecDestroy(&tao->ls_res);CHKERRQ(ierr);
379   }
380   tao->ls_res = res;
381   tao->user_lsresP = ctx;
382   tao->ops->computeresidual = func;
383 
384   PetscFunctionReturn(0);
385 }
386 
387 /*@
388   TaoSetResidualWeights - Give weights for the residual values. A vector can be used if only diagonal terms are used, otherwise a matrix can be give. If this function is not used, or if sigma_v and sigma_w are both NULL, then the default identity matrix will be used for weights.
389 
390   Collective on Tao
391 
392   Input Parameters:
393 + tao - the Tao context
394 . sigma_v - vector of weights (diagonal terms only)
395 . n       - the number of weights (if using off-diagonal)
396 . rows    - index list of rows for sigma_w
397 . cols    - index list of columns for sigma_w
398 - vals - array of weights
399 
400   Note: Either sigma_v or sigma_w (or both) should be NULL
401 
402   Level: intermediate
403 
404 .seealso: TaoSetResidualRoutine()
405 @*/
406 PetscErrorCode TaoSetResidualWeights(Tao tao, Vec sigma_v, PetscInt n, PetscInt *rows, PetscInt *cols, PetscReal *vals)
407 {
408   PetscErrorCode ierr;
409   PetscInt       i;
410 
411   PetscFunctionBegin;
412   PetscValidHeaderSpecific(tao,TAO_CLASSID,1);
413   if (sigma_v) PetscValidHeaderSpecific(sigma_v,VEC_CLASSID,2);
414   ierr = PetscObjectReference((PetscObject)sigma_v);CHKERRQ(ierr);
415   ierr = VecDestroy(&tao->res_weights_v);CHKERRQ(ierr);
416   tao->res_weights_v = sigma_v;
417   if (vals) {
418     ierr = PetscFree(tao->res_weights_rows);CHKERRQ(ierr);
419     ierr = PetscFree(tao->res_weights_cols);CHKERRQ(ierr);
420     ierr = PetscFree(tao->res_weights_w);CHKERRQ(ierr);
421     ierr = PetscMalloc1(n,&tao->res_weights_rows);CHKERRQ(ierr);
422     ierr = PetscMalloc1(n,&tao->res_weights_cols);CHKERRQ(ierr);
423     ierr = PetscMalloc1(n,&tao->res_weights_w);CHKERRQ(ierr);
424     tao->res_weights_n = n;
425     for (i=0;i<n;i++) {
426       tao->res_weights_rows[i] = rows[i];
427       tao->res_weights_cols[i] = cols[i];
428       tao->res_weights_w[i] = vals[i];
429     }
430   } else {
431     tao->res_weights_n = 0;
432     tao->res_weights_rows = NULL;
433     tao->res_weights_cols = NULL;
434   }
435   PetscFunctionReturn(0);
436 }
437 
/*@
  TaoComputeResidual - Computes a least-squares residual vector at a given point

  Collective on Tao

  Input Parameters:
+ tao - the Tao context
- X - input vector

  Output Parameter:
. F - the residual vector at X

  Notes:
    TaoComputeResidual() is typically used within minimization implementations,
  so most users would not generally call this routine themselves.

  Level: advanced

.seealso: TaoSetResidualRoutine()
@*/
PetscErrorCode TaoComputeResidual(Tao tao, Vec X, Vec F)
{
  PetscErrorCode ierr;

  PetscFunctionBegin;
  PetscValidHeaderSpecific(tao,TAO_CLASSID,1);
  PetscValidHeaderSpecific(X,VEC_CLASSID,2);
  PetscValidHeaderSpecific(F,VEC_CLASSID,3);
  PetscCheckSameComm(tao,1,X,2);
  PetscCheckSameComm(tao,1,F,3);
  if (tao->ops->computeresidual) {
    /* residual evaluations are logged under the objective-evaluation event
       and counted in nfuncs (there is no separate residual event here) */
    ierr = PetscLogEventBegin(TAO_ObjectiveEval,tao,X,NULL,NULL);CHKERRQ(ierr);
    PetscStackPush("Tao user least-squares residual evaluation routine");
    ierr = (*tao->ops->computeresidual)(tao,X,F,tao->user_lsresP);CHKERRQ(ierr);
    PetscStackPop;
    ierr = PetscLogEventEnd(TAO_ObjectiveEval,tao,X,NULL,NULL);CHKERRQ(ierr);
    tao->nfuncs++;
  } else SETERRQ(PetscObjectComm((PetscObject)tao),PETSC_ERR_ARG_WRONGSTATE,"TaoSetResidualRoutine() has not been called");
  ierr = PetscInfo(tao,"TAO least-squares residual evaluation.\n");CHKERRQ(ierr);
  PetscFunctionReturn(0);
}
479 
480 /*@C
481   TaoSetGradient - Sets the gradient evaluation routine for minimization
482 
483   Logically collective on Tao
484 
485   Input Parameters:
486 + tao - the Tao context
487 . g - [optional] the vector to internally hold the gradient computation
488 . func - the gradient function
489 - ctx - [optional] user-defined context for private data for the gradient evaluation
490         routine (may be NULL)
491 
492   Calling sequence of func:
493 $      func (Tao tao, Vec x, Vec g, void *ctx);
494 
495 + x - input vector
496 . g - gradient value (output)
497 - ctx - [optional] user-defined function context
498 
499   Level: beginner
500 
501 .seealso: TaoSetObjective(), TaoSetHessian(), TaoSetObjectiveAndGradient(), TaoGetGradient()
502 @*/
503 PetscErrorCode TaoSetGradient(Tao tao, Vec g, PetscErrorCode (*func)(Tao, Vec, Vec, void*),void *ctx)
504 {
505   PetscErrorCode ierr;
506 
507   PetscFunctionBegin;
508   PetscValidHeaderSpecific(tao,TAO_CLASSID,1);
509   if (g) {
510     PetscValidHeaderSpecific(g,VEC_CLASSID,2);
511     PetscCheckSameComm(tao,1,g,2);
512     ierr = PetscObjectReference((PetscObject)g);CHKERRQ(ierr);
513     ierr = VecDestroy(&tao->gradient);CHKERRQ(ierr);
514     tao->gradient = g;
515   }
516   if (func) tao->ops->computegradient = func;
517   if (ctx) tao->user_gradP = ctx;
518   PetscFunctionReturn(0);
519 }
520 
521 /*@C
522   TaoGetGradient - Gets the gradient evaluation routine for minimization
523 
524   Not collective
525 
526   Input Parameter:
527 . tao - the Tao context
528 
529   Output Parameters:
530 + g - the vector to internally hold the gradient computation
531 . func - the gradient function
532 - ctx - user-defined context for private data for the gradient evaluation routine
533 
534   Calling sequence of func:
535 $      func (Tao tao, Vec x, Vec g, void *ctx);
536 
537 + x - input vector
538 . g - gradient value (output)
539 - ctx - [optional] user-defined function context
540 
541   Level: beginner
542 
543 .seealso: TaoSetObjective(), TaoSetHessian(), TaoSetObjectiveAndGradient(), TaoSetGradient()
544 @*/
545 PetscErrorCode TaoGetGradient(Tao tao, Vec *g, PetscErrorCode (**func)(Tao, Vec, Vec, void*),void **ctx)
546 {
547   PetscFunctionBegin;
548   PetscValidHeaderSpecific(tao,TAO_CLASSID,1);
549   if (g) *g = tao->gradient;
550   if (func) *func = tao->ops->computegradient;
551   if (ctx) *ctx = tao->user_gradP;
552   PetscFunctionReturn(0);
553 }
554 
555 /*@C
556   TaoSetObjectiveAndGradient - Sets a combined objective function and gradient evaluation routine for minimization
557 
558   Logically collective on Tao
559 
560   Input Parameters:
561 + tao - the Tao context
562 . g - [optional] the vector to internally hold the gradient computation
563 . func - the gradient function
564 - ctx - [optional] user-defined context for private data for the gradient evaluation
565         routine (may be NULL)
566 
567   Calling sequence of func:
568 $      func (Tao tao, Vec x, PetscReal *f, Vec g, void *ctx);
569 
570 + x - input vector
571 . f - objective value (output)
572 . g - gradient value (output)
573 - ctx - [optional] user-defined function context
574 
575   Level: beginner
576 
577 .seealso: TaoSetObjective(), TaoSetHessian(), TaoSetGradient(), TaoGetObjectiveAndGradient()
578 @*/
579 PetscErrorCode TaoSetObjectiveAndGradient(Tao tao, Vec g, PetscErrorCode (*func)(Tao, Vec, PetscReal*, Vec, void*), void *ctx)
580 {
581   PetscErrorCode ierr;
582 
583   PetscFunctionBegin;
584   PetscValidHeaderSpecific(tao,TAO_CLASSID,1);
585   if (g) {
586     PetscValidHeaderSpecific(g,VEC_CLASSID,2);
587     PetscCheckSameComm(tao,1,g,2);
588     ierr = PetscObjectReference((PetscObject)g);CHKERRQ(ierr);
589     ierr = VecDestroy(&tao->gradient);CHKERRQ(ierr);
590     tao->gradient = g;
591   }
592   if (ctx) tao->user_objgradP = ctx;
593   if (func) tao->ops->computeobjectiveandgradient = func;
594   PetscFunctionReturn(0);
595 }
596 
597 /*@C
598   TaoGetObjectiveAndGradient - Gets a combined objective function and gradient evaluation routine for minimization
599 
600   Not collective
601 
602   Input Parameter:
603 . tao - the Tao context
604 
605   Output Parameters:
606 . g - the vector to internally hold the gradient computation
607 . func - the gradient function
608 - ctx - user-defined context for private data for the gradient evaluation routine
609 
610   Calling sequence of func:
611 $      func (Tao tao, Vec x, PetscReal *f, Vec g, void *ctx);
612 
613 + x - input vector
614 . f - objective value (output)
615 . g - gradient value (output)
616 - ctx - [optional] user-defined function context
617 
618   Level: beginner
619 
620 .seealso: TaoSetObjective(), TaoSetGradient(), TaoSetHessian(), TaoSetObjectiveAndGradient()
621 @*/
622 PetscErrorCode TaoGetObjectiveAndGradient(Tao tao, Vec *g, PetscErrorCode (**func)(Tao, Vec, PetscReal*, Vec, void*), void **ctx)
623 {
624   PetscFunctionBegin;
625   PetscValidHeaderSpecific(tao,TAO_CLASSID,1);
626   if (g) *g = tao->gradient;
627   if (func) *func = tao->ops->computeobjectiveandgradient;
628   if (ctx) *ctx = tao->user_objgradP;
629   PetscFunctionReturn(0);
630 }
631 
632 /*@
633   TaoIsObjectiveDefined - Checks to see if the user has
634   declared an objective-only routine.  Useful for determining when
635   it is appropriate to call TaoComputeObjective() or
636   TaoComputeObjectiveAndGradient()
637 
638   Not collective
639 
640   Input Parameter:
641 . tao - the Tao context
642 
643   Output Parameter:
644 . flg - PETSC_TRUE if function routine is set by user, PETSC_FALSE otherwise
645 
646   Level: developer
647 
648 .seealso: TaoSetObjective(), TaoIsGradientDefined(), TaoIsObjectiveAndGradientDefined()
649 @*/
650 PetscErrorCode TaoIsObjectiveDefined(Tao tao, PetscBool *flg)
651 {
652   PetscFunctionBegin;
653   PetscValidHeaderSpecific(tao,TAO_CLASSID,1);
654   if (tao->ops->computeobjective == NULL) *flg = PETSC_FALSE;
655   else *flg = PETSC_TRUE;
656   PetscFunctionReturn(0);
657 }
658 
659 /*@
660   TaoIsGradientDefined - Checks to see if the user has
661   declared an objective-only routine.  Useful for determining when
662   it is appropriate to call TaoComputeGradient() or
663   TaoComputeGradientAndGradient()
664 
665   Not Collective
666 
667   Input Parameter:
668 . tao - the Tao context
669 
670   Output Parameter:
671 . flg - PETSC_TRUE if function routine is set by user, PETSC_FALSE otherwise
672 
673   Level: developer
674 
675 .seealso: TaoSetGradient(), TaoIsObjectiveDefined(), TaoIsObjectiveAndGradientDefined()
676 @*/
677 PetscErrorCode TaoIsGradientDefined(Tao tao, PetscBool *flg)
678 {
679   PetscFunctionBegin;
680   PetscValidHeaderSpecific(tao,TAO_CLASSID,1);
681   if (tao->ops->computegradient == NULL) *flg = PETSC_FALSE;
682   else *flg = PETSC_TRUE;
683   PetscFunctionReturn(0);
684 }
685 
686 /*@
687   TaoIsObjectiveAndGradientDefined - Checks to see if the user has
688   declared a joint objective/gradient routine.  Useful for determining when
689   it is appropriate to call TaoComputeObjective() or
690   TaoComputeObjectiveAndGradient()
691 
692   Not Collective
693 
694   Input Parameter:
695 . tao - the Tao context
696 
697   Output Parameter:
698 . flg - PETSC_TRUE if function routine is set by user, PETSC_FALSE otherwise
699 
700   Level: developer
701 
702 .seealso: TaoSetObjectiveAndGradient(), TaoIsObjectiveDefined(), TaoIsGradientDefined()
703 @*/
704 PetscErrorCode TaoIsObjectiveAndGradientDefined(Tao tao, PetscBool *flg)
705 {
706   PetscFunctionBegin;
707   PetscValidHeaderSpecific(tao,TAO_CLASSID,1);
708   if (tao->ops->computeobjectiveandgradient == NULL) *flg = PETSC_FALSE;
709   else *flg = PETSC_TRUE;
710   PetscFunctionReturn(0);
711 }
712