xref: /petsc/src/tao/interface/taosolver_fg.c (revision b41ce5d507ea9a58bfa83cf403107a702e77a67d)
1 #include <petsc/private/taoimpl.h> /*I "petsctao.h" I*/
2 
3 /*@
4   TaoSetInitialVector - Sets the initial guess for the solve
5 
6   Logically collective on Tao
7 
8   Input Parameters:
9 + tao - the Tao context
10 - x0  - the initial guess
11 
12   Level: beginner
13 .seealso: TaoCreate(), TaoSolve()
14 @*/
15 
16 PetscErrorCode TaoSetInitialVector(Tao tao, Vec x0)
17 {
18   PetscErrorCode ierr;
19 
20   PetscFunctionBegin;
21   PetscValidHeaderSpecific(tao,TAO_CLASSID,1);
22   if (x0) {
23     PetscValidHeaderSpecific(x0,VEC_CLASSID,2);
24     PetscObjectReference((PetscObject)x0);
25   }
26   ierr = VecDestroy(&tao->solution);CHKERRQ(ierr);
27   tao->solution = x0;
28   PetscFunctionReturn(0);
29 }
30 
31 /*@
32   TaoComputeGradient - Computes the gradient of the objective function
33 
34   Collective on Tao
35 
36   Input Parameters:
37 + tao - the Tao context
38 - X - input vector
39 
40   Output Parameter:
41 . G - gradient vector
42 
43   Notes: TaoComputeGradient() is typically used within minimization implementations,
44   so most users would not generally call this routine themselves.
45 
46   Level: advanced
47 
48 .seealso: TaoComputeObjective(), TaoComputeObjectiveAndGradient(), TaoSetGradientRoutine()
49 @*/
50 PetscErrorCode TaoComputeGradient(Tao tao, Vec X, Vec G)
51 {
52   PetscErrorCode ierr;
53   PetscReal      dummy;
54 
55   PetscFunctionBegin;
56   PetscValidHeaderSpecific(tao,TAO_CLASSID,1);
57   PetscValidHeaderSpecific(X,VEC_CLASSID,2);
58   PetscValidHeaderSpecific(G,VEC_CLASSID,2);
59   PetscCheckSameComm(tao,1,X,2);
60   PetscCheckSameComm(tao,1,G,3);
61   if (tao->ops->computegradient) {
62     ierr = PetscLogEventBegin(Tao_GradientEval,tao,X,G,NULL);CHKERRQ(ierr);
63     PetscStackPush("Tao user gradient evaluation routine");
64     ierr = (*tao->ops->computegradient)(tao,X,G,tao->user_gradP);CHKERRQ(ierr);
65     PetscStackPop;
66     ierr = PetscLogEventEnd(Tao_GradientEval,tao,X,G,NULL);CHKERRQ(ierr);
67     tao->ngrads++;
68   } else if (tao->ops->computeobjectiveandgradient) {
69     ierr = PetscLogEventBegin(Tao_ObjGradientEval,tao,X,G,NULL);CHKERRQ(ierr);
70     PetscStackPush("Tao user objective/gradient evaluation routine");
71     ierr = (*tao->ops->computeobjectiveandgradient)(tao,X,&dummy,G,tao->user_objgradP);CHKERRQ(ierr);
72     PetscStackPop;
73     ierr = PetscLogEventEnd(Tao_ObjGradientEval,tao,X,G,NULL);CHKERRQ(ierr);
74     tao->nfuncgrads++;
75   }  else SETERRQ(PETSC_COMM_SELF,PETSC_ERR_ARG_WRONGSTATE,"TaoSetGradientRoutine() has not been called");
76   PetscFunctionReturn(0);
77 }
78 
79 /*@
80   TaoComputeObjective - Computes the objective function value at a given point
81 
82   Collective on Tao
83 
84   Input Parameters:
85 + tao - the Tao context
86 - X - input vector
87 
88   Output Parameter:
89 . f - Objective value at X
90 
91   Notes: TaoComputeObjective() is typically used within minimization implementations,
92   so most users would not generally call this routine themselves.
93 
94   Level: advanced
95 
96 .seealso: TaoComputeGradient(), TaoComputeObjectiveAndGradient(), TaoSetObjectiveRoutine()
97 @*/
98 PetscErrorCode TaoComputeObjective(Tao tao, Vec X, PetscReal *f)
99 {
100   PetscErrorCode ierr;
101   Vec            temp;
102 
103   PetscFunctionBegin;
104   PetscValidHeaderSpecific(tao,TAO_CLASSID,1);
105   PetscValidHeaderSpecific(X,VEC_CLASSID,2);
106   PetscCheckSameComm(tao,1,X,2);
107   if (tao->ops->computeobjective) {
108     ierr = PetscLogEventBegin(Tao_ObjectiveEval,tao,X,NULL,NULL);CHKERRQ(ierr);
109     PetscStackPush("Tao user objective evaluation routine");
110     ierr = (*tao->ops->computeobjective)(tao,X,f,tao->user_objP);CHKERRQ(ierr);
111     PetscStackPop;
112     ierr = PetscLogEventEnd(Tao_ObjectiveEval,tao,X,NULL,NULL);CHKERRQ(ierr);
113     tao->nfuncs++;
114   } else if (tao->ops->computeobjectiveandgradient) {
115     ierr = PetscInfo(tao,"Duplicating variable vector in order to call func/grad routine\n");CHKERRQ(ierr);
116     ierr = VecDuplicate(X,&temp);CHKERRQ(ierr);
117     ierr = PetscLogEventBegin(Tao_ObjGradientEval,tao,X,NULL,NULL);CHKERRQ(ierr);
118     PetscStackPush("Tao user objective/gradient evaluation routine");
119     ierr = (*tao->ops->computeobjectiveandgradient)(tao,X,f,temp,tao->user_objgradP);CHKERRQ(ierr);
120     PetscStackPop;
121     ierr = PetscLogEventEnd(Tao_ObjGradientEval,tao,X,NULL,NULL);CHKERRQ(ierr);
122     ierr = VecDestroy(&temp);CHKERRQ(ierr);
123     tao->nfuncgrads++;
124   }  else SETERRQ(PETSC_COMM_SELF,PETSC_ERR_ARG_WRONGSTATE,"TaoSetObjectiveRoutine() has not been called");
125   ierr = PetscInfo1(tao,"TAO Function evaluation: %14.12e\n",(double)(*f));CHKERRQ(ierr);
126   PetscFunctionReturn(0);
127 }
128 
/*@
  TaoComputeObjectiveAndGradient - Computes the objective function value and its gradient at a given point

  Collective on Tao

  Input Parameters:
+ tao - the Tao context
- X - input vector

  Output Parameters:
+ f - Objective value at X
- G - Gradient vector at X

  Notes: TaoComputeObjectiveAndGradient() is typically used within minimization implementations,
  so most users would not generally call this routine themselves.

  Level: advanced

.seealso: TaoComputeGradient(), TaoComputeObjective(), TaoSetObjectiveAndGradientRoutine()
@*/
PetscErrorCode TaoComputeObjectiveAndGradient(Tao tao, Vec X, PetscReal *f, Vec G)
{
  PetscErrorCode ierr;

  PetscFunctionBegin;
  PetscValidHeaderSpecific(tao,TAO_CLASSID,1);
  PetscValidHeaderSpecific(X,VEC_CLASSID,2);
  PetscValidHeaderSpecific(G,VEC_CLASSID,4);
  PetscCheckSameComm(tao,1,X,2);
  PetscCheckSameComm(tao,1,G,4);
  if (tao->ops->computeobjectiveandgradient) {
    /* preferred path: a single user callback supplies both value and gradient */
    ierr = PetscLogEventBegin(Tao_ObjGradientEval,tao,X,G,NULL);CHKERRQ(ierr);
    PetscStackPush("Tao user objective/gradient evaluation routine");
    ierr = (*tao->ops->computeobjectiveandgradient)(tao,X,f,G,tao->user_objgradP);CHKERRQ(ierr);
    PetscStackPop;
    if (tao->ops->computegradient == TaoDefaultComputeGradient) {
      /* Overwrite gradient with finite difference gradient */
      ierr = TaoDefaultComputeGradient(tao,X,G,tao->user_objgradP);CHKERRQ(ierr);
    }
    ierr = PetscLogEventEnd(Tao_ObjGradientEval,tao,X,G,NULL);CHKERRQ(ierr);
    tao->nfuncgrads++;
  } else if (tao->ops->computeobjective && tao->ops->computegradient) {
    /* fallback: run the two separate callbacks back-to-back, logging each separately */
    ierr = PetscLogEventBegin(Tao_ObjectiveEval,tao,X,NULL,NULL);CHKERRQ(ierr);
    PetscStackPush("Tao user objective evaluation routine");
    ierr = (*tao->ops->computeobjective)(tao,X,f,tao->user_objP);CHKERRQ(ierr);
    PetscStackPop;
    ierr = PetscLogEventEnd(Tao_ObjectiveEval,tao,X,NULL,NULL);CHKERRQ(ierr);
    tao->nfuncs++;
    ierr = PetscLogEventBegin(Tao_GradientEval,tao,X,G,NULL);CHKERRQ(ierr);
    PetscStackPush("Tao user gradient evaluation routine");
    ierr = (*tao->ops->computegradient)(tao,X,G,tao->user_gradP);CHKERRQ(ierr);
    PetscStackPop;
    ierr = PetscLogEventEnd(Tao_GradientEval,tao,X,G,NULL);CHKERRQ(ierr);
    tao->ngrads++;
  } else SETERRQ(PETSC_COMM_SELF,PETSC_ERR_ARG_WRONGSTATE,"TaoSetObjectiveRoutine() or TaoSetGradientRoutine() not set");
  ierr = PetscInfo1(tao,"TAO Function evaluation: %14.12e\n",(double)(*f));CHKERRQ(ierr);
  PetscFunctionReturn(0);
}
187 
188 /*@C
189   TaoSetObjectiveRoutine - Sets the function evaluation routine for minimization
190 
191   Logically collective on Tao
192 
193   Input Parameter:
194 + tao - the Tao context
195 . func - the objective function
196 - ctx - [optional] user-defined context for private data for the function evaluation
197         routine (may be NULL)
198 
199   Calling sequence of func:
200 $      func (Tao tao, Vec x, PetscReal *f, void *ctx);
201 
202 + x - input vector
203 . f - function value
204 - ctx - [optional] user-defined function context
205 
206   Level: beginner
207 
208 .seealso: TaoSetGradientRoutine(), TaoSetHessianRoutine() TaoSetObjectiveAndGradientRoutine()
209 @*/
210 PetscErrorCode TaoSetObjectiveRoutine(Tao tao, PetscErrorCode (*func)(Tao, Vec, PetscReal*,void*),void *ctx)
211 {
212   PetscFunctionBegin;
213   PetscValidHeaderSpecific(tao,TAO_CLASSID,1);
214   tao->user_objP = ctx;
215   tao->ops->computeobjective = func;
216   PetscFunctionReturn(0);
217 }
218 
/*@C
  TaoSetSeparableObjectiveRoutine - Sets the function evaluation routine for least-square applications

  Logically collective on Tao

  Input Parameters:
+ tao - the Tao context
. sepobj - vector to hold the computed separable objective (residual) values
. func - the objective function evaluation routine
- ctx - [optional] user-defined context for private data for the function evaluation
        routine (may be NULL)

  Calling sequence of func:
$      func (Tao tao, Vec x, Vec f, void *ctx);

+ x - input vector
. f - function value vector
- ctx - [optional] user-defined function context

  Level: beginner

.seealso: TaoSetObjectiveRoutine(), TaoSetJacobianRoutine()
@*/
PetscErrorCode TaoSetSeparableObjectiveRoutine(Tao tao, Vec sepobj, PetscErrorCode (*func)(Tao, Vec, Vec, void*),void *ctx)
{
  PetscFunctionBegin;
  PetscValidHeaderSpecific(tao,TAO_CLASSID,1);
  PetscValidHeaderSpecific(sepobj, VEC_CLASSID,2);
  /* NOTE(review): sepobj is stored without taking a reference (unlike TaoSetInitialVector);
     presumably the caller must keep it alive for the life of the solver -- confirm */
  tao->user_sepobjP = ctx;
  tao->sep_objective = sepobj;
  tao->ops->computeseparableobjective = func;
  PetscFunctionReturn(0);
}
251 
252 /*@
253   TaoSetSeparableObjectiveWeights - Give weights for the separable objective values. A vector can be used if only diagonal terms are used, otherwise a matrix can be give. If this function is not used, or if sigma_v and sigma_w are both NULL, then the default identity matrix will be used for weights.
254 
255   Collective on Tao
256 
257   Input Parameters:
258 + tao - the Tao context
259 . sigma_v - vector of weights (diagonal terms only)
260 . n       - the number of weights (if using off-diagonal)
261 . rows    - index list of rows for sigma_w
262 . cols    - index list of columns for sigma_w
263 - vals - array of weights
264 
265 
266 
267   Note: Either sigma_v or sigma_w (or both) should be NULL
268 
269   Level: intermediate
270 
271 .seealso: TaoSetSeparableObjectiveRoutine()
272 @*/
273 PetscErrorCode TaoSetSeparableObjectiveWeights(Tao tao, Vec sigma_v, PetscInt n, PetscInt *rows, PetscInt *cols, PetscReal *vals)
274 {
275   PetscErrorCode ierr;
276   PetscInt       i;
277   PetscFunctionBegin;
278   PetscValidHeaderSpecific(tao,TAO_CLASSID,1);
279   ierr = VecDestroy(&tao->sep_weights_v);CHKERRQ(ierr);
280   tao->sep_weights_v=sigma_v;
281   if (sigma_v) {
282     ierr = PetscObjectReference((PetscObject)sigma_v);CHKERRQ(ierr);
283   }
284   if (vals) {
285     if (tao->sep_weights_n) {
286       ierr = PetscFree(tao->sep_weights_rows);CHKERRQ(ierr);
287       ierr = PetscFree(tao->sep_weights_cols);CHKERRQ(ierr);
288       ierr = PetscFree(tao->sep_weights_w);CHKERRQ(ierr);
289     }
290     ierr = PetscMalloc1(n,&tao->sep_weights_rows);CHKERRQ(ierr);
291     ierr = PetscMalloc1(n,&tao->sep_weights_cols);CHKERRQ(ierr);
292     ierr = PetscMalloc1(n,&tao->sep_weights_w);CHKERRQ(ierr);
293     tao->sep_weights_n=n;
294     for (i=0;i<n;i++) {
295       tao->sep_weights_rows[i]=rows[i];
296       tao->sep_weights_cols[i]=cols[i];
297       tao->sep_weights_w[i]=vals[i];
298     }
299   } else {
300     tao->sep_weights_n=0;
301     tao->sep_weights_rows=0;
302     tao->sep_weights_cols=0;
303   }
304   PetscFunctionReturn(0);
305 }
306 /*@
307   TaoComputeSeparableObjective - Computes a separable objective function vector at a given point (for least-square applications)
308 
309   Collective on Tao
310 
311   Input Parameters:
312 + tao - the Tao context
313 - X - input vector
314 
315   Output Parameter:
316 . f - Objective vector at X
317 
318   Notes: TaoComputeSeparableObjective() is typically used within minimization implementations,
319   so most users would not generally call this routine themselves.
320 
321   Level: advanced
322 
323 .seealso: TaoSetSeparableObjectiveRoutine()
324 @*/
325 PetscErrorCode TaoComputeSeparableObjective(Tao tao, Vec X, Vec F)
326 {
327   PetscErrorCode ierr;
328 
329   PetscFunctionBegin;
330   PetscValidHeaderSpecific(tao,TAO_CLASSID,1);
331   PetscValidHeaderSpecific(X,VEC_CLASSID,2);
332   PetscValidHeaderSpecific(F,VEC_CLASSID,3);
333   PetscCheckSameComm(tao,1,X,2);
334   PetscCheckSameComm(tao,1,F,3);
335   if (tao->ops->computeseparableobjective) {
336     ierr = PetscLogEventBegin(Tao_ObjectiveEval,tao,X,NULL,NULL);CHKERRQ(ierr);
337     PetscStackPush("Tao user separable objective evaluation routine");
338     ierr = (*tao->ops->computeseparableobjective)(tao,X,F,tao->user_sepobjP);CHKERRQ(ierr);
339     PetscStackPop;
340     ierr = PetscLogEventEnd(Tao_ObjectiveEval,tao,X,NULL,NULL);CHKERRQ(ierr);
341     tao->nfuncs++;
342   } else SETERRQ(PETSC_COMM_SELF,PETSC_ERR_ARG_WRONGSTATE,"TaoSetSeparableObjectiveRoutine() has not been called");
343   ierr = PetscInfo(tao,"TAO separable function evaluation.\n");CHKERRQ(ierr);
344   PetscFunctionReturn(0);
345 }
346 
347 /*@C
348   TaoSetGradientRoutine - Sets the gradient evaluation routine for minimization
349 
350   Logically collective on Tao
351 
352   Input Parameter:
353 + tao - the Tao context
354 . func - the gradient function
355 - ctx - [optional] user-defined context for private data for the gradient evaluation
356         routine (may be NULL)
357 
358   Calling sequence of func:
359 $      func (Tao tao, Vec x, Vec g, void *ctx);
360 
361 + x - input vector
362 . g - gradient value (output)
363 - ctx - [optional] user-defined function context
364 
365   Level: beginner
366 
367 .seealso: TaoSetObjectiveRoutine(), TaoSetHessianRoutine() TaoSetObjectiveAndGradientRoutine()
368 @*/
369 PetscErrorCode TaoSetGradientRoutine(Tao tao,  PetscErrorCode (*func)(Tao, Vec, Vec, void*),void *ctx)
370 {
371   PetscFunctionBegin;
372   PetscValidHeaderSpecific(tao,TAO_CLASSID,1);
373   tao->user_gradP = ctx;
374   tao->ops->computegradient = func;
375   PetscFunctionReturn(0);
376 }
377 
378 
379 /*@C
380   TaoSetObjectiveAndGradientRoutine - Sets a combined objective function and gradient evaluation routine for minimization
381 
382   Logically collective on Tao
383 
384   Input Parameter:
385 + tao - the Tao context
386 . func - the gradient function
387 - ctx - [optional] user-defined context for private data for the gradient evaluation
388         routine (may be NULL)
389 
390   Calling sequence of func:
391 $      func (Tao tao, Vec x, PetscReal *f, Vec g, void *ctx);
392 
393 + x - input vector
394 . f - objective value (output)
395 . g - gradient value (output)
396 - ctx - [optional] user-defined function context
397 
398   Level: beginner
399 
400 .seealso: TaoSetObjectiveRoutine(), TaoSetHessianRoutine() TaoSetObjectiveAndGradientRoutine()
401 @*/
402 PetscErrorCode TaoSetObjectiveAndGradientRoutine(Tao tao, PetscErrorCode (*func)(Tao, Vec, PetscReal *, Vec, void*), void *ctx)
403 {
404   PetscFunctionBegin;
405   PetscValidHeaderSpecific(tao,TAO_CLASSID,1);
406   tao->user_objgradP = ctx;
407   tao->ops->computeobjectiveandgradient = func;
408   PetscFunctionReturn(0);
409 }
410 
411 /*@
412   TaoIsObjectiveDefined -- Checks to see if the user has
413   declared an objective-only routine.  Useful for determining when
414   it is appropriate to call TaoComputeObjective() or
415   TaoComputeObjectiveAndGradient()
416 
417   Collective on Tao
418 
419   Input Parameter:
420 + tao - the Tao context
421 - ctx - PETSC_TRUE if objective function routine is set by user,
422         PETSC_FALSE otherwise
423   Level: developer
424 
425 .seealso: TaoSetObjectiveRoutine(), TaoIsGradientDefined(), TaoIsObjectiveAndGradientDefined()
426 @*/
427 PetscErrorCode TaoIsObjectiveDefined(Tao tao, PetscBool *flg)
428 {
429   PetscFunctionBegin;
430   PetscValidHeaderSpecific(tao,TAO_CLASSID,1);
431   if (tao->ops->computeobjective == 0) *flg = PETSC_FALSE;
432   else *flg = PETSC_TRUE;
433   PetscFunctionReturn(0);
434 }
435 
436 /*@
437   TaoIsGradientDefined -- Checks to see if the user has
438   declared an objective-only routine.  Useful for determining when
439   it is appropriate to call TaoComputeGradient() or
440   TaoComputeGradientAndGradient()
441 
442   Not Collective
443 
444   Input Parameter:
445 + tao - the Tao context
446 - ctx - PETSC_TRUE if gradient routine is set by user, PETSC_FALSE otherwise
447   Level: developer
448 
449 .seealso: TaoSetGradientRoutine(), TaoIsObjectiveDefined(), TaoIsObjectiveAndGradientDefined()
450 @*/
451 PetscErrorCode TaoIsGradientDefined(Tao tao, PetscBool *flg)
452 {
453   PetscFunctionBegin;
454   PetscValidHeaderSpecific(tao,TAO_CLASSID,1);
455   if (tao->ops->computegradient == 0) *flg = PETSC_FALSE;
456   else *flg = PETSC_TRUE;
457   PetscFunctionReturn(0);
458 }
459 
460 
461 /*@
462   TaoIsObjectiveAndGradientDefined -- Checks to see if the user has
463   declared a joint objective/gradient routine.  Useful for determining when
464   it is appropriate to call TaoComputeObjective() or
465   TaoComputeObjectiveAndGradient()
466 
467   Not Collective
468 
469   Input Parameter:
470 + tao - the Tao context
471 - ctx - PETSC_TRUE if objective/gradient routine is set by user, PETSC_FALSE otherwise
472   Level: developer
473 
474 .seealso: TaoSetObjectiveAndGradientRoutine(), TaoIsObjectiveDefined(), TaoIsGradientDefined()
475 @*/
476 PetscErrorCode TaoIsObjectiveAndGradientDefined(Tao tao, PetscBool *flg)
477 {
478   PetscFunctionBegin;
479   PetscValidHeaderSpecific(tao,TAO_CLASSID,1);
480   if (tao->ops->computeobjectiveandgradient == 0) *flg = PETSC_FALSE;
481   else *flg = PETSC_TRUE;
482   PetscFunctionReturn(0);
483 }
484 
485 
486 
487