xref: /petsc/src/dm/impls/redundant/dmredundant.c (revision d8e47b638cf8f604a99e9678e1df24f82d959cd7)
#include <petsc/private/dmimpl.h>
#include <petscdmredundant.h> /*I      "petscdmredundant.h" I*/

typedef struct {
  PetscMPIInt rank; /* owner */
  PetscInt    N;    /* total number of dofs */
  PetscInt    n;    /* owned number of dofs, n=N on owner, n=0 on non-owners */
} DM_Redundant;

static PetscErrorCode DMCreateMatrix_Redundant(DM dm, Mat *J)
{
  DM_Redundant          *red = (DM_Redundant *)dm->data;
  ISLocalToGlobalMapping ltog;
  PetscInt               i, rstart, rend, *cols;
  PetscScalar           *vals;

  PetscFunctionBegin;
  PetscCall(MatCreate(PetscObjectComm((PetscObject)dm), J));
  PetscCall(MatSetSizes(*J, red->n, red->n, red->N, red->N));
  PetscCall(MatSetType(*J, dm->mattype));
  PetscCall(MatSeqAIJSetPreallocation(*J, red->n, NULL));
  PetscCall(MatSeqBAIJSetPreallocation(*J, 1, red->n, NULL));
  PetscCall(MatMPIAIJSetPreallocation(*J, red->n, NULL, red->N - red->n, NULL));
  PetscCall(MatMPIBAIJSetPreallocation(*J, 1, red->n, NULL, red->N - red->n, NULL));

  PetscCall(DMGetLocalToGlobalMapping(dm, &ltog));
  PetscCall(MatSetLocalToGlobalMapping(*J, ltog, ltog));
  PetscCall(MatSetDM(*J, dm));

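  /* Fill every column of each locally owned row with an explicit zero: the redundant variables are treated as densely
     coupled, so the owning rank's rows of the N x N matrix are preallocated and assembled as full rows. */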
  PetscCall(PetscMalloc2(red->N, &cols, red->N, &vals));
  for (i = 0; i < red->N; i++) {
    cols[i] = i;
    vals[i] = 0.0;
  }
  PetscCall(MatGetOwnershipRange(*J, &rstart, &rend));
  for (i = rstart; i < rend; i++) PetscCall(MatSetValues(*J, 1, &i, red->N, cols, vals, INSERT_VALUES));
  PetscCall(PetscFree2(cols, vals));
  PetscCall(MatAssemblyBegin(*J, MAT_FINAL_ASSEMBLY));
  PetscCall(MatAssemblyEnd(*J, MAT_FINAL_ASSEMBLY));
  PetscFunctionReturn(PETSC_SUCCESS);
}

static PetscErrorCode DMDestroy_Redundant(DM dm)
{
  PetscFunctionBegin;
  PetscCall(PetscObjectComposeFunction((PetscObject)dm, "DMRedundantSetSize_C", NULL));
  PetscCall(PetscObjectComposeFunction((PetscObject)dm, "DMRedundantGetSize_C", NULL));
  PetscCall(PetscObjectComposeFunction((PetscObject)dm, "DMSetUpGLVisViewer_C", NULL));
  /* This was originally freed in DMDestroy(), but that prevents reference counting of backend objects */
  PetscCall(PetscFree(dm->data));
  PetscFunctionReturn(PETSC_SUCCESS);
}

static PetscErrorCode DMCreateGlobalVector_Redundant(DM dm, Vec *gvec)
{
  DM_Redundant          *red = (DM_Redundant *)dm->data;
  ISLocalToGlobalMapping ltog;

  PetscFunctionBegin;
  PetscValidHeaderSpecific(dm, DM_CLASSID, 1);
  PetscAssertPointer(gvec, 2);
  *gvec = NULL;
  PetscCall(VecCreate(PetscObjectComm((PetscObject)dm), gvec));
  PetscCall(VecSetSizes(*gvec, red->n, red->N));
  PetscCall(VecSetType(*gvec, dm->vectype));
  PetscCall(DMGetLocalToGlobalMapping(dm, &ltog));
  PetscCall(VecSetLocalToGlobalMapping(*gvec, ltog));
  PetscCall(VecSetDM(*gvec, dm));
  PetscFunctionReturn(PETSC_SUCCESS);
}

static PetscErrorCode DMCreateLocalVector_Redundant(DM dm, Vec *lvec)
{
  DM_Redundant *red = (DM_Redundant *)dm->data;

  PetscFunctionBegin;
  PetscValidHeaderSpecific(dm, DM_CLASSID, 1);
  PetscAssertPointer(lvec, 2);
  *lvec = NULL;
  PetscCall(VecCreate(PETSC_COMM_SELF, lvec));
  PetscCall(VecSetSizes(*lvec, red->N, red->N));
  PetscCall(VecSetType(*lvec, dm->vectype));
  PetscCall(VecSetDM(*lvec, dm));
  PetscFunctionReturn(PETSC_SUCCESS);
}

static PetscErrorCode DMLocalToGlobalBegin_Redundant(DM dm, Vec l, InsertMode imode, Vec g)
{
  DM_Redundant      *red = (DM_Redundant *)dm->data;
  const PetscScalar *lv;
  PetscScalar       *gv;
  PetscMPIInt        rank, iN;

  PetscFunctionBegin;
  PetscCallMPI(MPI_Comm_rank(PetscObjectComm((PetscObject)dm), &rank));
  PetscCall(VecGetArrayRead(l, &lv));
  PetscCall(VecGetArray(g, &gv));
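  /* For ADD_VALUES/MAX_VALUES every rank contributes its full local copy, and the contributions (together with the
     owner's current global values) are reduced onto the owning rank with MPI_Reduce. For INSERT_VALUES only the owner
     copies its local values into the global vector (non-owners have n = 0, so the copy is empty there). */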
  switch (imode) {
  case ADD_VALUES:
  case MAX_VALUES: {
    void        *source;
    PetscScalar *buffer;
    PetscInt     i;
    if (rank == red->rank) {
      buffer = gv;
      source = MPI_IN_PLACE;
      if (imode == ADD_VALUES)
        for (i = 0; i < red->N; i++) buffer[i] = gv[i] + lv[i];
#if !defined(PETSC_USE_COMPLEX)
      if (imode == MAX_VALUES)
        for (i = 0; i < red->N; i++) buffer[i] = PetscMax(gv[i], lv[i]);
#endif
    } else source = (void *)lv;
    PetscCall(PetscMPIIntCast(red->N, &iN));
    PetscCallMPI(MPI_Reduce(source, gv, iN, MPIU_SCALAR, (imode == ADD_VALUES) ? MPIU_SUM : MPIU_MAX, red->rank, PetscObjectComm((PetscObject)dm)));
  } break;
  case INSERT_VALUES:
    PetscCall(PetscArraycpy(gv, lv, red->n));
    break;
  default:
    SETERRQ(PetscObjectComm((PetscObject)dm), PETSC_ERR_SUP, "InsertMode not supported");
  }
  PetscCall(VecRestoreArrayRead(l, &lv));
  PetscCall(VecRestoreArray(g, &gv));
  PetscFunctionReturn(PETSC_SUCCESS);
}

static PetscErrorCode DMLocalToGlobalEnd_Redundant(DM dm, Vec l, InsertMode imode, Vec g)
{
  PetscFunctionBegin;
  PetscFunctionReturn(PETSC_SUCCESS);
}

static PetscErrorCode DMGlobalToLocalBegin_Redundant(DM dm, Vec g, InsertMode imode, Vec l)
{
  DM_Redundant      *red = (DM_Redundant *)dm->data;
  const PetscScalar *gv;
  PetscScalar       *lv;
  PetscMPIInt        iN;

  PetscFunctionBegin;
  PetscCall(VecGetArrayRead(g, &gv));
  PetscCall(VecGetArray(l, &lv));
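  /* The owning rank copies its n = N global values into its local vector, then those values are broadcast so that
     every rank ends up with a complete local copy of all N redundant values. */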
  switch (imode) {
  case INSERT_VALUES:
    if (red->n) PetscCall(PetscArraycpy(lv, gv, red->n));
    PetscCall(PetscMPIIntCast(red->N, &iN));
    PetscCallMPI(MPI_Bcast(lv, iN, MPIU_SCALAR, red->rank, PetscObjectComm((PetscObject)dm)));
    break;
  default:
    SETERRQ(PetscObjectComm((PetscObject)dm), PETSC_ERR_SUP, "InsertMode not supported");
  }
  PetscCall(VecRestoreArrayRead(g, &gv));
  PetscCall(VecRestoreArray(l, &lv));
  PetscFunctionReturn(PETSC_SUCCESS);
}

static PetscErrorCode DMGlobalToLocalEnd_Redundant(DM dm, Vec g, InsertMode imode, Vec l)
{
  PetscFunctionBegin;
  PetscFunctionReturn(PETSC_SUCCESS);
}

static PetscErrorCode DMSetUp_Redundant(DM dm)
{
  PetscFunctionBegin;
  PetscFunctionReturn(PETSC_SUCCESS);
}

static PetscErrorCode DMView_Redundant(DM dm, PetscViewer viewer)
{
  DM_Redundant *red = (DM_Redundant *)dm->data;
  PetscBool     iascii;

  PetscFunctionBegin;
  PetscCall(PetscObjectTypeCompare((PetscObject)viewer, PETSCVIEWERASCII, &iascii));
  if (iascii) PetscCall(PetscViewerASCIIPrintf(viewer, "redundant: rank=%d N=%" PetscInt_FMT "\n", red->rank, red->N));
  PetscFunctionReturn(PETSC_SUCCESS);
}

static PetscErrorCode DMCreateColoring_Redundant(DM dm, ISColoringType ctype, ISColoring *coloring)
{
  DM_Redundant    *red = (DM_Redundant *)dm->data;
  PetscInt         i, nloc;
  ISColoringValue *colors;

  PetscFunctionBegin;
  switch (ctype) {
  case IS_COLORING_GLOBAL:
    nloc = red->n;
    break;
  case IS_COLORING_LOCAL:
    nloc = red->N;
    break;
  default:
    SETERRQ(PetscObjectComm((PetscObject)dm), PETSC_ERR_ARG_WRONG, "Unknown ISColoringType %d", (int)ctype);
  }
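  /* All redundant dofs are mutually coupled (dense rows), so every local dof gets its own distinct color. */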
  PetscCall(PetscMalloc1(nloc, &colors));
  for (i = 0; i < nloc; i++) PetscCall(ISColoringValueCast(i, colors + i));
  PetscCall(ISColoringCreate(PetscObjectComm((PetscObject)dm), red->N, nloc, colors, PETSC_OWN_POINTER, coloring));
  PetscCall(ISColoringSetType(*coloring, ctype));
  PetscFunctionReturn(PETSC_SUCCESS);
}

static PetscErrorCode DMRefine_Redundant(DM dmc, MPI_Comm comm, DM *dmf)
{
  PetscMPIInt   flag;
  DM_Redundant *redc = (DM_Redundant *)dmc->data;

  PetscFunctionBegin;
  if (comm == MPI_COMM_NULL) PetscCall(PetscObjectGetComm((PetscObject)dmc, &comm));
  PetscCallMPI(MPI_Comm_compare(PetscObjectComm((PetscObject)dmc), comm, &flag));
  PetscCheck(flag == MPI_CONGRUENT || flag == MPI_IDENT, PetscObjectComm((PetscObject)dmc), PETSC_ERR_SUP, "cannot change communicators");
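  /* "Refining" a redundant DM does not add unknowns; it simply creates another redundant DM with the same owner and
     size (the same applies to coarsening below). */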
  PetscCall(DMRedundantCreate(comm, redc->rank, redc->N, dmf));
  PetscFunctionReturn(PETSC_SUCCESS);
}

static PetscErrorCode DMCoarsen_Redundant(DM dmf, MPI_Comm comm, DM *dmc)
{
  PetscMPIInt   flag;
  DM_Redundant *redf = (DM_Redundant *)dmf->data;

  PetscFunctionBegin;
  if (comm == MPI_COMM_NULL) PetscCall(PetscObjectGetComm((PetscObject)dmf, &comm));
  PetscCallMPI(MPI_Comm_compare(PetscObjectComm((PetscObject)dmf), comm, &flag));
  PetscCheck(flag == MPI_CONGRUENT || flag == MPI_IDENT, PetscObjectComm((PetscObject)dmf), PETSC_ERR_SUP, "cannot change communicators");
  PetscCall(DMRedundantCreate(comm, redf->rank, redf->N, dmc));
  PetscFunctionReturn(PETSC_SUCCESS);
}

static PetscErrorCode DMCreateInterpolation_Redundant(DM dmc, DM dmf, Mat *P, Vec *scale)
{
  DM_Redundant *redc = (DM_Redundant *)dmc->data;
  DM_Redundant *redf = (DM_Redundant *)dmf->data;
  PetscMPIInt   flag;
  PetscInt      i, rstart, rend;

  PetscFunctionBegin;
  PetscCallMPI(MPI_Comm_compare(PetscObjectComm((PetscObject)dmc), PetscObjectComm((PetscObject)dmf), &flag));
  PetscCheck(flag == MPI_CONGRUENT || flag == MPI_IDENT, PetscObjectComm((PetscObject)dmf), PETSC_ERR_SUP, "cannot change communicators");
  PetscCheck(redc->rank == redf->rank, PetscObjectComm((PetscObject)dmf), PETSC_ERR_ARG_INCOMP, "Owning rank does not match");
  PetscCheck(redc->N == redf->N, PetscObjectComm((PetscObject)dmf), PETSC_ERR_ARG_INCOMP, "Global size does not match");
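  /* Coarse and fine DMs hold identical redundant data, so the interpolation operator is just the N x N identity. */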
  PetscCall(MatCreate(PetscObjectComm((PetscObject)dmc), P));
  PetscCall(MatSetSizes(*P, redc->n, redc->n, redc->N, redc->N));
  PetscCall(MatSetType(*P, MATAIJ));
  PetscCall(MatSeqAIJSetPreallocation(*P, 1, NULL));
  PetscCall(MatMPIAIJSetPreallocation(*P, 1, NULL, 0, NULL));
  PetscCall(MatGetOwnershipRange(*P, &rstart, &rend));
  for (i = rstart; i < rend; i++) PetscCall(MatSetValue(*P, i, i, 1.0, INSERT_VALUES));
  PetscCall(MatAssemblyBegin(*P, MAT_FINAL_ASSEMBLY));
  PetscCall(MatAssemblyEnd(*P, MAT_FINAL_ASSEMBLY));
  if (scale) PetscCall(DMCreateInterpolationScale(dmc, dmf, *P, scale));
  PetscFunctionReturn(PETSC_SUCCESS);
}

/*@
  DMRedundantSetSize - Sets the size of a densely coupled redundant object

  Collective

  Input Parameters:
+ dm   - `DM` object of type `DMREDUNDANT`
. rank - rank of the process that will own the redundant degrees of freedom
- N    - total number of redundant degrees of freedom

  Level: advanced

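  Example Usage:
  A minimal sketch (error checking and cleanup omitted); it assumes `dm` was already obtained from `DMCreate()` followed by `DMSetType(dm, DMREDUNDANT)`, or from `DMRedundantCreate()`.
.vb
  PetscMPIInt owner = 0; /* hypothetical choice: rank 0 owns the redundant values */
  PetscInt    N     = 3; /* hypothetical number of redundant degrees of freedom */
  PetscCall(DMRedundantSetSize(dm, owner, N));
  PetscCall(DMSetUp(dm));
.ve
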
.seealso: `DM`, `DMREDUNDANT`, `DMDestroy()`, `DMCreateGlobalVector()`, `DMRedundantCreate()`, `DMRedundantGetSize()`
@*/
PetscErrorCode DMRedundantSetSize(DM dm, PetscMPIInt rank, PetscInt N)
{
  PetscFunctionBegin;
  PetscValidHeaderSpecific(dm, DM_CLASSID, 1);
  PetscValidType(dm, 1);
  PetscValidLogicalCollectiveMPIInt(dm, rank, 2);
  PetscValidLogicalCollectiveInt(dm, N, 3);
  PetscTryMethod(dm, "DMRedundantSetSize_C", (DM, PetscMPIInt, PetscInt), (dm, rank, N));
  PetscFunctionReturn(PETSC_SUCCESS);
}

/*@
  DMRedundantGetSize - Gets the size of a densely coupled redundant object

  Not Collective

  Input Parameter:
. dm - `DM` object of type `DMREDUNDANT`

  Output Parameters:
+ rank - rank of the process that owns the redundant degrees of freedom (pass `NULL` if not needed)
- N    - total number of redundant degrees of freedom (pass `NULL` if not needed)

  Level: advanced

.seealso: `DM`, `DMREDUNDANT`, `DMDestroy()`, `DMCreateGlobalVector()`, `DMRedundantCreate()`, `DMRedundantSetSize()`
@*/
PetscErrorCode DMRedundantGetSize(DM dm, PetscMPIInt *rank, PetscInt *N)
{
  PetscFunctionBegin;
  PetscValidHeaderSpecific(dm, DM_CLASSID, 1);
  PetscValidType(dm, 1);
  PetscUseMethod(dm, "DMRedundantGetSize_C", (DM, PetscMPIInt *, PetscInt *), (dm, rank, N));
  PetscFunctionReturn(PETSC_SUCCESS);
}

static PetscErrorCode DMRedundantSetSize_Redundant(DM dm, PetscMPIInt rank, PetscInt N)
{
  DM_Redundant *red = (DM_Redundant *)dm->data;
  PetscMPIInt   myrank;
  PetscInt      i, *globals;

  PetscFunctionBegin;
  PetscCallMPI(MPI_Comm_rank(PetscObjectComm((PetscObject)dm), &myrank));
  red->rank = rank;
  red->N    = N;
  red->n    = (myrank == rank) ? N : 0;

  /* Every rank's local space covers all N global dofs, so the local-to-global mapping set up here is the identity map 0..N-1 */
  PetscCall(PetscMalloc1(red->N, &globals));
  for (i = 0; i < red->N; i++) globals[i] = i;
  PetscCall(ISLocalToGlobalMappingDestroy(&dm->ltogmap));
  PetscCall(ISLocalToGlobalMappingCreate(PetscObjectComm((PetscObject)dm), 1, red->N, globals, PETSC_OWN_POINTER, &dm->ltogmap));
  PetscFunctionReturn(PETSC_SUCCESS);
}

static PetscErrorCode DMRedundantGetSize_Redundant(DM dm, PetscMPIInt *rank, PetscInt *N)
{
  DM_Redundant *red = (DM_Redundant *)dm->data;

  PetscFunctionBegin;
  if (rank) *rank = red->rank;
  if (N) *N = red->N;
  PetscFunctionReturn(PETSC_SUCCESS);
}

static PetscErrorCode DMSetUpGLVisViewer_Redundant(PetscObject odm, PetscViewer viewer)
{
  PetscFunctionBegin;
  PetscFunctionReturn(PETSC_SUCCESS);
}

/*MC
   DMREDUNDANT = "redundant" - A DM object that is used to manage data for a small set of dense globally coupled variables.
         In the global representation of the vector, the variables are all stored on a single MPI process (all the other MPI processes have
         no variables); in the local representation, all the variables are stored on ALL the MPI processes (because they are all needed for each
         process's local computations).

         This DM is generally used inside a DMCOMPOSITE object. For example, it may be used to store continuation parameters for a bifurcation problem.

  Level: intermediate

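  Example Usage:
  A minimal sketch (error checking omitted) of the typical pattern mentioned above, attaching a redundant DM holding a
  single continuation parameter to a hypothetical `DMCOMPOSITE` `pack` that has already been created with `DMCompositeCreate()`.
.vb
  DM red;
  PetscCall(DMRedundantCreate(PETSC_COMM_WORLD, 0, 1, &red)); /* rank 0 owns the one redundant dof */
  PetscCall(DMCompositeAddDM(pack, red));
  PetscCall(DMDestroy(&red)); /* pack keeps its own reference */
.ve
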
.seealso: `DMType`, `DMCOMPOSITE`, `DMCreate()`, `DMRedundantSetSize()`, `DMRedundantGetSize()`
M*/

PETSC_EXTERN PetscErrorCode DMCreate_Redundant(DM dm)
{
  DM_Redundant *red;

  PetscFunctionBegin;
  PetscCall(PetscNew(&red));
  dm->data = red;

  dm->ops->setup               = DMSetUp_Redundant;
  dm->ops->view                = DMView_Redundant;
  dm->ops->createglobalvector  = DMCreateGlobalVector_Redundant;
  dm->ops->createlocalvector   = DMCreateLocalVector_Redundant;
  dm->ops->creatematrix        = DMCreateMatrix_Redundant;
  dm->ops->destroy             = DMDestroy_Redundant;
  dm->ops->globaltolocalbegin  = DMGlobalToLocalBegin_Redundant;
  dm->ops->globaltolocalend    = DMGlobalToLocalEnd_Redundant;
  dm->ops->localtoglobalbegin  = DMLocalToGlobalBegin_Redundant;
  dm->ops->localtoglobalend    = DMLocalToGlobalEnd_Redundant;
  dm->ops->refine              = DMRefine_Redundant;
  dm->ops->coarsen             = DMCoarsen_Redundant;
  dm->ops->createinterpolation = DMCreateInterpolation_Redundant;
  dm->ops->getcoloring         = DMCreateColoring_Redundant;

  PetscCall(PetscObjectComposeFunction((PetscObject)dm, "DMRedundantSetSize_C", DMRedundantSetSize_Redundant));
  PetscCall(PetscObjectComposeFunction((PetscObject)dm, "DMRedundantGetSize_C", DMRedundantGetSize_Redundant));
  PetscCall(PetscObjectComposeFunction((PetscObject)dm, "DMSetUpGLVisViewer_C", DMSetUpGLVisViewer_Redundant));
  PetscFunctionReturn(PETSC_SUCCESS);
}

/*@
  DMRedundantCreate - Creates a `DM` object, used to manage data for dense globally coupled variables

  Collective

  Input Parameters:
+ comm - the MPI communicator of the processes that will share the global vector
. rank - the MPI rank that will own the redundant values
- N    - total number of degrees of freedom

  Output Parameter:
. dm - the `DM` object of type `DMREDUNDANT`

  Level: advanced

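  Example Usage:
  A minimal sketch (error checking omitted): rank 0 owns N = 2 redundant values, and every rank obtains a full local
  copy through the usual global-to-local update.
.vb
  DM  red;
  Vec g, l;
  PetscCall(DMRedundantCreate(PETSC_COMM_WORLD, 0, 2, &red));
  PetscCall(DMCreateGlobalVector(red, &g)); /* local size 2 on rank 0, 0 elsewhere */
  PetscCall(DMCreateLocalVector(red, &l));  /* size 2 on every rank */
  PetscCall(VecSet(g, 1.0));
  PetscCall(DMGlobalToLocalBegin(red, g, INSERT_VALUES, l));
  PetscCall(DMGlobalToLocalEnd(red, g, INSERT_VALUES, l));
  PetscCall(VecDestroy(&l));
  PetscCall(VecDestroy(&g));
  PetscCall(DMDestroy(&red));
.ve
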
.seealso: `DM`, `DMREDUNDANT`, `DMDestroy()`, `DMCreateGlobalVector()`, `DMCreateMatrix()`, `DMCompositeAddDM()`, `DMSetType()`, `DMRedundantSetSize()`, `DMRedundantGetSize()`
@*/
PetscErrorCode DMRedundantCreate(MPI_Comm comm, PetscMPIInt rank, PetscInt N, DM *dm)
{
  PetscFunctionBegin;
  PetscAssertPointer(dm, 4);
  PetscCall(DMCreate(comm, dm));
  PetscCall(DMSetType(*dm, DMREDUNDANT));
  PetscCall(DMRedundantSetSize(*dm, rank, N));
  PetscCall(DMSetUp(*dm));
  PetscFunctionReturn(PETSC_SUCCESS);
}
411