#include <petsc/private/dmimpl.h>
#include <petscdmredundant.h> /*I      "petscdmredundant.h" I*/

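/* Global representation: all N dofs are stored on the owning MPI rank (n = N there, n = 0 elsewhere).
   Local representation: every rank holds its own full copy of all N dofs. */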
typedef struct {
  PetscMPIInt rank; /* owner */
  PetscInt    N;    /* total number of dofs */
  PetscInt    n;    /* owned number of dofs, n=N on owner, n=0 on non-owners */
} DM_Redundant;

static PetscErrorCode DMCreateMatrix_Redundant(DM dm, Mat *J) {
  DM_Redundant          *red = (DM_Redundant *)dm->data;
  ISLocalToGlobalMapping ltog;
  PetscInt               i, rstart, rend, *cols;
  PetscScalar           *vals;

  PetscFunctionBegin;
  PetscCall(MatCreate(PetscObjectComm((PetscObject)dm), J));
  PetscCall(MatSetSizes(*J, red->n, red->n, red->N, red->N));
  PetscCall(MatSetType(*J, dm->mattype));
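  /* Preallocate for each matrix type that dm->mattype might select; the preallocation calls
     that do not match the actual type of *J are no-ops */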
  PetscCall(MatSeqAIJSetPreallocation(*J, red->n, NULL));
  PetscCall(MatSeqBAIJSetPreallocation(*J, 1, red->n, NULL));
  PetscCall(MatMPIAIJSetPreallocation(*J, red->n, NULL, red->N - red->n, NULL));
  PetscCall(MatMPIBAIJSetPreallocation(*J, 1, red->n, NULL, red->N - red->n, NULL));

  PetscCall(DMGetLocalToGlobalMapping(dm, &ltog));
  PetscCall(MatSetLocalToGlobalMapping(*J, ltog, ltog));
  PetscCall(MatSetDM(*J, dm));

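  /* Insert explicit zeros in all N columns of every locally owned row so the dense coupling
     pattern is allocated and assembled up front */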
  PetscCall(PetscMalloc2(red->N, &cols, red->N, &vals));
  for (i = 0; i < red->N; i++) {
    cols[i] = i;
    vals[i] = 0.0;
  }
  PetscCall(MatGetOwnershipRange(*J, &rstart, &rend));
  for (i = rstart; i < rend; i++) { PetscCall(MatSetValues(*J, 1, &i, red->N, cols, vals, INSERT_VALUES)); }
  PetscCall(PetscFree2(cols, vals));
  PetscCall(MatAssemblyBegin(*J, MAT_FINAL_ASSEMBLY));
  PetscCall(MatAssemblyEnd(*J, MAT_FINAL_ASSEMBLY));
  PetscFunctionReturn(0);
}

static PetscErrorCode DMDestroy_Redundant(DM dm) {
  PetscFunctionBegin;
  PetscCall(PetscObjectComposeFunction((PetscObject)dm, "DMRedundantSetSize_C", NULL));
  PetscCall(PetscObjectComposeFunction((PetscObject)dm, "DMRedundantGetSize_C", NULL));
  PetscCall(PetscObjectComposeFunction((PetscObject)dm, "DMSetUpGLVisViewer_C", NULL));
  /* This was originally freed in DMDestroy(), but that prevents reference counting of backend objects */
  PetscCall(PetscFree(dm->data));
  PetscFunctionReturn(0);
}

static PetscErrorCode DMCreateGlobalVector_Redundant(DM dm, Vec *gvec) {
  DM_Redundant          *red = (DM_Redundant *)dm->data;
  ISLocalToGlobalMapping ltog;

  PetscFunctionBegin;
  PetscValidHeaderSpecific(dm, DM_CLASSID, 1);
  PetscValidPointer(gvec, 2);
  *gvec = NULL;
  PetscCall(VecCreate(PetscObjectComm((PetscObject)dm), gvec));
  PetscCall(VecSetSizes(*gvec, red->n, red->N));
  PetscCall(VecSetType(*gvec, dm->vectype));
  PetscCall(DMGetLocalToGlobalMapping(dm, &ltog));
  PetscCall(VecSetLocalToGlobalMapping(*gvec, ltog));
  PetscCall(VecSetDM(*gvec, dm));
  PetscFunctionReturn(0);
}

static PetscErrorCode DMCreateLocalVector_Redundant(DM dm, Vec *lvec) {
  DM_Redundant *red = (DM_Redundant *)dm->data;

  PetscFunctionBegin;
  PetscValidHeaderSpecific(dm, DM_CLASSID, 1);
  PetscValidPointer(lvec, 2);
  *lvec = NULL;
  PetscCall(VecCreate(PETSC_COMM_SELF, lvec));
  PetscCall(VecSetSizes(*lvec, red->N, red->N));
  PetscCall(VecSetType(*lvec, dm->vectype));
  PetscCall(VecSetDM(*lvec, dm));
  PetscFunctionReturn(0);
}

static PetscErrorCode DMLocalToGlobalBegin_Redundant(DM dm, Vec l, InsertMode imode, Vec g) {
  DM_Redundant      *red = (DM_Redundant *)dm->data;
  const PetscScalar *lv;
  PetscScalar       *gv;
  PetscMPIInt        rank;

  PetscFunctionBegin;
  PetscCallMPI(MPI_Comm_rank(PetscObjectComm((PetscObject)dm), &rank));
  PetscCall(VecGetArrayRead(l, &lv));
  PetscCall(VecGetArray(g, &gv));
  switch (imode) {
  case ADD_VALUES:
  case MAX_VALUES: {
    void        *source;
    PetscScalar *buffer;
    PetscInt     i;
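    /* Reduce the replicated local copies from all ranks onto the owning rank's global vector.
       The owner first folds its own local values into its receive buffer so that MPI_IN_PLACE
       can be used as its send buffer in the reduction */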
    if (rank == red->rank) {
      buffer = gv;
      source = MPI_IN_PLACE;
      if (imode == ADD_VALUES)
        for (i = 0; i < red->N; i++) buffer[i] = gv[i] + lv[i];
#if !defined(PETSC_USE_COMPLEX)
      if (imode == MAX_VALUES)
        for (i = 0; i < red->N; i++) buffer[i] = PetscMax(gv[i], lv[i]);
#endif
    } else source = (void *)lv;
    PetscCallMPI(MPI_Reduce(source, gv, red->N, MPIU_SCALAR, (imode == ADD_VALUES) ? MPIU_SUM : MPIU_MAX, red->rank, PetscObjectComm((PetscObject)dm)));
  } break;
  case INSERT_VALUES: PetscCall(PetscArraycpy(gv, lv, red->n)); break;
  default: SETERRQ(PetscObjectComm((PetscObject)dm), PETSC_ERR_SUP, "InsertMode not supported");
  }
  PetscCall(VecRestoreArrayRead(l, &lv));
  PetscCall(VecRestoreArray(g, &gv));
  PetscFunctionReturn(0);
}
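
/* A minimal sketch of the gather (local-to-global) direction, assuming dm is a DMREDUNDANT
   created on PETSC_COMM_WORLD, g its global vector and l its local vector; the variable
   names and values are illustrative only:

     PetscCall(VecSet(g, 0.0));
     PetscCall(VecSet(l, 1.0));
     PetscCall(DMLocalToGlobalBegin(dm, l, ADD_VALUES, g));
     PetscCall(DMLocalToGlobalEnd(dm, l, ADD_VALUES, g));
     // every rank contributed 1.0, so on the owning rank each entry of g now equals the number of MPI ranks
*/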

static PetscErrorCode DMLocalToGlobalEnd_Redundant(DM dm, Vec l, InsertMode imode, Vec g) {
  PetscFunctionBegin;
  PetscFunctionReturn(0);
}

static PetscErrorCode DMGlobalToLocalBegin_Redundant(DM dm, Vec g, InsertMode imode, Vec l) {
  DM_Redundant      *red = (DM_Redundant *)dm->data;
  const PetscScalar *gv;
  PetscScalar       *lv;

  PetscFunctionBegin;
  PetscCall(VecGetArrayRead(g, &gv));
  PetscCall(VecGetArray(l, &lv));
  switch (imode) {
  case INSERT_VALUES:
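    /* The owning rank copies its n (= N) global entries into its local vector, then broadcasts
       them so every rank ends up with a full redundant copy */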
    if (red->n) PetscCall(PetscArraycpy(lv, gv, red->n));
    PetscCallMPI(MPI_Bcast(lv, red->N, MPIU_SCALAR, red->rank, PetscObjectComm((PetscObject)dm)));
    break;
  default: SETERRQ(PetscObjectComm((PetscObject)dm), PETSC_ERR_SUP, "InsertMode not supported");
  }
  PetscCall(VecRestoreArrayRead(g, &gv));
  PetscCall(VecRestoreArray(l, &lv));
  PetscFunctionReturn(0);
}

static PetscErrorCode DMGlobalToLocalEnd_Redundant(DM dm, Vec g, InsertMode imode, Vec l) {
  PetscFunctionBegin;
  PetscFunctionReturn(0);
}

static PetscErrorCode DMSetUp_Redundant(DM dm) {
  PetscFunctionBegin;
  PetscFunctionReturn(0);
}

static PetscErrorCode DMView_Redundant(DM dm, PetscViewer viewer) {
  DM_Redundant *red = (DM_Redundant *)dm->data;
  PetscBool     iascii;

  PetscFunctionBegin;
  PetscCall(PetscObjectTypeCompare((PetscObject)viewer, PETSCVIEWERASCII, &iascii));
  if (iascii) { PetscCall(PetscViewerASCIIPrintf(viewer, "redundant: rank=%d N=%" PetscInt_FMT "\n", red->rank, red->N)); }
  PetscFunctionReturn(0);
}

static PetscErrorCode DMCreateColoring_Redundant(DM dm, ISColoringType ctype, ISColoring *coloring) {
  DM_Redundant    *red = (DM_Redundant *)dm->data;
  PetscInt         i, nloc;
  ISColoringValue *colors;

  PetscFunctionBegin;
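  /* All redundant dofs are densely coupled, so each dof receives its own color */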
  switch (ctype) {
  case IS_COLORING_GLOBAL: nloc = red->n; break;
  case IS_COLORING_LOCAL: nloc = red->N; break;
  default: SETERRQ(PetscObjectComm((PetscObject)dm), PETSC_ERR_ARG_WRONG, "Unknown ISColoringType %d", (int)ctype);
  }
  PetscCall(PetscMalloc1(nloc, &colors));
  for (i = 0; i < nloc; i++) colors[i] = i;
  PetscCall(ISColoringCreate(PetscObjectComm((PetscObject)dm), red->N, nloc, colors, PETSC_OWN_POINTER, coloring));
  PetscCall(ISColoringSetType(*coloring, ctype));
  PetscFunctionReturn(0);
}

static PetscErrorCode DMRefine_Redundant(DM dmc, MPI_Comm comm, DM *dmf) {
  PetscMPIInt   flag;
  DM_Redundant *redc = (DM_Redundant *)dmc->data;

  PetscFunctionBegin;
  if (comm == MPI_COMM_NULL) { PetscCall(PetscObjectGetComm((PetscObject)dmc, &comm)); }
  PetscCallMPI(MPI_Comm_compare(PetscObjectComm((PetscObject)dmc), comm, &flag));
  PetscCheck(flag == MPI_CONGRUENT || flag == MPI_IDENT, PetscObjectComm((PetscObject)dmc), PETSC_ERR_SUP, "cannot change communicators");
  PetscCall(DMRedundantCreate(comm, redc->rank, redc->N, dmf));
  PetscFunctionReturn(0);
}

static PetscErrorCode DMCoarsen_Redundant(DM dmf, MPI_Comm comm, DM *dmc) {
  PetscMPIInt   flag;
  DM_Redundant *redf = (DM_Redundant *)dmf->data;

  PetscFunctionBegin;
  if (comm == MPI_COMM_NULL) { PetscCall(PetscObjectGetComm((PetscObject)dmf, &comm)); }
  PetscCallMPI(MPI_Comm_compare(PetscObjectComm((PetscObject)dmf), comm, &flag));
  PetscCheck(flag == MPI_CONGRUENT || flag == MPI_IDENT, PetscObjectComm((PetscObject)dmf), PETSC_ERR_SUP, "cannot change communicators");
  PetscCall(DMRedundantCreate(comm, redf->rank, redf->N, dmc));
  PetscFunctionReturn(0);
}

static PetscErrorCode DMCreateInterpolation_Redundant(DM dmc, DM dmf, Mat *P, Vec *scale) {
  DM_Redundant *redc = (DM_Redundant *)dmc->data;
  DM_Redundant *redf = (DM_Redundant *)dmf->data;
  PetscMPIInt   flag;
  PetscInt      i, rstart, rend;

  PetscFunctionBegin;
  PetscCallMPI(MPI_Comm_compare(PetscObjectComm((PetscObject)dmc), PetscObjectComm((PetscObject)dmf), &flag));
  PetscCheck(flag == MPI_CONGRUENT || flag == MPI_IDENT, PetscObjectComm((PetscObject)dmf), PETSC_ERR_SUP, "cannot change communicators");
  PetscCheck(redc->rank == redf->rank, PetscObjectComm((PetscObject)dmf), PETSC_ERR_ARG_INCOMP, "Owning rank does not match");
  PetscCheck(redc->N == redf->N, PetscObjectComm((PetscObject)dmf), PETSC_ERR_ARG_INCOMP, "Global size does not match");
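  /* Coarse and fine redundant DMs have identical layouts, so the interpolation is simply the identity */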
  PetscCall(MatCreate(PetscObjectComm((PetscObject)dmc), P));
  PetscCall(MatSetSizes(*P, redc->n, redc->n, redc->N, redc->N));
  PetscCall(MatSetType(*P, MATAIJ));
  PetscCall(MatSeqAIJSetPreallocation(*P, 1, NULL));
  PetscCall(MatMPIAIJSetPreallocation(*P, 1, NULL, 0, NULL));
  PetscCall(MatGetOwnershipRange(*P, &rstart, &rend));
  for (i = rstart; i < rend; i++) PetscCall(MatSetValue(*P, i, i, 1.0, INSERT_VALUES));
  PetscCall(MatAssemblyBegin(*P, MAT_FINAL_ASSEMBLY));
  PetscCall(MatAssemblyEnd(*P, MAT_FINAL_ASSEMBLY));
  if (scale) PetscCall(DMCreateInterpolationScale(dmc, dmf, *P, scale));
  PetscFunctionReturn(0);
}

/*@
    DMRedundantSetSize - Sets the size of a densely coupled redundant object

    Collective on dm

    Input Parameters:
+   dm - redundant DM
.   rank - rank of process to own redundant degrees of freedom
-   N - total number of redundant degrees of freedom

    Level: advanced

.seealso: `DMDestroy()`, `DMCreateGlobalVector()`, `DMRedundantCreate()`, `DMRedundantGetSize()`
@*/
PetscErrorCode DMRedundantSetSize(DM dm, PetscMPIInt rank, PetscInt N) {
  PetscFunctionBegin;
  PetscValidHeaderSpecific(dm, DM_CLASSID, 1);
  PetscValidType(dm, 1);
  PetscValidLogicalCollectiveMPIInt(dm, rank, 2);
  PetscValidLogicalCollectiveInt(dm, N, 3);
  PetscTryMethod(dm, "DMRedundantSetSize_C", (DM, PetscMPIInt, PetscInt), (dm, rank, N));
  PetscFunctionReturn(0);
}

/*@
    DMRedundantGetSize - Gets the size of a densely coupled redundant object

    Not Collective

    Input Parameter:
.   dm - redundant DM

    Output Parameters:
+   rank - rank of process to own redundant degrees of freedom (or NULL)
-   N - total number of redundant degrees of freedom (or NULL)

    Level: advanced

.seealso: `DMDestroy()`, `DMCreateGlobalVector()`, `DMRedundantCreate()`, `DMRedundantSetSize()`
@*/
PetscErrorCode DMRedundantGetSize(DM dm, PetscMPIInt *rank, PetscInt *N) {
  PetscFunctionBegin;
  PetscValidHeaderSpecific(dm, DM_CLASSID, 1);
  PetscValidType(dm, 1);
  PetscUseMethod(dm, "DMRedundantGetSize_C", (DM, PetscMPIInt *, PetscInt *), (dm, rank, N));
  PetscFunctionReturn(0);
}
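
/* A minimal usage sketch of these routines (the concrete values are illustrative assumptions):
   create a DMREDUNDANT with DMCreate()/DMSetType(), set its size, and query it back.

     DM          dm;
     PetscMPIInt owner;
     PetscInt    N;
     PetscCall(DMCreate(PETSC_COMM_WORLD, &dm));
     PetscCall(DMSetType(dm, DMREDUNDANT));
     PetscCall(DMRedundantSetSize(dm, 0, 4));       // rank 0 owns 4 redundant dofs
     PetscCall(DMRedundantGetSize(dm, &owner, &N)); // owner == 0, N == 4
     PetscCall(DMDestroy(&dm));
*/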

static PetscErrorCode DMRedundantSetSize_Redundant(DM dm, PetscMPIInt rank, PetscInt N) {
  DM_Redundant *red = (DM_Redundant *)dm->data;
  PetscMPIInt   myrank;
  PetscInt      i, *globals;

  PetscFunctionBegin;
  PetscCallMPI(MPI_Comm_rank(PetscObjectComm((PetscObject)dm), &myrank));
  red->rank = rank;
  red->N    = N;
  red->n    = (myrank == rank) ? N : 0;

  /* Set up the identity local-to-global mapping: every rank maps local index i to global index i */
  PetscCall(PetscMalloc1(red->N, &globals));
  for (i = 0; i < red->N; i++) globals[i] = i;
  PetscCall(ISLocalToGlobalMappingDestroy(&dm->ltogmap));
  PetscCall(ISLocalToGlobalMappingCreate(PetscObjectComm((PetscObject)dm), 1, red->N, globals, PETSC_OWN_POINTER, &dm->ltogmap));
  PetscFunctionReturn(0);
}

static PetscErrorCode DMRedundantGetSize_Redundant(DM dm, PetscMPIInt *rank, PetscInt *N) {
  DM_Redundant *red = (DM_Redundant *)dm->data;

  PetscFunctionBegin;
  if (rank) *rank = red->rank;
  if (N) *N = red->N;
  PetscFunctionReturn(0);
}

static PetscErrorCode DMSetUpGLVisViewer_Redundant(PetscObject odm, PetscViewer viewer) {
  PetscFunctionBegin;
  PetscFunctionReturn(0);
}

/*MC
   DMREDUNDANT = "redundant" - A DM object that is used to manage data for a small set of dense globally coupled variables.
         In the global representation of the vector the variables are all stored on a single MPI process (all the other MPI processes have
         no variables); in the local representation all the variables are stored on ALL the MPI processes (because they are all needed for each
         process's local computations).

         This DM is generally used inside a DMCOMPOSITE object. For example, it may be used to store continuation parameters for a bifurcation problem.

  Level: intermediate

.seealso: `DMType`, `DMCOMPOSITE`, `DMCreate()`, `DMRedundantSetSize()`, `DMRedundantGetSize()`
M*/

PETSC_EXTERN PetscErrorCode DMCreate_Redundant(DM dm) {
  DM_Redundant *red;

  PetscFunctionBegin;
  PetscCall(PetscNewLog(dm, &red));
  dm->data = red;

  dm->ops->setup               = DMSetUp_Redundant;
  dm->ops->view                = DMView_Redundant;
  dm->ops->createglobalvector  = DMCreateGlobalVector_Redundant;
  dm->ops->createlocalvector   = DMCreateLocalVector_Redundant;
  dm->ops->creatematrix        = DMCreateMatrix_Redundant;
  dm->ops->destroy             = DMDestroy_Redundant;
  dm->ops->globaltolocalbegin  = DMGlobalToLocalBegin_Redundant;
  dm->ops->globaltolocalend    = DMGlobalToLocalEnd_Redundant;
  dm->ops->localtoglobalbegin  = DMLocalToGlobalBegin_Redundant;
  dm->ops->localtoglobalend    = DMLocalToGlobalEnd_Redundant;
  dm->ops->refine              = DMRefine_Redundant;
  dm->ops->coarsen             = DMCoarsen_Redundant;
  dm->ops->createinterpolation = DMCreateInterpolation_Redundant;
  dm->ops->getcoloring         = DMCreateColoring_Redundant;

  PetscCall(PetscObjectComposeFunction((PetscObject)dm, "DMRedundantSetSize_C", DMRedundantSetSize_Redundant));
  PetscCall(PetscObjectComposeFunction((PetscObject)dm, "DMRedundantGetSize_C", DMRedundantGetSize_Redundant));
  PetscCall(PetscObjectComposeFunction((PetscObject)dm, "DMSetUpGLVisViewer_C", DMSetUpGLVisViewer_Redundant));
  PetscFunctionReturn(0);
}

/*@C
    DMRedundantCreate - Creates a DM object, used to manage data for dense globally coupled variables

    Collective

    Input Parameters:
+   comm - the processors that will share the global vector
.   rank - rank to own the redundant values
-   N - total number of degrees of freedom

    Output Parameter:
.   dm - the redundant DM

    Level: advanced

.seealso: `DMDestroy()`, `DMCreateGlobalVector()`, `DMCreateMatrix()`, `DMCompositeAddDM()`, `DMREDUNDANT`, `DMSetType()`, `DMRedundantSetSize()`, `DMRedundantGetSize()`

@*/
PetscErrorCode DMRedundantCreate(MPI_Comm comm, PetscMPIInt rank, PetscInt N, DM *dm) {
  PetscFunctionBegin;
  PetscValidPointer(dm, 4);
  PetscCall(DMCreate(comm, dm));
  PetscCall(DMSetType(*dm, DMREDUNDANT));
  PetscCall(DMRedundantSetSize(*dm, rank, N));
  PetscCall(DMSetUp(*dm));
  PetscFunctionReturn(0);
}
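
/* A minimal end-to-end sketch (assuming PetscInitialize() has already been called; the sizes
   and owning rank are illustrative assumptions): create a redundant DM with 3 dofs owned by
   rank 0 and move data between its global and local representations.

     DM  dm;
     Vec g, l;
     PetscCall(DMRedundantCreate(PETSC_COMM_WORLD, 0, 3, &dm));
     PetscCall(DMCreateGlobalVector(dm, &g)); // length 3 on rank 0, length 0 elsewhere
     PetscCall(DMCreateLocalVector(dm, &l));  // length 3 on every rank
     PetscCall(DMGlobalToLocalBegin(dm, g, INSERT_VALUES, l));
     PetscCall(DMGlobalToLocalEnd(dm, g, INSERT_VALUES, l));
     PetscCall(VecDestroy(&l));
     PetscCall(VecDestroy(&g));
     PetscCall(DMDestroy(&dm));
*/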
379