xref: /petsc/src/vec/vec/impls/mpi/pbvec.c (revision 4279555e62e618147958e52682840638ab8c8be0)

/*
   This file contains routines for Parallel vector operations.
 */
#include <petscsys.h>
#include <../src/vec/vec/impls/mpi/pvecimpl.h> /*I  "petscvec.h"   I*/

extern PetscErrorCode VecView_MPI_Draw(Vec, PetscViewer);

PetscErrorCode VecPlaceArray_MPI(Vec vin, const PetscScalar *a)
{
  Vec_MPI *v = (Vec_MPI *)vin->data;

  PetscFunctionBegin;
  PetscCheck(!v->unplacedarray, PETSC_COMM_SELF, PETSC_ERR_ARG_WRONGSTATE, "VecPlaceArray() was already called on this vector, without a call to VecResetArray()");
  v->unplacedarray = v->array; /* save previous array so reset can bring it back */
  v->array         = (PetscScalar *)a;
  if (v->localrep) PetscCall(VecPlaceArray(v->localrep, a));
  PetscFunctionReturn(PETSC_SUCCESS);
}
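/*
   A minimal usage sketch of the place/reset pairing handled above (illustrative only; the
   vector sizes and the buffer `userbuf` are invented, not taken from this file):

     PetscScalar userbuf[4];
     Vec         x;

     PetscCall(VecCreateMPI(PETSC_COMM_WORLD, 4, PETSC_DETERMINE, &x));
     PetscCall(VecPlaceArray(x, userbuf)); // x now stores its entries in userbuf; the old array is kept in unplacedarray
     // ... work with x ...
     PetscCall(VecResetArray(x));          // restores the array saved by VecPlaceArray_MPI()
     PetscCall(VecDestroy(&x));
*/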

PetscErrorCode VecDuplicate_MPI(Vec win, Vec *v)
{
  Vec_MPI     *vw, *w = (Vec_MPI *)win->data;
  PetscScalar *array;

  PetscFunctionBegin;
  PetscCall(VecCreate(PetscObjectComm((PetscObject)win), v));
  PetscCall(PetscLayoutReference(win->map, &(*v)->map));

  PetscCall(VecCreate_MPI_Private(*v, PETSC_TRUE, w->nghost, NULL));
  vw = (Vec_MPI *)(*v)->data;
  PetscCall(PetscMemcpy((*v)->ops, win->ops, sizeof(*win->ops)));

  /* save local representation of the parallel vector (and scatter) if it exists */
  if (w->localrep) {
    PetscCall(VecGetArray(*v, &array));
    PetscCall(VecCreateSeqWithArray(PETSC_COMM_SELF, PetscAbs(win->map->bs), win->map->n + w->nghost, array, &vw->localrep));
    PetscCall(PetscMemcpy(vw->localrep->ops, w->localrep->ops, sizeof(*w->localrep->ops)));
    PetscCall(VecRestoreArray(*v, &array));

    vw->localupdate = w->localupdate;
    if (vw->localupdate) PetscCall(PetscObjectReference((PetscObject)vw->localupdate));
  }

  /* New vector should inherit stashing property of parent */
  (*v)->stash.donotstash   = win->stash.donotstash;
  (*v)->stash.ignorenegidx = win->stash.ignorenegidx;

  PetscCall(PetscObjectListDuplicate(((PetscObject)win)->olist, &((PetscObject)(*v))->olist));
  PetscCall(PetscFunctionListDuplicate(((PetscObject)win)->qlist, &((PetscObject)(*v))->qlist));

  (*v)->map->bs   = PetscAbs(win->map->bs);
  (*v)->bstash.bs = win->bstash.bs;
  PetscFunctionReturn(PETSC_SUCCESS);
}

static PetscErrorCode VecSetOption_MPI(Vec V, VecOption op, PetscBool flag)
{
  Vec_MPI *v = (Vec_MPI *)V->data;

  PetscFunctionBegin;
  switch (op) {
  case VEC_IGNORE_OFF_PROC_ENTRIES:
    V->stash.donotstash = flag;
    break;
  case VEC_IGNORE_NEGATIVE_INDICES:
    V->stash.ignorenegidx = flag;
    break;
  case VEC_SUBSET_OFF_PROC_ENTRIES:
    v->assembly_subset = flag;              /* See the same logic in MatAssembly wrt MAT_SUBSET_OFF_PROC_ENTRIES */
    if (!v->assembly_subset) {              /* User indicates "do not reuse the communication pattern" */
      PetscCall(VecAssemblyReset_MPI(V));   /* Reset existing pattern to free memory */
      v->first_assembly_done = PETSC_FALSE; /* Mark the first assembly as not done */
    }
    break;
  }

  PetscFunctionReturn(PETSC_SUCCESS);
}
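/*
   A hedged sketch of how a caller can use VEC_SUBSET_OFF_PROC_ENTRIES so the communication
   pattern built by the first assembly is reused; `x`, `nidx`, `idx`, `vals` and `nits` are
   invented names for illustration:

     PetscCall(VecSetOption(x, VEC_SUBSET_OFF_PROC_ENTRIES, PETSC_TRUE));
     for (PetscInt it = 0; it < nits; it++) {
       // each pass sets values at (a subset of) the locations used in the first pass
       PetscCall(VecSetValues(x, nidx, idx, vals, ADD_VALUES));
       PetscCall(VecAssemblyBegin(x));
       PetscCall(VecAssemblyEnd(x));
     }
*/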

PetscErrorCode VecResetArray_MPI(Vec vin)
{
  Vec_MPI *v = (Vec_MPI *)vin->data;

  PetscFunctionBegin;
  v->array         = v->unplacedarray;
  v->unplacedarray = NULL;
  if (v->localrep) PetscCall(VecResetArray(v->localrep));
  PetscFunctionReturn(PETSC_SUCCESS);
}

static PetscErrorCode VecAssemblySend_MPI_Private(MPI_Comm comm, const PetscMPIInt tag[], PetscMPIInt rankid, PetscMPIInt rank, void *sdata, MPI_Request req[], void *ctx)
{
  Vec                X   = (Vec)ctx;
  Vec_MPI           *x   = (Vec_MPI *)X->data;
  VecAssemblyHeader *hdr = (VecAssemblyHeader *)sdata;
  PetscInt           bs  = X->map->bs;

  PetscFunctionBegin;
  /* x->first_assembly_done indicates we are reusing a communication network. In that case, some
     messages can be empty, but we have to send them this time if we sent them before because the
     receiver is expecting them.
   */
  if (hdr->count || (x->first_assembly_done && x->sendptrs[rankid].ints)) {
    PetscCallMPI(MPI_Isend(x->sendptrs[rankid].ints, hdr->count, MPIU_INT, rank, tag[0], comm, &req[0]));
    PetscCallMPI(MPI_Isend(x->sendptrs[rankid].scalars, hdr->count, MPIU_SCALAR, rank, tag[1], comm, &req[1]));
  }
  if (hdr->bcount || (x->first_assembly_done && x->sendptrs[rankid].intb)) {
    PetscCallMPI(MPI_Isend(x->sendptrs[rankid].intb, hdr->bcount, MPIU_INT, rank, tag[2], comm, &req[2]));
    PetscCallMPI(MPI_Isend(x->sendptrs[rankid].scalarb, hdr->bcount * bs, MPIU_SCALAR, rank, tag[3], comm, &req[3]));
  }
  PetscFunctionReturn(PETSC_SUCCESS);
}

static PetscErrorCode VecAssemblyRecv_MPI_Private(MPI_Comm comm, const PetscMPIInt tag[], PetscMPIInt rank, void *rdata, MPI_Request req[], void *ctx)
{
  Vec                X   = (Vec)ctx;
  Vec_MPI           *x   = (Vec_MPI *)X->data;
  VecAssemblyHeader *hdr = (VecAssemblyHeader *)rdata;
  PetscInt           bs  = X->map->bs;
  VecAssemblyFrame  *frame;

  PetscFunctionBegin;
  PetscCall(PetscSegBufferGet(x->segrecvframe, 1, &frame));

  if (hdr->count) {
    PetscCall(PetscSegBufferGet(x->segrecvint, hdr->count, &frame->ints));
    PetscCallMPI(MPI_Irecv(frame->ints, hdr->count, MPIU_INT, rank, tag[0], comm, &req[0]));
    PetscCall(PetscSegBufferGet(x->segrecvscalar, hdr->count, &frame->scalars));
    PetscCallMPI(MPI_Irecv(frame->scalars, hdr->count, MPIU_SCALAR, rank, tag[1], comm, &req[1]));
    frame->pendings = 2;
  } else {
    frame->ints     = NULL;
    frame->scalars  = NULL;
    frame->pendings = 0;
  }

  if (hdr->bcount) {
    PetscCall(PetscSegBufferGet(x->segrecvint, hdr->bcount, &frame->intb));
    PetscCallMPI(MPI_Irecv(frame->intb, hdr->bcount, MPIU_INT, rank, tag[2], comm, &req[2]));
    PetscCall(PetscSegBufferGet(x->segrecvscalar, hdr->bcount * bs, &frame->scalarb));
    PetscCallMPI(MPI_Irecv(frame->scalarb, hdr->bcount * bs, MPIU_SCALAR, rank, tag[3], comm, &req[3]));
    frame->pendingb = 2;
  } else {
    frame->intb     = NULL;
    frame->scalarb  = NULL;
    frame->pendingb = 0;
  }
  PetscFunctionReturn(PETSC_SUCCESS);
}

static PetscErrorCode VecAssemblyBegin_MPI_BTS(Vec X)
{
  Vec_MPI *x = (Vec_MPI *)X->data;
  MPI_Comm comm;
  PetscInt i, j, jb, bs;

  PetscFunctionBegin;
  if (X->stash.donotstash) PetscFunctionReturn(PETSC_SUCCESS);

  PetscCall(PetscObjectGetComm((PetscObject)X, &comm));
  PetscCall(VecGetBlockSize(X, &bs));
  if (PetscDefined(USE_DEBUG)) {
    InsertMode addv;
    PetscCall(MPIU_Allreduce((PetscEnum *)&X->stash.insertmode, (PetscEnum *)&addv, 1, MPIU_ENUM, MPI_BOR, comm));
    PetscCheck(addv != (ADD_VALUES | INSERT_VALUES), comm, PETSC_ERR_ARG_NOTSAMETYPE, "Some processors inserted values while others added");
  }
  X->bstash.insertmode = X->stash.insertmode; /* Block stash implicitly tracks InsertMode of scalar stash */

  PetscCall(VecStashSortCompress_Private(&X->stash));
  PetscCall(VecStashSortCompress_Private(&X->bstash));

  if (!x->sendranks) {
    PetscMPIInt nowners, bnowners, *owners, *bowners;
    PetscInt    ntmp;
    PetscCall(VecStashGetOwnerList_Private(&X->stash, X->map, &nowners, &owners));
    PetscCall(VecStashGetOwnerList_Private(&X->bstash, X->map, &bnowners, &bowners));
    PetscCall(PetscMergeMPIIntArray(nowners, owners, bnowners, bowners, &ntmp, &x->sendranks));
    x->nsendranks = ntmp;
    PetscCall(PetscFree(owners));
    PetscCall(PetscFree(bowners));
    PetscCall(PetscMalloc1(x->nsendranks, &x->sendhdr));
    PetscCall(PetscCalloc1(x->nsendranks, &x->sendptrs));
  }
  for (i = 0, j = 0, jb = 0; i < x->nsendranks; i++) {
    PetscMPIInt rank         = x->sendranks[i];
    x->sendhdr[i].insertmode = X->stash.insertmode;
    /* Initialize pointers for non-empty stashes the first time around.  Subsequent assemblies with
     * VEC_SUBSET_OFF_PROC_ENTRIES will leave the old pointers (dangling because the stash has been collected) when
     * there is nothing new to send, so that size-zero messages get sent instead. */
    x->sendhdr[i].count = 0;
    if (X->stash.n) {
      x->sendptrs[i].ints    = &X->stash.idx[j];
      x->sendptrs[i].scalars = &X->stash.array[j];
      for (; j < X->stash.n && X->stash.idx[j] < X->map->range[rank + 1]; j++) x->sendhdr[i].count++;
    }
    x->sendhdr[i].bcount = 0;
    if (X->bstash.n) {
      x->sendptrs[i].intb    = &X->bstash.idx[jb];
      x->sendptrs[i].scalarb = &X->bstash.array[jb * bs];
      for (; jb < X->bstash.n && X->bstash.idx[jb] * bs < X->map->range[rank + 1]; jb++) x->sendhdr[i].bcount++;
    }
  }

  if (!x->segrecvint) PetscCall(PetscSegBufferCreate(sizeof(PetscInt), 1000, &x->segrecvint));
  if (!x->segrecvscalar) PetscCall(PetscSegBufferCreate(sizeof(PetscScalar), 1000, &x->segrecvscalar));
  if (!x->segrecvframe) PetscCall(PetscSegBufferCreate(sizeof(VecAssemblyFrame), 50, &x->segrecvframe));
  if (x->first_assembly_done) { /* this is not the first assembly */
    PetscMPIInt tag[4];
    for (i = 0; i < 4; i++) PetscCall(PetscCommGetNewTag(comm, &tag[i]));
    for (i = 0; i < x->nsendranks; i++) PetscCall(VecAssemblySend_MPI_Private(comm, tag, i, x->sendranks[i], x->sendhdr + i, x->sendreqs + 4 * i, X));
    for (i = 0; i < x->nrecvranks; i++) PetscCall(VecAssemblyRecv_MPI_Private(comm, tag, x->recvranks[i], x->recvhdr + i, x->recvreqs + 4 * i, X));
    x->use_status = PETSC_TRUE;
  } else { /* First time assembly */
    PetscCall(PetscCommBuildTwoSidedFReq(comm, 3, MPIU_INT, x->nsendranks, x->sendranks, (PetscInt *)x->sendhdr, &x->nrecvranks, &x->recvranks, &x->recvhdr, 4, &x->sendreqs, &x->recvreqs, VecAssemblySend_MPI_Private, VecAssemblyRecv_MPI_Private, X));
    x->use_status = PETSC_FALSE;
  }

  /* The first_assembly_done flag is only meaningful when x->assembly_subset is set:
     when assembly_subset is set, mark that the first assembly has been done.
   */
  x->first_assembly_done = x->assembly_subset;

  {
    PetscInt nstash, reallocs;
    PetscCall(VecStashGetInfo_Private(&X->stash, &nstash, &reallocs));
    PetscCall(PetscInfo(X, "Stash has %" PetscInt_FMT " entries, uses %" PetscInt_FMT " mallocs.\n", nstash, reallocs));
    PetscCall(VecStashGetInfo_Private(&X->bstash, &nstash, &reallocs));
    PetscCall(PetscInfo(X, "Block-Stash has %" PetscInt_FMT " entries, uses %" PetscInt_FMT " mallocs.\n", nstash, reallocs));
  }
  PetscFunctionReturn(PETSC_SUCCESS);
}

static PetscErrorCode VecAssemblyEnd_MPI_BTS(Vec X)
{
  Vec_MPI          *x  = (Vec_MPI *)X->data;
  PetscInt          bs = X->map->bs;
  PetscMPIInt       npending, *some_indices, r;
  MPI_Status       *some_statuses;
  PetscScalar      *xarray;
  VecAssemblyFrame *frame;

  PetscFunctionBegin;
  if (X->stash.donotstash) {
    X->stash.insertmode  = NOT_SET_VALUES;
    X->bstash.insertmode = NOT_SET_VALUES;
    PetscFunctionReturn(PETSC_SUCCESS);
  }

  PetscCheck(x->segrecvframe, PETSC_COMM_SELF, PETSC_ERR_PLIB, "Missing segrecvframe! Probably you forgot to call VecAssemblyBegin first");
  PetscCall(VecGetArray(X, &xarray));
  PetscCall(PetscSegBufferExtractInPlace(x->segrecvframe, &frame));
  PetscCall(PetscMalloc2(4 * x->nrecvranks, &some_indices, x->use_status ? 4 * x->nrecvranks : 0, &some_statuses));
  for (r = 0, npending = 0; r < x->nrecvranks; r++) npending += frame[r].pendings + frame[r].pendingb;
  while (npending > 0) {
    PetscMPIInt ndone = 0, ii;
    /* Filling MPI_Status fields requires some resources from the MPI library.  We skip it on the first assembly, or
     * when VEC_SUBSET_OFF_PROC_ENTRIES has not been set, because we could exchange exact sizes in the initial
     * rendezvous.  When the rendezvous is elided, however, we use MPI_Status to get actual message lengths, so that
     * subsequent assembly can set a proper subset of the values. */
    PetscCallMPI(MPI_Waitsome(4 * x->nrecvranks, x->recvreqs, &ndone, some_indices, x->use_status ? some_statuses : MPI_STATUSES_IGNORE));
    for (ii = 0; ii < ndone; ii++) {
      PetscInt     i     = some_indices[ii] / 4, j, k;
      InsertMode   imode = (InsertMode)x->recvhdr[i].insertmode;
      PetscInt    *recvint;
      PetscScalar *recvscalar;
      PetscBool    intmsg   = (PetscBool)(some_indices[ii] % 2 == 0);
      PetscBool    blockmsg = (PetscBool)((some_indices[ii] % 4) / 2 == 1);
      npending--;
      if (!blockmsg) { /* Scalar stash */
        PetscMPIInt count;
        if (--frame[i].pendings > 0) continue;
        if (x->use_status) {
          PetscCallMPI(MPI_Get_count(&some_statuses[ii], intmsg ? MPIU_INT : MPIU_SCALAR, &count));
        } else count = x->recvhdr[i].count;
        for (j = 0, recvint = frame[i].ints, recvscalar = frame[i].scalars; j < count; j++, recvint++) {
          PetscInt loc = *recvint - X->map->rstart;
          PetscCheck(*recvint >= X->map->rstart && X->map->rend > *recvint, PETSC_COMM_SELF, PETSC_ERR_PLIB, "Received vector entry %" PetscInt_FMT " out of local range [%" PetscInt_FMT ",%" PetscInt_FMT ")", *recvint, X->map->rstart, X->map->rend);
          switch (imode) {
          case ADD_VALUES:
            xarray[loc] += *recvscalar++;
            break;
          case INSERT_VALUES:
            xarray[loc] = *recvscalar++;
            break;
          default:
            SETERRQ(PETSC_COMM_SELF, PETSC_ERR_SUP, "Insert mode not supported 0x%x", imode);
          }
        }
      } else { /* Block stash */
        PetscMPIInt count;
        if (--frame[i].pendingb > 0) continue;
        if (x->use_status) {
          PetscCallMPI(MPI_Get_count(&some_statuses[ii], intmsg ? MPIU_INT : MPIU_SCALAR, &count));
          if (!intmsg) count /= bs; /* Convert from number of scalars to number of blocks */
        } else count = x->recvhdr[i].bcount;
        for (j = 0, recvint = frame[i].intb, recvscalar = frame[i].scalarb; j < count; j++, recvint++) {
          PetscInt loc = (*recvint) * bs - X->map->rstart;
          switch (imode) {
          case ADD_VALUES:
            for (k = loc; k < loc + bs; k++) xarray[k] += *recvscalar++;
            break;
          case INSERT_VALUES:
            for (k = loc; k < loc + bs; k++) xarray[k] = *recvscalar++;
            break;
          default:
            SETERRQ(PETSC_COMM_SELF, PETSC_ERR_SUP, "Insert mode not supported 0x%x", imode);
          }
        }
      }
    }
  }
  PetscCall(VecRestoreArray(X, &xarray));
  PetscCallMPI(MPI_Waitall(4 * x->nsendranks, x->sendreqs, MPI_STATUSES_IGNORE));
  PetscCall(PetscFree2(some_indices, some_statuses));
  if (x->assembly_subset) {
    PetscCall(PetscSegBufferExtractInPlace(x->segrecvint, NULL));
    PetscCall(PetscSegBufferExtractInPlace(x->segrecvscalar, NULL));
  } else {
    PetscCall(VecAssemblyReset_MPI(X));
  }

  X->stash.insertmode  = NOT_SET_VALUES;
  X->bstash.insertmode = NOT_SET_VALUES;
  PetscCall(VecStashScatterEnd_Private(&X->stash));
  PetscCall(VecStashScatterEnd_Private(&X->bstash));
  PetscFunctionReturn(PETSC_SUCCESS);
}

PetscErrorCode VecAssemblyReset_MPI(Vec X)
{
  Vec_MPI *x = (Vec_MPI *)X->data;

  PetscFunctionBegin;
  PetscCall(PetscFree(x->sendreqs));
  PetscCall(PetscFree(x->recvreqs));
  PetscCall(PetscFree(x->sendranks));
  PetscCall(PetscFree(x->recvranks));
  PetscCall(PetscFree(x->sendhdr));
  PetscCall(PetscFree(x->recvhdr));
  PetscCall(PetscFree(x->sendptrs));
  PetscCall(PetscSegBufferDestroy(&x->segrecvint));
  PetscCall(PetscSegBufferDestroy(&x->segrecvscalar));
  PetscCall(PetscSegBufferDestroy(&x->segrecvframe));
  PetscFunctionReturn(PETSC_SUCCESS);
}

static PetscErrorCode VecSetFromOptions_MPI(Vec X, PetscOptionItems *PetscOptionsObject)
{
#if !defined(PETSC_HAVE_MPIUNI)
  PetscBool flg = PETSC_FALSE, set;

  PetscFunctionBegin;
  PetscOptionsHeadBegin(PetscOptionsObject, "VecMPI Options");
  PetscCall(PetscOptionsBool("-vec_assembly_legacy", "Use MPI 1 version of assembly", "", flg, &flg, &set));
  if (set) {
    X->ops->assemblybegin = flg ? VecAssemblyBegin_MPI : VecAssemblyBegin_MPI_BTS;
    X->ops->assemblyend   = flg ? VecAssemblyEnd_MPI : VecAssemblyEnd_MPI_BTS;
  }
  PetscOptionsHeadEnd();
#else
  PetscFunctionBegin;
  X->ops->assemblybegin = VecAssemblyBegin_MPI;
  X->ops->assemblyend   = VecAssemblyEnd_MPI;
#endif
  PetscFunctionReturn(PETSC_SUCCESS);
}
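/*
   The option registered above is normally supplied from the command line; a sketch (the
   program name ./app is hypothetical):

     ./app -vec_assembly_legacy    (use the MPI-1 VecAssemblyBegin/End_MPI path)
     ./app                         (default: the build-two-sided BTS assembly path)

   The vector must pass through VecSetFromOptions() for the option to take effect.
*/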

static struct _VecOps DvOps = {PetscDesignatedInitializer(duplicate, VecDuplicate_MPI), /* 1 */
                               PetscDesignatedInitializer(duplicatevecs, VecDuplicateVecs_Default),
                               PetscDesignatedInitializer(destroyvecs, VecDestroyVecs_Default),
                               PetscDesignatedInitializer(dot, VecDot_MPI),
                               PetscDesignatedInitializer(mdot, VecMDot_MPI),
                               PetscDesignatedInitializer(norm, VecNorm_MPI),
                               PetscDesignatedInitializer(tdot, VecTDot_MPI),
                               PetscDesignatedInitializer(mtdot, VecMTDot_MPI),
                               PetscDesignatedInitializer(scale, VecScale_Seq),
                               PetscDesignatedInitializer(copy, VecCopy_Seq), /* 10 */
                               PetscDesignatedInitializer(set, VecSet_Seq),
                               PetscDesignatedInitializer(swap, VecSwap_Seq),
                               PetscDesignatedInitializer(axpy, VecAXPY_Seq),
                               PetscDesignatedInitializer(axpby, VecAXPBY_Seq),
                               PetscDesignatedInitializer(maxpy, VecMAXPY_Seq),
                               PetscDesignatedInitializer(aypx, VecAYPX_Seq),
                               PetscDesignatedInitializer(waxpy, VecWAXPY_Seq),
                               PetscDesignatedInitializer(axpbypcz, VecAXPBYPCZ_Seq),
                               PetscDesignatedInitializer(pointwisemult, VecPointwiseMult_Seq),
                               PetscDesignatedInitializer(pointwisedivide, VecPointwiseDivide_Seq),
                               PetscDesignatedInitializer(setvalues, VecSetValues_MPI), /* 20 */
                               PetscDesignatedInitializer(assemblybegin, VecAssemblyBegin_MPI_BTS),
                               PetscDesignatedInitializer(assemblyend, VecAssemblyEnd_MPI_BTS),
                               PetscDesignatedInitializer(getarray, NULL),
                               PetscDesignatedInitializer(getsize, VecGetSize_MPI),
                               PetscDesignatedInitializer(getlocalsize, VecGetSize_Seq),
                               PetscDesignatedInitializer(restorearray, NULL),
                               PetscDesignatedInitializer(max, VecMax_MPI),
                               PetscDesignatedInitializer(min, VecMin_MPI),
                               PetscDesignatedInitializer(setrandom, VecSetRandom_Seq),
                               PetscDesignatedInitializer(setoption, VecSetOption_MPI),
                               PetscDesignatedInitializer(setvaluesblocked, VecSetValuesBlocked_MPI),
                               PetscDesignatedInitializer(destroy, VecDestroy_MPI),
                               PetscDesignatedInitializer(view, VecView_MPI),
                               PetscDesignatedInitializer(placearray, VecPlaceArray_MPI),
                               PetscDesignatedInitializer(replacearray, VecReplaceArray_Seq),
                               PetscDesignatedInitializer(dot_local, VecDot_Seq),
                               PetscDesignatedInitializer(tdot_local, VecTDot_Seq),
                               PetscDesignatedInitializer(norm_local, VecNorm_Seq),
                               PetscDesignatedInitializer(mdot_local, VecMDot_Seq),
                               PetscDesignatedInitializer(mtdot_local, VecMTDot_Seq),
                               PetscDesignatedInitializer(load, VecLoad_Default),
                               PetscDesignatedInitializer(reciprocal, VecReciprocal_Default),
                               PetscDesignatedInitializer(conjugate, VecConjugate_Seq),
                               PetscDesignatedInitializer(setlocaltoglobalmapping, NULL),
                               PetscDesignatedInitializer(setvalueslocal, NULL),
                               PetscDesignatedInitializer(resetarray, VecResetArray_MPI),
                               PetscDesignatedInitializer(setfromoptions, VecSetFromOptions_MPI), /* set from options */
                               PetscDesignatedInitializer(maxpointwisedivide, VecMaxPointwiseDivide_Seq),
                               PetscDesignatedInitializer(pointwisemax, VecPointwiseMax_Seq),
                               PetscDesignatedInitializer(pointwisemaxabs, VecPointwiseMaxAbs_Seq),
                               PetscDesignatedInitializer(pointwisemin, VecPointwiseMin_Seq),
                               PetscDesignatedInitializer(getvalues, VecGetValues_MPI),
                               PetscDesignatedInitializer(sqrt, NULL),
                               PetscDesignatedInitializer(abs, NULL),
                               PetscDesignatedInitializer(exp, NULL),
                               PetscDesignatedInitializer(log, NULL),
                               PetscDesignatedInitializer(shift, NULL),
                               PetscDesignatedInitializer(create, NULL), /* really? */
                               PetscDesignatedInitializer(stridegather, VecStrideGather_Default),
                               PetscDesignatedInitializer(stridescatter, VecStrideScatter_Default),
                               PetscDesignatedInitializer(dotnorm2, NULL),
                               PetscDesignatedInitializer(getsubvector, NULL),
                               PetscDesignatedInitializer(restoresubvector, NULL),
                               PetscDesignatedInitializer(getarrayread, NULL),
                               PetscDesignatedInitializer(restorearrayread, NULL),
                               PetscDesignatedInitializer(stridesubsetgather, VecStrideSubSetGather_Default),
                               PetscDesignatedInitializer(stridesubsetscatter, VecStrideSubSetScatter_Default),
                               PetscDesignatedInitializer(viewnative, VecView_MPI),
                               PetscDesignatedInitializer(loadnative, NULL),
                               PetscDesignatedInitializer(createlocalvector, NULL),
                               PetscDesignatedInitializer(getlocalvector, NULL),
                               PetscDesignatedInitializer(restorelocalvector, NULL),
                               PetscDesignatedInitializer(getlocalvectorread, NULL),
                               PetscDesignatedInitializer(restorelocalvectorread, NULL),
                               PetscDesignatedInitializer(bindtocpu, NULL),
                               PetscDesignatedInitializer(getarraywrite, NULL),
                               PetscDesignatedInitializer(restorearraywrite, NULL),
                               PetscDesignatedInitializer(getarrayandmemtype, NULL),
                               PetscDesignatedInitializer(restorearrayandmemtype, NULL),
                               PetscDesignatedInitializer(getarrayreadandmemtype, NULL),
                               PetscDesignatedInitializer(restorearrayreadandmemtype, NULL),
                               PetscDesignatedInitializer(getarraywriteandmemtype, NULL),
                               PetscDesignatedInitializer(restorearraywriteandmemtype, NULL),
                               PetscDesignatedInitializer(concatenate, NULL),
                               PetscDesignatedInitializer(sum, NULL),
                               PetscDesignatedInitializer(setpreallocationcoo, VecSetPreallocationCOO_MPI),
                               PetscDesignatedInitializer(setvaluescoo, VecSetValuesCOO_MPI)};

/*
    VecCreate_MPI_Private - Basic create routine called by VecCreate_MPI() (i.e. VecCreateMPI()),
    VecCreateMPIWithArray(), VecCreate_Shared() (i.e. VecCreateShared()), VecCreateGhost(),
    VecCreateGhostWithArray(), VecDuplicate_MPI(), and VecDuplicate_Shared()

    If alloc is true and array is NULL then this routine allocates the space, otherwise
    no space is allocated.
*/
PetscErrorCode VecCreate_MPI_Private(Vec v, PetscBool alloc, PetscInt nghost, const PetscScalar array[])
{
  Vec_MPI *s;

  PetscFunctionBegin;
  PetscCall(PetscNew(&s));
  v->data = (void *)s;
  PetscCall(PetscMemcpy(v->ops, &DvOps, sizeof(DvOps)));
  s->nghost      = nghost;
  v->petscnative = PETSC_TRUE;
  if (array) v->offloadmask = PETSC_OFFLOAD_CPU;

  PetscCall(PetscLayoutSetUp(v->map));

  s->array           = (PetscScalar *)array;
  s->array_allocated = NULL;
  if (alloc && !array) {
    PetscInt n = v->map->n + nghost;
    PetscCall(PetscCalloc1(n, &s->array));
    s->array_allocated = s->array;
  }

  /* By default parallel vectors do not have local representation */
  s->localrep    = NULL;
  s->localupdate = NULL;

  v->stash.insertmode  = NOT_SET_VALUES;
  v->bstash.insertmode = NOT_SET_VALUES;
  /* create the stashes. The block-size for bstash is set later when
     VecSetValuesBlocked is called.
  */
  PetscCall(VecStashCreate_Private(PetscObjectComm((PetscObject)v), 1, &v->stash));
  PetscCall(VecStashCreate_Private(PetscObjectComm((PetscObject)v), PetscAbs(v->map->bs), &v->bstash));

#if defined(PETSC_HAVE_MATLAB)
  PetscCall(PetscObjectComposeFunction((PetscObject)v, "PetscMatlabEnginePut_C", VecMatlabEnginePut_Default));
  PetscCall(PetscObjectComposeFunction((PetscObject)v, "PetscMatlabEngineGet_C", VecMatlabEngineGet_Default));
#endif
  PetscCall(PetscObjectChangeTypeName((PetscObject)v, VECMPI));
  PetscFunctionReturn(PETSC_SUCCESS);
}

/*MC
   VECMPI - VECMPI = "mpi" - The basic parallel vector

   Options Database Key:
. -vec_type mpi - sets the vector type to `VECMPI` during a call to `VecSetFromOptions()`

  Level: beginner

.seealso: [](chapter_vectors), `Vec`, `VecType`, `VecCreate()`, `VecSetType()`, `VecSetFromOptions()`, `VecCreateMPIWithArray()`, `VecCreateMPI()`
M*/
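/*
   A minimal sketch of creating a VECMPI either programmatically or through the options
   database (the global size 100 is an arbitrary example value):

     Vec v;
     PetscCall(VecCreate(PETSC_COMM_WORLD, &v));
     PetscCall(VecSetSizes(v, PETSC_DECIDE, 100));
     PetscCall(VecSetType(v, VECMPI)); // or VecSetFromOptions(v) combined with -vec_type mpi
     PetscCall(VecDestroy(&v));
*/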

PetscErrorCode VecCreate_MPI(Vec vv)
{
  PetscFunctionBegin;
  PetscCall(VecCreate_MPI_Private(vv, PETSC_TRUE, 0, NULL));
  PetscFunctionReturn(PETSC_SUCCESS);
}

/*MC
   VECSTANDARD = "standard" - A `VECSEQ` on one process and `VECMPI` on more than one process

   Options Database Key:
. -vec_type standard - sets the vector type to `VECSTANDARD` during a call to `VecSetFromOptions()`

  Level: beginner

.seealso: [](chapter_vectors), `Vec`, `VecType`, `VecCreateSeq()`, `VecCreateMPI()`
M*/

PETSC_EXTERN PetscErrorCode VecCreate_Standard(Vec v)
{
  PetscMPIInt size;

  PetscFunctionBegin;
  PetscCallMPI(MPI_Comm_size(PetscObjectComm((PetscObject)v), &size));
  if (size == 1) {
    PetscCall(VecSetType(v, VECSEQ));
  } else {
    PetscCall(VecSetType(v, VECMPI));
  }
  PetscFunctionReturn(PETSC_SUCCESS);
}

/*@C
   VecCreateMPIWithArray - Creates a parallel, array-style vector,
   where the user provides the array space to store the vector values.

   Collective

   Input Parameters:
+  comm  - the MPI communicator to use
.  bs    - block size, same meaning as `VecSetBlockSize()`
.  n     - local vector length, cannot be `PETSC_DECIDE`
.  N     - global vector length (or `PETSC_DETERMINE` to have calculated)
-  array - the user provided array to store the vector values

   Output Parameter:
.  vv - the vector

   Level: intermediate

   Notes:
   Use `VecDuplicate()` or `VecDuplicateVecs()` to form additional vectors of the
   same type as an existing vector.

   If the user-provided array is `NULL`, then `VecPlaceArray()` can be used
   at a later stage to SET the array for storing the vector values.

   PETSc does NOT free `array` when the vector is destroyed via `VecDestroy()`.

   The user should not free `array` until the vector is destroyed.

.seealso: [](chapter_vectors), `Vec`, `VecType`, `VecCreateSeqWithArray()`, `VecCreate()`, `VecDuplicate()`, `VecDuplicateVecs()`, `VecCreateGhost()`,
          `VecCreateMPI()`, `VecCreateGhostWithArray()`, `VecPlaceArray()`
@*/
PetscErrorCode VecCreateMPIWithArray(MPI_Comm comm, PetscInt bs, PetscInt n, PetscInt N, const PetscScalar array[], Vec *vv)
{
  PetscFunctionBegin;
  PetscCheck(n != PETSC_DECIDE, PETSC_COMM_SELF, PETSC_ERR_ARG_OUTOFRANGE, "Must set local size of vector");
  PetscCall(PetscSplitOwnership(comm, &n, &N));
  PetscCall(VecCreate(comm, vv));
  PetscCall(VecSetSizes(*vv, n, N));
  PetscCall(VecSetBlockSize(*vv, bs));
  PetscCall(VecCreate_MPI_Private(*vv, PETSC_FALSE, 0, array));
  PetscFunctionReturn(PETSC_SUCCESS);
}
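/*
   A usage sketch for VecCreateMPIWithArray(); the buffer `mine` and the sizes are invented,
   and note that PETSc neither copies nor frees the user-provided array:

     PetscScalar mine[5];
     Vec         v;

     PetscCall(VecCreateMPIWithArray(PETSC_COMM_WORLD, 1, 5, PETSC_DETERMINE, mine, &v));
     // ... use v; its entries live in mine ...
     PetscCall(VecDestroy(&v)); // mine remains owned by the caller
*/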

/*@C
   VecCreateGhostWithArray - Creates a parallel vector with ghost padding on each processor;
   the caller allocates the array space.

   Collective

   Input Parameters:
+  comm - the MPI communicator to use
.  n - local vector length
.  N - global vector length (or `PETSC_DETERMINE` to have calculated if `n` is given)
.  nghost - number of local ghost points
.  ghosts - global indices of ghost points (or NULL if not needed), these do not need to be in increasing order (sorted)
-  array - the space to store the vector values (as long as n + nghost)

   Output Parameter:
.  vv - the global vector representation (without ghost points as part of vector)

   Level: advanced

   Notes:
   Use `VecGhostGetLocalForm()` to access the local, ghosted representation
   of the vector.

   This also automatically sets the `ISLocalToGlobalMapping()` for this vector.

.seealso: [](chapter_vectors), `Vec`, `VecType`, `VecCreate()`, `VecGhostGetLocalForm()`, `VecGhostRestoreLocalForm()`,
          `VecCreateGhost()`, `VecCreateSeqWithArray()`, `VecCreateMPIWithArray()`,
          `VecCreateGhostBlock()`, `VecCreateGhostBlockWithArray()`, `VecMPISetGhost()`
@*/
PetscErrorCode VecCreateGhostWithArray(MPI_Comm comm, PetscInt n, PetscInt N, PetscInt nghost, const PetscInt ghosts[], const PetscScalar array[], Vec *vv)
{
  Vec_MPI               *w;
  PetscScalar           *larray;
  IS                     from, to;
  ISLocalToGlobalMapping ltog;
  PetscInt               rstart, i, *indices;

  PetscFunctionBegin;
  *vv = NULL;

  PetscCheck(n != PETSC_DECIDE, PETSC_COMM_SELF, PETSC_ERR_ARG_OUTOFRANGE, "Must set local size");
  PetscCheck(nghost != PETSC_DECIDE, PETSC_COMM_SELF, PETSC_ERR_ARG_OUTOFRANGE, "Must set local ghost size");
  PetscCheck(nghost >= 0, PETSC_COMM_SELF, PETSC_ERR_ARG_OUTOFRANGE, "Ghost length must be >= 0");
  PetscCall(PetscSplitOwnership(comm, &n, &N));
  /* Create global representation */
  PetscCall(VecCreate(comm, vv));
  PetscCall(VecSetSizes(*vv, n, N));
  PetscCall(VecCreate_MPI_Private(*vv, PETSC_TRUE, nghost, array));
  w = (Vec_MPI *)(*vv)->data;
  /* Create local representation */
  PetscCall(VecGetArray(*vv, &larray));
  PetscCall(VecCreateSeqWithArray(PETSC_COMM_SELF, 1, n + nghost, larray, &w->localrep));
  PetscCall(VecRestoreArray(*vv, &larray));

  /*
       Create scatter context for scattering (updating) ghost values
  */
  PetscCall(ISCreateGeneral(comm, nghost, ghosts, PETSC_COPY_VALUES, &from));
  PetscCall(ISCreateStride(PETSC_COMM_SELF, nghost, n, 1, &to));
  PetscCall(VecScatterCreate(*vv, from, w->localrep, to, &w->localupdate));
  PetscCall(ISDestroy(&to));
  PetscCall(ISDestroy(&from));

  /* set local to global mapping for ghosted vector */
  PetscCall(PetscMalloc1(n + nghost, &indices));
  PetscCall(VecGetOwnershipRange(*vv, &rstart, NULL));
  for (i = 0; i < n; i++) indices[i] = rstart + i;
  for (i = 0; i < nghost; i++) indices[n + i] = ghosts[i];
  PetscCall(ISLocalToGlobalMappingCreate(comm, 1, n + nghost, indices, PETSC_OWN_POINTER, &ltog));
  PetscCall(VecSetLocalToGlobalMapping(*vv, ltog));
  PetscCall(ISLocalToGlobalMappingDestroy(&ltog));
  PetscFunctionReturn(PETSC_SUCCESS);
}

/*@
   VecCreateGhost - Creates a parallel vector with ghost padding on each processor.

   Collective

   Input Parameters:
+  comm - the MPI communicator to use
.  n - local vector length
.  N - global vector length (or `PETSC_DETERMINE` to have calculated if `n` is given)
.  nghost - number of local ghost points
-  ghosts - global indices of ghost points, these do not need to be in increasing order (sorted)

   Output Parameter:
.  vv - the global vector representation (without ghost points as part of vector)

   Level: advanced

   Notes:
   Use `VecGhostGetLocalForm()` to access the local, ghosted representation
   of the vector.

   This also automatically sets the `ISLocalToGlobalMapping()` for this vector.

.seealso: [](chapter_vectors), `Vec`, `VecType`, `VecCreateSeq()`, `VecCreate()`, `VecDuplicate()`, `VecDuplicateVecs()`, `VecCreateMPI()`,
          `VecGhostGetLocalForm()`, `VecGhostRestoreLocalForm()`, `VecGhostUpdateBegin()`,
          `VecCreateGhostWithArray()`, `VecCreateMPIWithArray()`, `VecGhostUpdateEnd()`,
          `VecCreateGhostBlock()`, `VecCreateGhostBlockWithArray()`, `VecMPISetGhost()`

@*/
PetscErrorCode VecCreateGhost(MPI_Comm comm, PetscInt n, PetscInt N, PetscInt nghost, const PetscInt ghosts[], Vec *vv)
{
  PetscFunctionBegin;
  PetscCall(VecCreateGhostWithArray(comm, n, N, nghost, ghosts, NULL, vv));
  PetscFunctionReturn(PETSC_SUCCESS);
}
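/*
   A hedged sketch of the typical ghost-update cycle for a vector built with VecCreateGhost();
   the local size and the ghost index list are invented and would normally name entries owned
   by neighboring ranks:

     PetscInt ghosts[2] = {...}; // global indices of off-process entries needed locally
     Vec      v, vlocal;

     PetscCall(VecCreateGhost(PETSC_COMM_WORLD, 5, PETSC_DETERMINE, 2, ghosts, &v));
     // ... set the owned entries of v ...
     PetscCall(VecGhostUpdateBegin(v, INSERT_VALUES, SCATTER_FORWARD));
     PetscCall(VecGhostUpdateEnd(v, INSERT_VALUES, SCATTER_FORWARD));
     PetscCall(VecGhostGetLocalForm(v, &vlocal)); // sequential form of length n + nghost
     // ... read owned and ghost values through vlocal ...
     PetscCall(VecGhostRestoreLocalForm(v, &vlocal));
     PetscCall(VecDestroy(&v));
*/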

/*@
   VecMPISetGhost - Sets the ghost points for an MPI ghost vector

   Collective

   Input Parameters:
+  vv - the MPI vector
.  nghost - number of local ghost points
-  ghosts - global indices of ghost points, these do not need to be in increasing order (sorted)

   Level: advanced

   Notes:
   Use `VecGhostGetLocalForm()` to access the local, ghosted representation
   of the vector.

   This also automatically sets the `ISLocalToGlobalMapping()` for this vector.

   You must call this AFTER you have set the type of the vector (with `VecSetType()`) and the size (with `VecSetSizes()`).

.seealso: [](chapter_vectors), `Vec`, `VecType`, `VecCreateSeq()`, `VecCreate()`, `VecDuplicate()`, `VecDuplicateVecs()`, `VecCreateMPI()`,
          `VecGhostGetLocalForm()`, `VecGhostRestoreLocalForm()`, `VecGhostUpdateBegin()`,
          `VecCreateGhostWithArray()`, `VecCreateMPIWithArray()`, `VecGhostUpdateEnd()`,
          `VecCreateGhostBlock()`, `VecCreateGhostBlockWithArray()`
@*/
PetscErrorCode VecMPISetGhost(Vec vv, PetscInt nghost, const PetscInt ghosts[])
{
  PetscBool flg;

  PetscFunctionBegin;
  PetscCall(PetscObjectTypeCompare((PetscObject)vv, VECMPI, &flg));
  /* if this is already a fully instantiated VECMPI, destroy it and rebuild it with ghosting */
  if (flg) {
    PetscInt               n, N;
    Vec_MPI               *w;
    PetscScalar           *larray;
    IS                     from, to;
    ISLocalToGlobalMapping ltog;
    PetscInt               rstart, i, *indices;
    MPI_Comm               comm;

    PetscCall(PetscObjectGetComm((PetscObject)vv, &comm));
    n = vv->map->n;
    N = vv->map->N;
    PetscUseTypeMethod(vv, destroy);
    PetscCall(VecSetSizes(vv, n, N));
    PetscCall(VecCreate_MPI_Private(vv, PETSC_TRUE, nghost, NULL));
    w = (Vec_MPI *)(vv)->data;
    /* Create local representation */
    PetscCall(VecGetArray(vv, &larray));
    PetscCall(VecCreateSeqWithArray(PETSC_COMM_SELF, 1, n + nghost, larray, &w->localrep));
    PetscCall(VecRestoreArray(vv, &larray));

    /*
     Create scatter context for scattering (updating) ghost values
     */
    PetscCall(ISCreateGeneral(comm, nghost, ghosts, PETSC_COPY_VALUES, &from));
    PetscCall(ISCreateStride(PETSC_COMM_SELF, nghost, n, 1, &to));
    PetscCall(VecScatterCreate(vv, from, w->localrep, to, &w->localupdate));
    PetscCall(ISDestroy(&to));
    PetscCall(ISDestroy(&from));

    /* set local to global mapping for ghosted vector */
    PetscCall(PetscMalloc1(n + nghost, &indices));
    PetscCall(VecGetOwnershipRange(vv, &rstart, NULL));

    for (i = 0; i < n; i++) indices[i] = rstart + i;
    for (i = 0; i < nghost; i++) indices[n + i] = ghosts[i];

    PetscCall(ISLocalToGlobalMappingCreate(comm, 1, n + nghost, indices, PETSC_OWN_POINTER, &ltog));
    PetscCall(VecSetLocalToGlobalMapping(vv, ltog));
    PetscCall(ISLocalToGlobalMappingDestroy(&ltog));
  } else {
    PetscCheck(vv->ops->create != VecCreate_MPI, PetscObjectComm((PetscObject)vv), PETSC_ERR_ARG_WRONGSTATE, "Must set local or global size before setting ghosting");
    PetscCheck(((PetscObject)vv)->type_name, PetscObjectComm((PetscObject)vv), PETSC_ERR_ARG_WRONGSTATE, "Must set type to VECMPI before ghosting");
  }
  PetscFunctionReturn(PETSC_SUCCESS);
}
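/*
   A sketch of attaching ghosts to an already typed VECMPI with VecMPISetGhost(); `n`,
   `nghost` and `ghosts` are placeholders supplied by the caller:

     Vec v;
     PetscCall(VecCreate(PETSC_COMM_WORLD, &v));
     PetscCall(VecSetSizes(v, n, PETSC_DETERMINE));
     PetscCall(VecSetType(v, VECMPI));             // type and sizes must be set first
     PetscCall(VecMPISetGhost(v, nghost, ghosts)); // rebuilds the vector with ghost padding
*/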

/* ------------------------------------------------------------------------------------------*/
/*@C
   VecCreateGhostBlockWithArray - Creates a parallel vector with ghost padding on each processor;
   the caller allocates the array space. Indices in the ghost region are based on blocks.

   Collective

   Input Parameters:
+  comm - the MPI communicator to use
.  bs - block size
.  n - local vector length
.  N - global vector length (or `PETSC_DETERMINE` to have calculated if `n` is given)
.  nghost - number of local ghost blocks
.  ghosts - global indices of ghost blocks (or `NULL` if not needed), counts are by block not by index, these do not need to be in increasing order (sorted)
-  array - the space to store the vector values (as long as `n + nghost*bs`)

   Output Parameter:
.  vv - the global vector representation (without ghost points as part of vector)

   Level: advanced

   Notes:
   Use `VecGhostGetLocalForm()` to access the local, ghosted representation
   of the vector.

   n is the local vector size (total local size not the number of blocks) while nghost
   is the number of blocks in the ghost portion, i.e. the number of elements in the ghost
   portion is bs*nghost

.seealso: [](chapter_vectors), `Vec`, `VecType`, `VecCreate()`, `VecGhostGetLocalForm()`, `VecGhostRestoreLocalForm()`,
          `VecCreateGhost()`, `VecCreateSeqWithArray()`, `VecCreateMPIWithArray()`,
          `VecCreateGhostWithArray()`, `VecCreateGhostBlock()`
@*/
PetscErrorCode VecCreateGhostBlockWithArray(MPI_Comm comm, PetscInt bs, PetscInt n, PetscInt N, PetscInt nghost, const PetscInt ghosts[], const PetscScalar array[], Vec *vv)
{
  Vec_MPI               *w;
  PetscScalar           *larray;
  IS                     from, to;
  ISLocalToGlobalMapping ltog;
  PetscInt               rstart, i, nb, *indices;

  PetscFunctionBegin;
  *vv = NULL;

  PetscCheck(n != PETSC_DECIDE, PETSC_COMM_SELF, PETSC_ERR_ARG_OUTOFRANGE, "Must set local size");
  PetscCheck(nghost != PETSC_DECIDE, PETSC_COMM_SELF, PETSC_ERR_ARG_OUTOFRANGE, "Must set local ghost size");
  PetscCheck(nghost >= 0, PETSC_COMM_SELF, PETSC_ERR_ARG_OUTOFRANGE, "Ghost length must be >= 0");
  PetscCheck(n % bs == 0, PETSC_COMM_SELF, PETSC_ERR_ARG_INCOMP, "Local size must be a multiple of block size");
  PetscCall(PetscSplitOwnership(comm, &n, &N));
  /* Create global representation */
  PetscCall(VecCreate(comm, vv));
  PetscCall(VecSetSizes(*vv, n, N));
  PetscCall(VecSetBlockSize(*vv, bs));
  PetscCall(VecCreate_MPI_Private(*vv, PETSC_TRUE, nghost * bs, array));
  w = (Vec_MPI *)(*vv)->data;
  /* Create local representation */
  PetscCall(VecGetArray(*vv, &larray));
  PetscCall(VecCreateSeqWithArray(PETSC_COMM_SELF, bs, n + bs * nghost, larray, &w->localrep));
  PetscCall(VecRestoreArray(*vv, &larray));

  /*
       Create scatter context for scattering (updating) ghost values
  */
  PetscCall(ISCreateBlock(comm, bs, nghost, ghosts, PETSC_COPY_VALUES, &from));
  PetscCall(ISCreateStride(PETSC_COMM_SELF, bs * nghost, n, 1, &to));
  PetscCall(VecScatterCreate(*vv, from, w->localrep, to, &w->localupdate));
  PetscCall(ISDestroy(&to));
  PetscCall(ISDestroy(&from));

  /* set local to global mapping for ghosted vector */
  nb = n / bs;
  PetscCall(PetscMalloc1(nb + nghost, &indices));
  PetscCall(VecGetOwnershipRange(*vv, &rstart, NULL));
  rstart = rstart / bs;

  for (i = 0; i < nb; i++) indices[i] = rstart + i;
  for (i = 0; i < nghost; i++) indices[nb + i] = ghosts[i];

  PetscCall(ISLocalToGlobalMappingCreate(comm, bs, nb + nghost, indices, PETSC_OWN_POINTER, &ltog));
  PetscCall(VecSetLocalToGlobalMapping(*vv, ltog));
  PetscCall(ISLocalToGlobalMappingDestroy(&ltog));
  PetscFunctionReturn(PETSC_SUCCESS);
}

/*@
   VecCreateGhostBlock - Creates a parallel vector with ghost padding on each processor.
        The indexing of the ghost points is done by blocks.

   Collective

   Input Parameters:
+  comm - the MPI communicator to use
.  bs - the block size
.  n - local vector length
.  N - global vector length (or `PETSC_DETERMINE` to have calculated if `n` is given)
.  nghost - number of local ghost blocks
-  ghosts - global indices of ghost blocks, counts are by block, not by individual index, these do not need to be in increasing order (sorted)

   Output Parameter:
.  vv - the global vector representation (without ghost points as part of vector)

   Level: advanced

   Notes:
   Use `VecGhostGetLocalForm()` to access the local, ghosted representation
   of the vector.

   `n` is the local vector size (total local size not the number of blocks) while `nghost`
   is the number of blocks in the ghost portion, i.e. the number of elements in the ghost
   portion is `bs*nghost`

.seealso: [](chapter_vectors), `Vec`, `VecType`, `VecCreateSeq()`, `VecCreate()`, `VecDuplicate()`, `VecDuplicateVecs()`, `VecCreateMPI()`,
          `VecGhostGetLocalForm()`, `VecGhostRestoreLocalForm()`,
          `VecCreateGhostWithArray()`, `VecCreateMPIWithArray()`, `VecCreateGhostBlockWithArray()`
@*/
PetscErrorCode VecCreateGhostBlock(MPI_Comm comm, PetscInt bs, PetscInt n, PetscInt N, PetscInt nghost, const PetscInt ghosts[], Vec *vv)
{
  PetscFunctionBegin;
  PetscCall(VecCreateGhostBlockWithArray(comm, bs, n, N, nghost, ghosts, NULL, vv));
  PetscFunctionReturn(PETSC_SUCCESS);
}
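/*
   A block-ghost sketch; bs = 2, n = 6 and the ghost block index are invented values. Note
   that n counts scalars while nghost counts blocks, so the local form holds n + bs*nghost
   entries:

     PetscInt ghostblocks[1] = {...}; // global block index, typically owned by another rank
     Vec      v;

     PetscCall(VecCreateGhostBlock(PETSC_COMM_WORLD, 2, 6, PETSC_DETERMINE, 1, ghostblocks, &v));
     PetscCall(VecGhostUpdateBegin(v, INSERT_VALUES, SCATTER_FORWARD));
     PetscCall(VecGhostUpdateEnd(v, INSERT_VALUES, SCATTER_FORWARD));
     PetscCall(VecDestroy(&v));
*/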
906