/*
   This file contains routines for parallel vector operations.
 */
#include <petscsys.h>
#include <../src/vec/vec/impls/mpi/pvecimpl.h> /*I  "petscvec.h"   I*/

extern PetscErrorCode VecView_MPI_Draw(Vec, PetscViewer);

PetscErrorCode VecPlaceArray_MPI(Vec vin, const PetscScalar *a)
{
  Vec_MPI *v = (Vec_MPI *)vin->data;

  PetscFunctionBegin;
  PetscCheck(!v->unplacedarray, PETSC_COMM_SELF, PETSC_ERR_ARG_WRONGSTATE, "VecPlaceArray() was already called on this vector, without a call to VecResetArray()");
  v->unplacedarray = v->array; /* save the previous array so VecResetArray() can restore it */
  v->array         = (PetscScalar *)a;
  if (v->localrep) PetscCall(VecPlaceArray(v->localrep, a));
  PetscFunctionReturn(PETSC_SUCCESS);
}

PetscErrorCode VecDuplicate_MPI(Vec win, Vec *v)
{
  Vec_MPI     *vw, *w = (Vec_MPI *)win->data;
  PetscScalar *array;

  PetscFunctionBegin;
  PetscCall(VecCreate(PetscObjectComm((PetscObject)win), v));
  PetscCall(PetscLayoutReference(win->map, &(*v)->map));

  PetscCall(VecCreate_MPI_Private(*v, PETSC_TRUE, w->nghost, NULL));
  vw           = (Vec_MPI *)(*v)->data;
  (*v)->ops[0] = win->ops[0];

  /* save the local representation of the parallel vector (and its scatter) if it exists */
  if (w->localrep) {
    PetscCall(VecGetArray(*v, &array));
    PetscCall(VecCreateSeqWithArray(PETSC_COMM_SELF, PetscAbs(win->map->bs), win->map->n + w->nghost, array, &vw->localrep));
    vw->localrep->ops[0] = w->localrep->ops[0];
    PetscCall(VecRestoreArray(*v, &array));

    vw->localupdate = w->localupdate;
    if (vw->localupdate) PetscCall(PetscObjectReference((PetscObject)vw->localupdate));
  }

  /* the new vector should inherit the stashing property of the parent */
  (*v)->stash.donotstash   = win->stash.donotstash;
  (*v)->stash.ignorenegidx = win->stash.ignorenegidx;

  PetscCall(PetscObjectListDuplicate(((PetscObject)win)->olist, &((PetscObject)(*v))->olist));
  PetscCall(PetscFunctionListDuplicate(((PetscObject)win)->qlist, &((PetscObject)(*v))->qlist));

  (*v)->map->bs   = PetscAbs(win->map->bs);
  (*v)->bstash.bs = win->bstash.bs;
  PetscFunctionReturn(PETSC_SUCCESS);
}

static PetscErrorCode VecSetOption_MPI(Vec V, VecOption op, PetscBool flag)
{
  Vec_MPI *v = (Vec_MPI *)V->data;

  PetscFunctionBegin;
  switch (op) {
  case VEC_IGNORE_OFF_PROC_ENTRIES:
    V->stash.donotstash = flag;
    break;
  case VEC_IGNORE_NEGATIVE_INDICES:
    V->stash.ignorenegidx = flag;
    break;
  case VEC_SUBSET_OFF_PROC_ENTRIES:
    v->assembly_subset = flag;              /* see the same logic in MatAssembly wrt MAT_SUBSET_OFF_PROC_ENTRIES */
    if (!v->assembly_subset) {              /* user indicates "do not reuse the communication pattern" */
      PetscCall(VecAssemblyReset_MPI(V));   /* reset the existing pattern to free memory */
      v->first_assembly_done = PETSC_FALSE; /* mark that the first assembly has not been done */
    }
    break;
  }

  PetscFunctionReturn(PETSC_SUCCESS);
}
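
/*
   Usage sketch (illustrative only; nits, nloc, idx, and vals are placeholders supplied
   by the caller): a user opts in to reusing the assembly communication pattern when
   repeated assemblies touch the same (or a subset of the same) off-process entries:

     PetscCall(VecSetOption(X, VEC_SUBSET_OFF_PROC_ENTRIES, PETSC_TRUE));
     for (PetscInt it = 0; it < nits; it++) {
       PetscCall(VecSetValues(X, nloc, idx, vals, ADD_VALUES)); // same index pattern every iteration
       PetscCall(VecAssemblyBegin(X));
       PetscCall(VecAssemblyEnd(X));
     }
*/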

PetscErrorCode VecResetArray_MPI(Vec vin)
{
  Vec_MPI *v = (Vec_MPI *)vin->data;

  PetscFunctionBegin;
  v->array         = v->unplacedarray;
  v->unplacedarray = NULL;
  if (v->localrep) PetscCall(VecResetArray(v->localrep));
  PetscFunctionReturn(PETSC_SUCCESS);
}

static PetscErrorCode VecAssemblySend_MPI_Private(MPI_Comm comm, const PetscMPIInt tag[], PetscMPIInt rankid, PetscMPIInt rank, void *sdata, MPI_Request req[], void *ctx)
{
  Vec                X   = (Vec)ctx;
  Vec_MPI           *x   = (Vec_MPI *)X->data;
  VecAssemblyHeader *hdr = (VecAssemblyHeader *)sdata;
  PetscInt           bs  = X->map->bs;

  PetscFunctionBegin;
  /* x->first_assembly_done indicates we are reusing a communication network. In that case, some
     messages can be empty, but we have to send them this time if we sent them before because the
     receiver is expecting them.
   */
  if (hdr->count || (x->first_assembly_done && x->sendptrs[rankid].ints)) {
    PetscCallMPI(MPI_Isend(x->sendptrs[rankid].ints, hdr->count, MPIU_INT, rank, tag[0], comm, &req[0]));
    PetscCallMPI(MPI_Isend(x->sendptrs[rankid].scalars, hdr->count, MPIU_SCALAR, rank, tag[1], comm, &req[1]));
  }
  if (hdr->bcount || (x->first_assembly_done && x->sendptrs[rankid].intb)) {
    PetscCallMPI(MPI_Isend(x->sendptrs[rankid].intb, hdr->bcount, MPIU_INT, rank, tag[2], comm, &req[2]));
    PetscCallMPI(MPI_Isend(x->sendptrs[rankid].scalarb, hdr->bcount * bs, MPIU_SCALAR, rank, tag[3], comm, &req[3]));
  }
  PetscFunctionReturn(PETSC_SUCCESS);
}

static PetscErrorCode VecAssemblyRecv_MPI_Private(MPI_Comm comm, const PetscMPIInt tag[], PetscMPIInt rank, void *rdata, MPI_Request req[], void *ctx)
{
  Vec                X   = (Vec)ctx;
  Vec_MPI           *x   = (Vec_MPI *)X->data;
  VecAssemblyHeader *hdr = (VecAssemblyHeader *)rdata;
  PetscInt           bs  = X->map->bs;
  VecAssemblyFrame  *frame;

  PetscFunctionBegin;
  PetscCall(PetscSegBufferGet(x->segrecvframe, 1, &frame));

  if (hdr->count) {
    PetscCall(PetscSegBufferGet(x->segrecvint, hdr->count, &frame->ints));
    PetscCallMPI(MPI_Irecv(frame->ints, hdr->count, MPIU_INT, rank, tag[0], comm, &req[0]));
    PetscCall(PetscSegBufferGet(x->segrecvscalar, hdr->count, &frame->scalars));
    PetscCallMPI(MPI_Irecv(frame->scalars, hdr->count, MPIU_SCALAR, rank, tag[1], comm, &req[1]));
    frame->pendings = 2;
  } else {
    frame->ints     = NULL;
    frame->scalars  = NULL;
    frame->pendings = 0;
  }

  if (hdr->bcount) {
    PetscCall(PetscSegBufferGet(x->segrecvint, hdr->bcount, &frame->intb));
    PetscCallMPI(MPI_Irecv(frame->intb, hdr->bcount, MPIU_INT, rank, tag[2], comm, &req[2]));
    PetscCall(PetscSegBufferGet(x->segrecvscalar, hdr->bcount * bs, &frame->scalarb));
    PetscCallMPI(MPI_Irecv(frame->scalarb, hdr->bcount * bs, MPIU_SCALAR, rank, tag[3], comm, &req[3]));
    frame->pendingb = 2;
  } else {
    frame->intb     = NULL;
    frame->scalarb  = NULL;
    frame->pendingb = 0;
  }
  PetscFunctionReturn(PETSC_SUCCESS);
}

static PetscErrorCode VecAssemblyBegin_MPI_BTS(Vec X)
{
  Vec_MPI *x = (Vec_MPI *)X->data;
  MPI_Comm comm;
  PetscInt i, j, jb, bs;

  PetscFunctionBegin;
  if (X->stash.donotstash) PetscFunctionReturn(PETSC_SUCCESS);

  PetscCall(PetscObjectGetComm((PetscObject)X, &comm));
  PetscCall(VecGetBlockSize(X, &bs));
  if (PetscDefined(USE_DEBUG)) {
    InsertMode addv;
    PetscCall(MPIU_Allreduce((PetscEnum *)&X->stash.insertmode, (PetscEnum *)&addv, 1, MPIU_ENUM, MPI_BOR, comm));
    PetscCheck(addv != (ADD_VALUES | INSERT_VALUES), comm, PETSC_ERR_ARG_NOTSAMETYPE, "Some processors inserted values while others added");
  }
  X->bstash.insertmode = X->stash.insertmode; /* the block stash implicitly tracks the InsertMode of the scalar stash */

  PetscCall(VecStashSortCompress_Private(&X->stash));
  PetscCall(VecStashSortCompress_Private(&X->bstash));

  if (!x->sendranks) {
    PetscMPIInt nowners, bnowners, *owners, *bowners;
    PetscInt    ntmp;
    PetscCall(VecStashGetOwnerList_Private(&X->stash, X->map, &nowners, &owners));
    PetscCall(VecStashGetOwnerList_Private(&X->bstash, X->map, &bnowners, &bowners));
    PetscCall(PetscMergeMPIIntArray(nowners, owners, bnowners, bowners, &ntmp, &x->sendranks));
    x->nsendranks = ntmp;
    PetscCall(PetscFree(owners));
    PetscCall(PetscFree(bowners));
    PetscCall(PetscMalloc1(x->nsendranks, &x->sendhdr));
    PetscCall(PetscCalloc1(x->nsendranks, &x->sendptrs));
  }
  for (i = 0, j = 0, jb = 0; i < x->nsendranks; i++) {
    PetscMPIInt rank         = x->sendranks[i];
    x->sendhdr[i].insertmode = X->stash.insertmode;
    /* Initialize pointers for non-empty stashes the first time around.  Subsequent assemblies with
     * VEC_SUBSET_OFF_PROC_ENTRIES will leave the old pointers (dangling because the stash has been collected) when
     * there is nothing new to send, so that size-zero messages get sent instead. */
    x->sendhdr[i].count = 0;
    if (X->stash.n) {
      x->sendptrs[i].ints    = &X->stash.idx[j];
      x->sendptrs[i].scalars = &X->stash.array[j];
      for (; j < X->stash.n && X->stash.idx[j] < X->map->range[rank + 1]; j++) x->sendhdr[i].count++;
    }
    x->sendhdr[i].bcount = 0;
    if (X->bstash.n) {
      x->sendptrs[i].intb    = &X->bstash.idx[jb];
      x->sendptrs[i].scalarb = &X->bstash.array[jb * bs];
      for (; jb < X->bstash.n && X->bstash.idx[jb] * bs < X->map->range[rank + 1]; jb++) x->sendhdr[i].bcount++;
    }
  }

  if (!x->segrecvint) PetscCall(PetscSegBufferCreate(sizeof(PetscInt), 1000, &x->segrecvint));
  if (!x->segrecvscalar) PetscCall(PetscSegBufferCreate(sizeof(PetscScalar), 1000, &x->segrecvscalar));
  if (!x->segrecvframe) PetscCall(PetscSegBufferCreate(sizeof(VecAssemblyFrame), 50, &x->segrecvframe));
  if (x->first_assembly_done) { /* this is not the first assembly */
    PetscMPIInt tag[4];
    for (i = 0; i < 4; i++) PetscCall(PetscCommGetNewTag(comm, &tag[i]));
    for (i = 0; i < x->nsendranks; i++) PetscCall(VecAssemblySend_MPI_Private(comm, tag, i, x->sendranks[i], x->sendhdr + i, x->sendreqs + 4 * i, X));
    for (i = 0; i < x->nrecvranks; i++) PetscCall(VecAssemblyRecv_MPI_Private(comm, tag, x->recvranks[i], x->recvhdr + i, x->recvreqs + 4 * i, X));
    x->use_status = PETSC_TRUE;
  } else { /* first-time assembly */
    PetscCall(PetscCommBuildTwoSidedFReq(comm, 3, MPIU_INT, x->nsendranks, x->sendranks, (PetscInt *)x->sendhdr, &x->nrecvranks, &x->recvranks, &x->recvhdr, 4, &x->sendreqs, &x->recvreqs, VecAssemblySend_MPI_Private, VecAssemblyRecv_MPI_Private, X));
    x->use_status = PETSC_FALSE;
  }

  /* The first_assembly_done flag is only meaningful when x->assembly_subset is set;
     when assembly_subset is set, record here that the first assembly has now been done. */
  x->first_assembly_done = x->assembly_subset;

  {
    PetscInt nstash, reallocs;
    PetscCall(VecStashGetInfo_Private(&X->stash, &nstash, &reallocs));
    PetscCall(PetscInfo(X, "Stash has %" PetscInt_FMT " entries, uses %" PetscInt_FMT " mallocs.\n", nstash, reallocs));
    PetscCall(VecStashGetInfo_Private(&X->bstash, &nstash, &reallocs));
    PetscCall(PetscInfo(X, "Block-Stash has %" PetscInt_FMT " entries, uses %" PetscInt_FMT " mallocs.\n", nstash, reallocs));
  }
  PetscFunctionReturn(PETSC_SUCCESS);
}

static PetscErrorCode VecAssemblyEnd_MPI_BTS(Vec X)
{
  Vec_MPI          *x  = (Vec_MPI *)X->data;
  PetscInt          bs = X->map->bs;
  PetscMPIInt       npending, *some_indices, r;
  MPI_Status       *some_statuses;
  PetscScalar      *xarray;
  VecAssemblyFrame *frame;

  PetscFunctionBegin;
  if (X->stash.donotstash) {
    X->stash.insertmode  = NOT_SET_VALUES;
    X->bstash.insertmode = NOT_SET_VALUES;
    PetscFunctionReturn(PETSC_SUCCESS);
  }

  PetscCheck(x->segrecvframe, PETSC_COMM_SELF, PETSC_ERR_PLIB, "Missing segrecvframe! Probably you forgot to call VecAssemblyBegin() first");
  PetscCall(VecGetArray(X, &xarray));
  PetscCall(PetscSegBufferExtractInPlace(x->segrecvframe, &frame));
  PetscCall(PetscMalloc2(4 * x->nrecvranks, &some_indices, x->use_status ? 4 * x->nrecvranks : 0, &some_statuses));
  for (r = 0, npending = 0; r < x->nrecvranks; r++) npending += frame[r].pendings + frame[r].pendingb;
  while (npending > 0) {
    PetscMPIInt ndone = 0, ii;
    /* Filling MPI_Status fields requires some resources from the MPI library.  We skip it on the first assembly, or
     * when VEC_SUBSET_OFF_PROC_ENTRIES has not been set, because we could exchange exact sizes in the initial
     * rendezvous.  When the rendezvous is elided, however, we use MPI_Status to get actual message lengths, so that
     * subsequent assembly can set a proper subset of the values. */
    PetscCallMPI(MPI_Waitsome(4 * x->nrecvranks, x->recvreqs, &ndone, some_indices, x->use_status ? some_statuses : MPI_STATUSES_IGNORE));
    for (ii = 0; ii < ndone; ii++) {
      PetscInt     i     = some_indices[ii] / 4, j, k;
      InsertMode   imode = (InsertMode)x->recvhdr[i].insertmode;
      PetscInt    *recvint;
      PetscScalar *recvscalar;
      PetscBool    intmsg   = (PetscBool)(some_indices[ii] % 2 == 0);
      PetscBool    blockmsg = (PetscBool)((some_indices[ii] % 4) / 2 == 1);
      npending--;
      if (!blockmsg) { /* Scalar stash */
        PetscMPIInt count;
        if (--frame[i].pendings > 0) continue;
        if (x->use_status) {
          PetscCallMPI(MPI_Get_count(&some_statuses[ii], intmsg ? MPIU_INT : MPIU_SCALAR, &count));
        } else count = x->recvhdr[i].count;
        for (j = 0, recvint = frame[i].ints, recvscalar = frame[i].scalars; j < count; j++, recvint++) {
          PetscInt loc = *recvint - X->map->rstart;
          PetscCheck(*recvint >= X->map->rstart && X->map->rend > *recvint, PETSC_COMM_SELF, PETSC_ERR_PLIB, "Received vector entry %" PetscInt_FMT " out of local range [%" PetscInt_FMT ",%" PetscInt_FMT ")", *recvint, X->map->rstart, X->map->rend);
          switch (imode) {
          case ADD_VALUES:
            xarray[loc] += *recvscalar++;
            break;
          case INSERT_VALUES:
            xarray[loc] = *recvscalar++;
            break;
          default:
            SETERRQ(PETSC_COMM_SELF, PETSC_ERR_SUP, "Insert mode not supported 0x%x", imode);
          }
        }
      } else { /* Block stash */
        PetscMPIInt count;
        if (--frame[i].pendingb > 0) continue;
        if (x->use_status) {
          PetscCallMPI(MPI_Get_count(&some_statuses[ii], intmsg ? MPIU_INT : MPIU_SCALAR, &count));
          if (!intmsg) count /= bs; /* convert from number of scalars to number of blocks */
        } else count = x->recvhdr[i].bcount;
        for (j = 0, recvint = frame[i].intb, recvscalar = frame[i].scalarb; j < count; j++, recvint++) {
          PetscInt loc = (*recvint) * bs - X->map->rstart;
          switch (imode) {
          case ADD_VALUES:
            for (k = loc; k < loc + bs; k++) xarray[k] += *recvscalar++;
            break;
          case INSERT_VALUES:
            for (k = loc; k < loc + bs; k++) xarray[k] = *recvscalar++;
            break;
          default:
            SETERRQ(PETSC_COMM_SELF, PETSC_ERR_SUP, "Insert mode not supported 0x%x", imode);
          }
        }
      }
    }
  }
  PetscCall(VecRestoreArray(X, &xarray));
  PetscCallMPI(MPI_Waitall(4 * x->nsendranks, x->sendreqs, MPI_STATUSES_IGNORE));
  PetscCall(PetscFree2(some_indices, some_statuses));
  if (x->assembly_subset) {
    PetscCall(PetscSegBufferExtractInPlace(x->segrecvint, NULL));
    PetscCall(PetscSegBufferExtractInPlace(x->segrecvscalar, NULL));
  } else {
    PetscCall(VecAssemblyReset_MPI(X));
  }

  X->stash.insertmode  = NOT_SET_VALUES;
  X->bstash.insertmode = NOT_SET_VALUES;
  PetscCall(VecStashScatterEnd_Private(&X->stash));
  PetscCall(VecStashScatterEnd_Private(&X->bstash));
  PetscFunctionReturn(PETSC_SUCCESS);
}

PetscErrorCode VecAssemblyReset_MPI(Vec X)
{
  Vec_MPI *x = (Vec_MPI *)X->data;

  PetscFunctionBegin;
  PetscCall(PetscFree(x->sendreqs));
  PetscCall(PetscFree(x->recvreqs));
  PetscCall(PetscFree(x->sendranks));
  PetscCall(PetscFree(x->recvranks));
  PetscCall(PetscFree(x->sendhdr));
  PetscCall(PetscFree(x->recvhdr));
  PetscCall(PetscFree(x->sendptrs));
  PetscCall(PetscSegBufferDestroy(&x->segrecvint));
  PetscCall(PetscSegBufferDestroy(&x->segrecvscalar));
  PetscCall(PetscSegBufferDestroy(&x->segrecvframe));
  PetscFunctionReturn(PETSC_SUCCESS);
}

static PetscErrorCode VecSetFromOptions_MPI(Vec X, PetscOptionItems *PetscOptionsObject)
{
#if !defined(PETSC_HAVE_MPIUNI)
  PetscBool flg = PETSC_FALSE, set;

  PetscFunctionBegin;
  PetscOptionsHeadBegin(PetscOptionsObject, "VecMPI Options");
  PetscCall(PetscOptionsBool("-vec_assembly_legacy", "Use MPI 1 version of assembly", "", flg, &flg, &set));
  if (set) {
    X->ops->assemblybegin = flg ? VecAssemblyBegin_MPI : VecAssemblyBegin_MPI_BTS;
    X->ops->assemblyend   = flg ? VecAssemblyEnd_MPI : VecAssemblyEnd_MPI_BTS;
  }
  PetscOptionsHeadEnd();
#else
  PetscFunctionBegin;
  X->ops->assemblybegin = VecAssemblyBegin_MPI;
  X->ops->assemblyend   = VecAssemblyEnd_MPI;
#endif
  PetscFunctionReturn(PETSC_SUCCESS);
}

static struct _VecOps DvOps = {PetscDesignatedInitializer(duplicate, VecDuplicate_MPI), /* 1 */
                               PetscDesignatedInitializer(duplicatevecs, VecDuplicateVecs_Default),
                               PetscDesignatedInitializer(destroyvecs, VecDestroyVecs_Default),
                               PetscDesignatedInitializer(dot, VecDot_MPI),
                               PetscDesignatedInitializer(mdot, VecMDot_MPI),
                               PetscDesignatedInitializer(norm, VecNorm_MPI),
                               PetscDesignatedInitializer(tdot, VecTDot_MPI),
                               PetscDesignatedInitializer(mtdot, VecMTDot_MPI),
                               PetscDesignatedInitializer(scale, VecScale_Seq),
                               PetscDesignatedInitializer(copy, VecCopy_Seq), /* 10 */
                               PetscDesignatedInitializer(set, VecSet_Seq),
                               PetscDesignatedInitializer(swap, VecSwap_Seq),
                               PetscDesignatedInitializer(axpy, VecAXPY_Seq),
                               PetscDesignatedInitializer(axpby, VecAXPBY_Seq),
                               PetscDesignatedInitializer(maxpy, VecMAXPY_Seq),
                               PetscDesignatedInitializer(aypx, VecAYPX_Seq),
                               PetscDesignatedInitializer(waxpy, VecWAXPY_Seq),
                               PetscDesignatedInitializer(axpbypcz, VecAXPBYPCZ_Seq),
                               PetscDesignatedInitializer(pointwisemult, VecPointwiseMult_Seq),
                               PetscDesignatedInitializer(pointwisedivide, VecPointwiseDivide_Seq),
                               PetscDesignatedInitializer(setvalues, VecSetValues_MPI), /* 20 */
                               PetscDesignatedInitializer(assemblybegin, VecAssemblyBegin_MPI_BTS),
                               PetscDesignatedInitializer(assemblyend, VecAssemblyEnd_MPI_BTS),
                               PetscDesignatedInitializer(getarray, NULL),
                               PetscDesignatedInitializer(getsize, VecGetSize_MPI),
                               PetscDesignatedInitializer(getlocalsize, VecGetSize_Seq),
                               PetscDesignatedInitializer(restorearray, NULL),
                               PetscDesignatedInitializer(max, VecMax_MPI),
                               PetscDesignatedInitializer(min, VecMin_MPI),
                               PetscDesignatedInitializer(setrandom, VecSetRandom_Seq),
                               PetscDesignatedInitializer(setoption, VecSetOption_MPI),
                               PetscDesignatedInitializer(setvaluesblocked, VecSetValuesBlocked_MPI),
                               PetscDesignatedInitializer(destroy, VecDestroy_MPI),
                               PetscDesignatedInitializer(view, VecView_MPI),
                               PetscDesignatedInitializer(placearray, VecPlaceArray_MPI),
                               PetscDesignatedInitializer(replacearray, VecReplaceArray_Seq),
                               PetscDesignatedInitializer(dot_local, VecDot_Seq),
                               PetscDesignatedInitializer(tdot_local, VecTDot_Seq),
                               PetscDesignatedInitializer(norm_local, VecNorm_Seq),
                               PetscDesignatedInitializer(mdot_local, VecMDot_Seq),
                               PetscDesignatedInitializer(mtdot_local, VecMTDot_Seq),
                               PetscDesignatedInitializer(load, VecLoad_Default),
                               PetscDesignatedInitializer(reciprocal, VecReciprocal_Default),
                               PetscDesignatedInitializer(conjugate, VecConjugate_Seq),
                               PetscDesignatedInitializer(setlocaltoglobalmapping, NULL),
                               PetscDesignatedInitializer(setvalueslocal, NULL),
                               PetscDesignatedInitializer(resetarray, VecResetArray_MPI),
                               PetscDesignatedInitializer(setfromoptions, VecSetFromOptions_MPI), /* set from options */
                               PetscDesignatedInitializer(maxpointwisedivide, VecMaxPointwiseDivide_Seq),
                               PetscDesignatedInitializer(pointwisemax, VecPointwiseMax_Seq),
                               PetscDesignatedInitializer(pointwisemaxabs, VecPointwiseMaxAbs_Seq),
                               PetscDesignatedInitializer(pointwisemin, VecPointwiseMin_Seq),
                               PetscDesignatedInitializer(getvalues, VecGetValues_MPI),
                               PetscDesignatedInitializer(sqrt, NULL),
                               PetscDesignatedInitializer(abs, NULL),
                               PetscDesignatedInitializer(exp, NULL),
                               PetscDesignatedInitializer(log, NULL),
                               PetscDesignatedInitializer(shift, NULL),
                               PetscDesignatedInitializer(create, NULL), /* really? */
                               PetscDesignatedInitializer(stridegather, VecStrideGather_Default),
                               PetscDesignatedInitializer(stridescatter, VecStrideScatter_Default),
                               PetscDesignatedInitializer(dotnorm2, NULL),
                               PetscDesignatedInitializer(getsubvector, NULL),
                               PetscDesignatedInitializer(restoresubvector, NULL),
                               PetscDesignatedInitializer(getarrayread, NULL),
                               PetscDesignatedInitializer(restorearrayread, NULL),
                               PetscDesignatedInitializer(stridesubsetgather, VecStrideSubSetGather_Default),
                               PetscDesignatedInitializer(stridesubsetscatter, VecStrideSubSetScatter_Default),
                               PetscDesignatedInitializer(viewnative, VecView_MPI),
                               PetscDesignatedInitializer(loadnative, NULL),
                               PetscDesignatedInitializer(createlocalvector, NULL),
                               PetscDesignatedInitializer(getlocalvector, NULL),
                               PetscDesignatedInitializer(restorelocalvector, NULL),
                               PetscDesignatedInitializer(getlocalvectorread, NULL),
                               PetscDesignatedInitializer(restorelocalvectorread, NULL),
                               PetscDesignatedInitializer(bindtocpu, NULL),
                               PetscDesignatedInitializer(getarraywrite, NULL),
                               PetscDesignatedInitializer(restorearraywrite, NULL),
                               PetscDesignatedInitializer(getarrayandmemtype, NULL),
                               PetscDesignatedInitializer(restorearrayandmemtype, NULL),
                               PetscDesignatedInitializer(getarrayreadandmemtype, NULL),
                               PetscDesignatedInitializer(restorearrayreadandmemtype, NULL),
                               PetscDesignatedInitializer(getarraywriteandmemtype, NULL),
                               PetscDesignatedInitializer(restorearraywriteandmemtype, NULL),
                               PetscDesignatedInitializer(concatenate, NULL),
                               PetscDesignatedInitializer(sum, NULL),
                               PetscDesignatedInitializer(setpreallocationcoo, VecSetPreallocationCOO_MPI),
                               PetscDesignatedInitializer(setvaluescoo, VecSetValuesCOO_MPI),
                               PetscDesignatedInitializer(errorwnorm, NULL)};

/*
    VecCreate_MPI_Private - Basic create routine called by VecCreate_MPI() (i.e. VecCreateMPI()),
    VecCreateMPIWithArray(), VecCreate_Shared() (i.e. VecCreateShared()), VecCreateGhost(),
    VecCreateGhostWithArray(), VecDuplicate_MPI(), and VecDuplicate_Shared()

    If alloc is true and array is NULL then this routine allocates the space, otherwise
    no space is allocated.
*/
PetscErrorCode VecCreate_MPI_Private(Vec v, PetscBool alloc, PetscInt nghost, const PetscScalar array[])
{
  Vec_MPI *s;

  PetscFunctionBegin;
  PetscCall(PetscNew(&s));
  v->data        = (void *)s;
  v->ops[0]      = DvOps;
  s->nghost      = nghost;
  v->petscnative = PETSC_TRUE;
  if (array) v->offloadmask = PETSC_OFFLOAD_CPU;

  PetscCall(PetscLayoutSetUp(v->map));

  s->array           = (PetscScalar *)array;
  s->array_allocated = NULL;
  if (alloc && !array) {
    PetscInt n = v->map->n + nghost;
    PetscCall(PetscCalloc1(n, &s->array));
    s->array_allocated = s->array;
    PetscCall(PetscObjectComposedDataSetReal((PetscObject)v, NormIds[NORM_2], 0));
    PetscCall(PetscObjectComposedDataSetReal((PetscObject)v, NormIds[NORM_1], 0));
    PetscCall(PetscObjectComposedDataSetReal((PetscObject)v, NormIds[NORM_INFINITY], 0));
  }

  /* by default parallel vectors do not have a local representation */
  s->localrep    = NULL;
  s->localupdate = NULL;

  v->stash.insertmode  = NOT_SET_VALUES;
  v->bstash.insertmode = NOT_SET_VALUES;
  /* create the stashes. The block size for bstash is set later when
     VecSetValuesBlocked() is called.
  */
  PetscCall(VecStashCreate_Private(PetscObjectComm((PetscObject)v), 1, &v->stash));
  PetscCall(VecStashCreate_Private(PetscObjectComm((PetscObject)v), PetscAbs(v->map->bs), &v->bstash));

#if defined(PETSC_HAVE_MATLAB)
  PetscCall(PetscObjectComposeFunction((PetscObject)v, "PetscMatlabEnginePut_C", VecMatlabEnginePut_Default));
  PetscCall(PetscObjectComposeFunction((PetscObject)v, "PetscMatlabEngineGet_C", VecMatlabEngineGet_Default));
#endif
  PetscCall(PetscObjectChangeTypeName((PetscObject)v, VECMPI));
  PetscFunctionReturn(PETSC_SUCCESS);
}

/*MC
   VECMPI - VECMPI = "mpi" - The basic parallel vector

   Options Database Key:
. -vec_type mpi - sets the vector type to `VECMPI` during a call to `VecSetFromOptions()`

  Level: beginner

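  Example Usage (a minimal sketch; `comm` and `n` are placeholders supplied by the caller):
.vb
  Vec v;

  PetscCall(VecCreate(comm, &v));
  PetscCall(VecSetSizes(v, n, PETSC_DETERMINE));
  PetscCall(VecSetType(v, VECMPI));
.ve
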
.seealso: [](ch_vectors), `Vec`, `VecType`, `VecCreate()`, `VecSetType()`, `VecSetFromOptions()`, `VecCreateMPIWithArray()`, `VecCreateMPI()`
M*/

PetscErrorCode VecCreate_MPI(Vec vv)
{
  PetscFunctionBegin;
  PetscCall(VecCreate_MPI_Private(vv, PETSC_TRUE, 0, NULL));
  PetscFunctionReturn(PETSC_SUCCESS);
}

/*MC
   VECSTANDARD = "standard" - A `VECSEQ` on one process and a `VECMPI` on more than one process

   Options Database Key:
. -vec_type standard - sets the vector type to `VECSTANDARD` during a call to `VecSetFromOptions()`

  Level: beginner

.seealso: [](ch_vectors), `Vec`, `VecType`, `VecCreateSeq()`, `VecCreateMPI()`
M*/

PETSC_EXTERN PetscErrorCode VecCreate_Standard(Vec v)
{
  PetscMPIInt size;

  PetscFunctionBegin;
  PetscCallMPI(MPI_Comm_size(PetscObjectComm((PetscObject)v), &size));
  if (size == 1) {
    PetscCall(VecSetType(v, VECSEQ));
  } else {
    PetscCall(VecSetType(v, VECMPI));
  }
  PetscFunctionReturn(PETSC_SUCCESS);
}

/*@C
   VecCreateMPIWithArray - Creates a parallel, array-style vector,
   where the user provides the array space to store the vector values.

   Collective

   Input Parameters:
+  comm  - the MPI communicator to use
.  bs    - block size, same meaning as `VecSetBlockSize()`
.  n     - local vector length, cannot be `PETSC_DECIDE`
.  N     - global vector length (or `PETSC_DETERMINE` to have it calculated)
-  array - the user provided array to store the vector values

   Output Parameter:
.  vv - the vector

   Level: intermediate

   Notes:
   Use `VecDuplicate()` or `VecDuplicateVecs()` to form additional vectors of the
   same type as an existing vector.

   If the user-provided array is `NULL`, then `VecPlaceArray()` can be used
   at a later stage to set the array for storing the vector values.

   PETSc does NOT free `array` when the vector is destroyed via `VecDestroy()`.

   The user should not free `array` until the vector is destroyed.

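   Example Usage (a minimal sketch; assumes each rank supplies an `n`-long buffer `array` that it owns):
.vb
  PetscScalar *array;
  Vec          x;

  PetscCall(PetscMalloc1(n, &array));
  PetscCall(VecCreateMPIWithArray(comm, 1, n, PETSC_DETERMINE, array, &x));
  // ... use x ...
  PetscCall(VecDestroy(&x));
  PetscCall(PetscFree(array)); // the caller, not PETSc, frees the array
.ve
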
.seealso: [](ch_vectors), `Vec`, `VecType`, `VecCreateSeqWithArray()`, `VecCreate()`, `VecDuplicate()`, `VecDuplicateVecs()`, `VecCreateGhost()`,
          `VecCreateMPI()`, `VecCreateGhostWithArray()`, `VecPlaceArray()`
@*/
PetscErrorCode VecCreateMPIWithArray(MPI_Comm comm, PetscInt bs, PetscInt n, PetscInt N, const PetscScalar array[], Vec *vv)
{
  PetscFunctionBegin;
  PetscCheck(n != PETSC_DECIDE, PETSC_COMM_SELF, PETSC_ERR_ARG_OUTOFRANGE, "Must set local size of vector");
  PetscCall(PetscSplitOwnership(comm, &n, &N));
  PetscCall(VecCreate(comm, vv));
  PetscCall(VecSetSizes(*vv, n, N));
  PetscCall(VecSetBlockSize(*vv, bs));
  PetscCall(VecCreate_MPI_Private(*vv, PETSC_FALSE, 0, array));
  PetscFunctionReturn(PETSC_SUCCESS);
}

/*@C
   VecCreateGhostWithArray - Creates a parallel vector with ghost padding on each processor;
   the caller allocates the array space.

   Collective

   Input Parameters:
+  comm - the MPI communicator to use
.  n - local vector length
.  N - global vector length (or `PETSC_DETERMINE` to have it calculated if `n` is given)
.  nghost - number of local ghost points
.  ghosts - global indices of ghost points (or `NULL` if not needed), these do not need to be in increasing (sorted) order
-  array - the space to store the vector values (of length at least `n + nghost`)

   Output Parameter:
.  vv - the global vector representation (without ghost points as part of the vector)

   Level: advanced

   Notes:
   Use `VecGhostGetLocalForm()` to access the local, ghosted representation
   of the vector.

   This also automatically sets the `ISLocalToGlobalMapping()` for this vector.

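   Example Usage (a minimal sketch; `ghosts` holds `nghost` global indices owned by other ranks):
.vb
  PetscScalar *array;
  Vec          x;

  PetscCall(PetscMalloc1(n + nghost, &array));
  PetscCall(VecCreateGhostWithArray(comm, n, PETSC_DETERMINE, nghost, ghosts, array, &x));
  // ... assemble x, then refresh the ghost region ...
  PetscCall(VecGhostUpdateBegin(x, INSERT_VALUES, SCATTER_FORWARD));
  PetscCall(VecGhostUpdateEnd(x, INSERT_VALUES, SCATTER_FORWARD));
.ve
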
.seealso: [](ch_vectors), `Vec`, `VecType`, `VecCreate()`, `VecGhostGetLocalForm()`, `VecGhostRestoreLocalForm()`,
          `VecCreateGhost()`, `VecCreateSeqWithArray()`, `VecCreateMPIWithArray()`,
          `VecCreateGhostBlock()`, `VecCreateGhostBlockWithArray()`, `VecMPISetGhost()`
@*/
PetscErrorCode VecCreateGhostWithArray(MPI_Comm comm, PetscInt n, PetscInt N, PetscInt nghost, const PetscInt ghosts[], const PetscScalar array[], Vec *vv)
{
  Vec_MPI               *w;
  PetscScalar           *larray;
  IS                     from, to;
  ISLocalToGlobalMapping ltog;
  PetscInt               rstart, i, *indices;

  PetscFunctionBegin;
  *vv = NULL;

  PetscCheck(n != PETSC_DECIDE, PETSC_COMM_SELF, PETSC_ERR_ARG_OUTOFRANGE, "Must set local size");
  PetscCheck(nghost != PETSC_DECIDE, PETSC_COMM_SELF, PETSC_ERR_ARG_OUTOFRANGE, "Must set local ghost size");
  PetscCheck(nghost >= 0, PETSC_COMM_SELF, PETSC_ERR_ARG_OUTOFRANGE, "Ghost length must be >= 0");
  PetscCall(PetscSplitOwnership(comm, &n, &N));
  /* Create global representation */
  PetscCall(VecCreate(comm, vv));
  PetscCall(VecSetSizes(*vv, n, N));
  PetscCall(VecCreate_MPI_Private(*vv, PETSC_TRUE, nghost, array));
  w = (Vec_MPI *)(*vv)->data;
  /* Create local representation */
  PetscCall(VecGetArray(*vv, &larray));
  PetscCall(VecCreateSeqWithArray(PETSC_COMM_SELF, 1, n + nghost, larray, &w->localrep));
  PetscCall(VecRestoreArray(*vv, &larray));

  /*
       Create scatter context for scattering (updating) ghost values
  */
  PetscCall(ISCreateGeneral(comm, nghost, ghosts, PETSC_COPY_VALUES, &from));
  PetscCall(ISCreateStride(PETSC_COMM_SELF, nghost, n, 1, &to));
  PetscCall(VecScatterCreate(*vv, from, w->localrep, to, &w->localupdate));
  PetscCall(ISDestroy(&to));
  PetscCall(ISDestroy(&from));

  /* set local to global mapping for ghosted vector */
  PetscCall(PetscMalloc1(n + nghost, &indices));
  PetscCall(VecGetOwnershipRange(*vv, &rstart, NULL));
  for (i = 0; i < n; i++) indices[i] = rstart + i;
  for (i = 0; i < nghost; i++) indices[n + i] = ghosts[i];
  PetscCall(ISLocalToGlobalMappingCreate(comm, 1, n + nghost, indices, PETSC_OWN_POINTER, &ltog));
  PetscCall(VecSetLocalToGlobalMapping(*vv, ltog));
  PetscCall(ISLocalToGlobalMappingDestroy(&ltog));
  PetscFunctionReturn(PETSC_SUCCESS);
}

/*@
   VecCreateGhost - Creates a parallel vector with ghost padding on each processor.

   Collective

   Input Parameters:
+  comm - the MPI communicator to use
.  n - local vector length
.  N - global vector length (or `PETSC_DETERMINE` to have it calculated if `n` is given)
.  nghost - number of local ghost points
-  ghosts - global indices of ghost points, these do not need to be in increasing (sorted) order

   Output Parameter:
.  vv - the global vector representation (without ghost points as part of the vector)

   Level: advanced

   Notes:
   Use `VecGhostGetLocalForm()` to access the local, ghosted representation
   of the vector.

   This also automatically sets the `ISLocalToGlobalMapping()` for this vector.

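   Example Usage (a minimal sketch; `ghosts` holds `nghost` global indices owned by other ranks):
.vb
  Vec x;

  PetscCall(VecCreateGhost(comm, n, PETSC_DETERMINE, nghost, ghosts, &x));
  // ... VecSetValues()/VecAssemblyBegin()/VecAssemblyEnd() on x ...
  PetscCall(VecGhostUpdateBegin(x, INSERT_VALUES, SCATTER_FORWARD));
  PetscCall(VecGhostUpdateEnd(x, INSERT_VALUES, SCATTER_FORWARD));
.ve
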
.seealso: [](ch_vectors), `Vec`, `VecType`, `VecCreateSeq()`, `VecCreate()`, `VecDuplicate()`, `VecDuplicateVecs()`, `VecCreateMPI()`,
          `VecGhostGetLocalForm()`, `VecGhostRestoreLocalForm()`, `VecGhostUpdateBegin()`,
          `VecCreateGhostWithArray()`, `VecCreateMPIWithArray()`, `VecGhostUpdateEnd()`,
          `VecCreateGhostBlock()`, `VecCreateGhostBlockWithArray()`, `VecMPISetGhost()`
@*/
PetscErrorCode VecCreateGhost(MPI_Comm comm, PetscInt n, PetscInt N, PetscInt nghost, const PetscInt ghosts[], Vec *vv)
{
  PetscFunctionBegin;
  PetscCall(VecCreateGhostWithArray(comm, n, N, nghost, ghosts, NULL, vv));
  PetscFunctionReturn(PETSC_SUCCESS);
}

/*@
   VecMPISetGhost - Sets the ghost points for an MPI ghost vector

   Collective

   Input Parameters:
+  vv - the MPI vector
.  nghost - number of local ghost points
-  ghosts - global indices of ghost points, these do not need to be in increasing (sorted) order

   Level: advanced

   Notes:
   Use `VecGhostGetLocalForm()` to access the local, ghosted representation
   of the vector.

   This also automatically sets the `ISLocalToGlobalMapping()` for this vector.

   You must call this AFTER you have set the type of the vector (with `VecSetType()`) and the size (with `VecSetSizes()`).

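   Example Usage (a minimal sketch; `comm`, `n`, `nghost`, and `ghosts` are supplied by the caller):
.vb
  Vec v;

  PetscCall(VecCreate(comm, &v));
  PetscCall(VecSetType(v, VECMPI));
  PetscCall(VecSetSizes(v, n, PETSC_DETERMINE));
  PetscCall(VecMPISetGhost(v, nghost, ghosts));
.ve
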
.seealso: [](ch_vectors), `Vec`, `VecType`, `VecCreateSeq()`, `VecCreate()`, `VecDuplicate()`, `VecDuplicateVecs()`, `VecCreateMPI()`,
          `VecGhostGetLocalForm()`, `VecGhostRestoreLocalForm()`, `VecGhostUpdateBegin()`,
          `VecCreateGhostWithArray()`, `VecCreateMPIWithArray()`, `VecGhostUpdateEnd()`,
          `VecCreateGhostBlock()`, `VecCreateGhostBlockWithArray()`
@*/
PetscErrorCode VecMPISetGhost(Vec vv, PetscInt nghost, const PetscInt ghosts[])
{
  PetscBool flg;

  PetscFunctionBegin;
  PetscCall(PetscObjectTypeCompare((PetscObject)vv, VECMPI, &flg));
  /* if this is an already fully built VECMPI, destroy it and rebuild it with ghosting */
  if (flg) {
    PetscInt               n, N;
    Vec_MPI               *w;
    PetscScalar           *larray;
    IS                     from, to;
    ISLocalToGlobalMapping ltog;
    PetscInt               rstart, i, *indices;
    MPI_Comm               comm;

    PetscCall(PetscObjectGetComm((PetscObject)vv, &comm));
    n = vv->map->n;
    N = vv->map->N;
    PetscUseTypeMethod(vv, destroy);
    PetscCall(VecSetSizes(vv, n, N));
    PetscCall(VecCreate_MPI_Private(vv, PETSC_TRUE, nghost, NULL));
    w = (Vec_MPI *)(vv)->data;
    /* Create local representation */
    PetscCall(VecGetArray(vv, &larray));
    PetscCall(VecCreateSeqWithArray(PETSC_COMM_SELF, 1, n + nghost, larray, &w->localrep));
    PetscCall(VecRestoreArray(vv, &larray));

    /*
     Create scatter context for scattering (updating) ghost values
     */
    PetscCall(ISCreateGeneral(comm, nghost, ghosts, PETSC_COPY_VALUES, &from));
    PetscCall(ISCreateStride(PETSC_COMM_SELF, nghost, n, 1, &to));
    PetscCall(VecScatterCreate(vv, from, w->localrep, to, &w->localupdate));
    PetscCall(ISDestroy(&to));
    PetscCall(ISDestroy(&from));

    /* set local to global mapping for ghosted vector */
    PetscCall(PetscMalloc1(n + nghost, &indices));
    PetscCall(VecGetOwnershipRange(vv, &rstart, NULL));

    for (i = 0; i < n; i++) indices[i] = rstart + i;
    for (i = 0; i < nghost; i++) indices[n + i] = ghosts[i];

    PetscCall(ISLocalToGlobalMappingCreate(comm, 1, n + nghost, indices, PETSC_OWN_POINTER, &ltog));
    PetscCall(VecSetLocalToGlobalMapping(vv, ltog));
    PetscCall(ISLocalToGlobalMappingDestroy(&ltog));
  } else {
    PetscCheck(vv->ops->create != VecCreate_MPI, PetscObjectComm((PetscObject)vv), PETSC_ERR_ARG_WRONGSTATE, "Must set local or global size before setting ghosting");
    PetscCheck(((PetscObject)vv)->type_name, PetscObjectComm((PetscObject)vv), PETSC_ERR_ARG_WRONGSTATE, "Must set type to VECMPI before ghosting");
  }
  PetscFunctionReturn(PETSC_SUCCESS);
}

/* ------------------------------------------------------------------------------------------*/
/*@C
   VecCreateGhostBlockWithArray - Creates a parallel vector with ghost padding on each processor;
   the caller allocates the array space. Indices in the ghost region are based on blocks.

   Collective

   Input Parameters:
+  comm - the MPI communicator to use
.  bs - block size
.  n - local vector length
.  N - global vector length (or `PETSC_DETERMINE` to have it calculated if `n` is given)
.  nghost - number of local ghost blocks
.  ghosts - global indices of ghost blocks (or `NULL` if not needed), counts are by block not by index, these do not need to be in increasing (sorted) order
-  array - the space to store the vector values (of length at least `n + nghost*bs`)

   Output Parameter:
.  vv - the global vector representation (without ghost points as part of the vector)

   Level: advanced

   Notes:
   Use `VecGhostGetLocalForm()` to access the local, ghosted representation
   of the vector.

   `n` is the local vector size (the total local size, not the number of blocks) while `nghost`
   is the number of blocks in the ghost portion, i.e. the number of elements in the ghost
   portion is `bs*nghost`

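   Example Usage (a minimal sketch; `ghosts` holds `nghost` global block indices owned by other ranks):
.vb
  PetscScalar *array;
  Vec          x;

  PetscCall(PetscMalloc1(n + nghost * bs, &array));
  PetscCall(VecCreateGhostBlockWithArray(comm, bs, n, PETSC_DETERMINE, nghost, ghosts, array, &x));
  PetscCall(VecGhostUpdateBegin(x, INSERT_VALUES, SCATTER_FORWARD));
  PetscCall(VecGhostUpdateEnd(x, INSERT_VALUES, SCATTER_FORWARD));
.ve
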
.seealso: [](ch_vectors), `Vec`, `VecType`, `VecCreate()`, `VecGhostGetLocalForm()`, `VecGhostRestoreLocalForm()`,
          `VecCreateGhost()`, `VecCreateSeqWithArray()`, `VecCreateMPIWithArray()`,
          `VecCreateGhostWithArray()`, `VecCreateGhostBlock()`
@*/
PetscErrorCode VecCreateGhostBlockWithArray(MPI_Comm comm, PetscInt bs, PetscInt n, PetscInt N, PetscInt nghost, const PetscInt ghosts[], const PetscScalar array[], Vec *vv)
{
  Vec_MPI               *w;
  PetscScalar           *larray;
  IS                     from, to;
  ISLocalToGlobalMapping ltog;
  PetscInt               rstart, i, nb, *indices;

  PetscFunctionBegin;
  *vv = NULL;

  PetscCheck(n != PETSC_DECIDE, PETSC_COMM_SELF, PETSC_ERR_ARG_OUTOFRANGE, "Must set local size");
  PetscCheck(nghost != PETSC_DECIDE, PETSC_COMM_SELF, PETSC_ERR_ARG_OUTOFRANGE, "Must set local ghost size");
  PetscCheck(nghost >= 0, PETSC_COMM_SELF, PETSC_ERR_ARG_OUTOFRANGE, "Ghost length must be >= 0");
  PetscCheck(n % bs == 0, PETSC_COMM_SELF, PETSC_ERR_ARG_INCOMP, "Local size must be a multiple of block size");
  PetscCall(PetscSplitOwnership(comm, &n, &N));
  /* Create global representation */
  PetscCall(VecCreate(comm, vv));
  PetscCall(VecSetSizes(*vv, n, N));
  PetscCall(VecSetBlockSize(*vv, bs));
  PetscCall(VecCreate_MPI_Private(*vv, PETSC_TRUE, nghost * bs, array));
  w = (Vec_MPI *)(*vv)->data;
  /* Create local representation */
  PetscCall(VecGetArray(*vv, &larray));
  PetscCall(VecCreateSeqWithArray(PETSC_COMM_SELF, bs, n + bs * nghost, larray, &w->localrep));
  PetscCall(VecRestoreArray(*vv, &larray));

  /*
       Create scatter context for scattering (updating) ghost values
  */
  PetscCall(ISCreateBlock(comm, bs, nghost, ghosts, PETSC_COPY_VALUES, &from));
  PetscCall(ISCreateStride(PETSC_COMM_SELF, bs * nghost, n, 1, &to));
  PetscCall(VecScatterCreate(*vv, from, w->localrep, to, &w->localupdate));
  PetscCall(ISDestroy(&to));
  PetscCall(ISDestroy(&from));

  /* set local to global mapping for ghosted vector */
  nb = n / bs;
  PetscCall(PetscMalloc1(nb + nghost, &indices));
  PetscCall(VecGetOwnershipRange(*vv, &rstart, NULL));
  rstart = rstart / bs;

  for (i = 0; i < nb; i++) indices[i] = rstart + i;
  for (i = 0; i < nghost; i++) indices[nb + i] = ghosts[i];

  PetscCall(ISLocalToGlobalMappingCreate(comm, bs, nb + nghost, indices, PETSC_OWN_POINTER, &ltog));
  PetscCall(VecSetLocalToGlobalMapping(*vv, ltog));
  PetscCall(ISLocalToGlobalMappingDestroy(&ltog));
  PetscFunctionReturn(PETSC_SUCCESS);
}

/*@
   VecCreateGhostBlock - Creates a parallel vector with ghost padding on each processor.
        The indexing of the ghost points is done with blocks.

   Collective

   Input Parameters:
+  comm - the MPI communicator to use
.  bs - the block size
.  n - local vector length
.  N - global vector length (or `PETSC_DETERMINE` to have it calculated if `n` is given)
.  nghost - number of local ghost blocks
-  ghosts - global indices of ghost blocks, counts are by block, not by individual index, these do not need to be in increasing (sorted) order

   Output Parameter:
.  vv - the global vector representation (without ghost points as part of the vector)

   Level: advanced

   Notes:
   Use `VecGhostGetLocalForm()` to access the local, ghosted representation
   of the vector.

   `n` is the local vector size (the total local size, not the number of blocks) while `nghost`
   is the number of blocks in the ghost portion, i.e. the number of elements in the ghost
   portion is `bs*nghost`

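   Example Usage (a minimal sketch; `ghosts` holds `nghost` global block indices owned by other ranks):
.vb
  Vec x;

  PetscCall(VecCreateGhostBlock(comm, bs, n, PETSC_DETERMINE, nghost, ghosts, &x));
  PetscCall(VecGhostUpdateBegin(x, INSERT_VALUES, SCATTER_FORWARD));
  PetscCall(VecGhostUpdateEnd(x, INSERT_VALUES, SCATTER_FORWARD));
.ve
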
.seealso: [](ch_vectors), `Vec`, `VecType`, `VecCreateSeq()`, `VecCreate()`, `VecDuplicate()`, `VecDuplicateVecs()`, `VecCreateMPI()`,
          `VecGhostGetLocalForm()`, `VecGhostRestoreLocalForm()`,
          `VecCreateGhostWithArray()`, `VecCreateMPIWithArray()`, `VecCreateGhostBlockWithArray()`
@*/
PetscErrorCode VecCreateGhostBlock(MPI_Comm comm, PetscInt bs, PetscInt n, PetscInt N, PetscInt nghost, const PetscInt ghosts[], Vec *vv)
{
  PetscFunctionBegin;
  PetscCall(VecCreateGhostBlockWithArray(comm, bs, n, N, nghost, ghosts, NULL, vv));
  PetscFunctionReturn(PETSC_SUCCESS);
}