/*
   This file contains routines for Parallel vector operations.
 */
#include <petscsys.h>
#include <../src/vec/vec/impls/mpi/pvecimpl.h> /*I  "petscvec.h"   I*/

extern PetscErrorCode VecView_MPI_Draw(Vec, PetscViewer);

PetscErrorCode VecPlaceArray_MPI(Vec vin, const PetscScalar *a)
{
  Vec_MPI *v = (Vec_MPI *)vin->data;

  PetscFunctionBegin;
  PetscCheck(!v->unplacedarray, PETSC_COMM_SELF, PETSC_ERR_ARG_WRONGSTATE, "VecPlaceArray() was already called on this vector, without a call to VecResetArray()");
  v->unplacedarray = v->array; /* save previous array so reset can bring it back */
  v->array         = (PetscScalar *)a;
  if (v->localrep) PetscCall(VecPlaceArray(v->localrep, a));
  PetscFunctionReturn(PETSC_SUCCESS);
}

PetscErrorCode VecDuplicate_MPI(Vec win, Vec *v)
{
  Vec_MPI     *vw, *w = (Vec_MPI *)win->data;
  PetscScalar *array;

  PetscFunctionBegin;
  PetscCall(VecCreate(PetscObjectComm((PetscObject)win), v));
  PetscCall(PetscLayoutReference(win->map, &(*v)->map));

  PetscCall(VecCreate_MPI_Private(*v, PETSC_TRUE, w->nghost, NULL));
  vw = (Vec_MPI *)(*v)->data;
  PetscCall(PetscMemcpy((*v)->ops, win->ops, sizeof(*win->ops)));

  /* save local representation of the parallel vector (and scatter) if it exists */
  if (w->localrep) {
    PetscCall(VecGetArray(*v, &array));
    PetscCall(VecCreateSeqWithArray(PETSC_COMM_SELF, PetscAbs(win->map->bs), win->map->n + w->nghost, array, &vw->localrep));
    PetscCall(PetscMemcpy(vw->localrep->ops, w->localrep->ops, sizeof(*w->localrep->ops)));
    PetscCall(VecRestoreArray(*v, &array));

    vw->localupdate = w->localupdate;
    if (vw->localupdate) PetscCall(PetscObjectReference((PetscObject)vw->localupdate));
  }

  /* New vector should inherit stashing property of parent */
  (*v)->stash.donotstash   = win->stash.donotstash;
  (*v)->stash.ignorenegidx = win->stash.ignorenegidx;

  PetscCall(PetscObjectListDuplicate(((PetscObject)win)->olist, &((PetscObject)(*v))->olist));
  PetscCall(PetscFunctionListDuplicate(((PetscObject)win)->qlist, &((PetscObject)(*v))->qlist));

  (*v)->map->bs   = PetscAbs(win->map->bs);
  (*v)->bstash.bs = win->bstash.bs;
  PetscFunctionReturn(PETSC_SUCCESS);
}

static PetscErrorCode VecSetOption_MPI(Vec V, VecOption op, PetscBool flag)
{
  Vec_MPI *v = (Vec_MPI *)V->data;

  PetscFunctionBegin;
  switch (op) {
  case VEC_IGNORE_OFF_PROC_ENTRIES:
    V->stash.donotstash = flag;
    break;
  case VEC_IGNORE_NEGATIVE_INDICES:
    V->stash.ignorenegidx = flag;
    break;
  case VEC_SUBSET_OFF_PROC_ENTRIES:
    v->assembly_subset = flag;              /* See the same logic in MatAssembly wrt MAT_SUBSET_OFF_PROC_ENTRIES */
    if (!v->assembly_subset) {              /* User indicates "do not reuse the communication pattern" */
      PetscCall(VecAssemblyReset_MPI(V));   /* Reset existing pattern to free memory */
      v->first_assembly_done = PETSC_FALSE; /* Mark that the first assembly has not been done */
    }
    break;
  }

  PetscFunctionReturn(PETSC_SUCCESS);
}
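
/*
   Illustrative sketch (not part of this file): how a caller typically drives the
   VEC_SUBSET_OFF_PROC_ENTRIES branch handled above.  The vector x, the index and
   value arrays, and the number of steps are assumed to exist already.

     PetscCall(VecSetOption(x, VEC_SUBSET_OFF_PROC_ENTRIES, PETSC_TRUE));
     for (PetscInt step = 0; step < nsteps; step++) {
       // each pass communicates with the same ranks (or a subset) as the first pass
       PetscCall(VecSetValues(x, nidx, idx, vals, ADD_VALUES));
       PetscCall(VecAssemblyBegin(x));
       PetscCall(VecAssemblyEnd(x));
     }
*/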

PetscErrorCode VecResetArray_MPI(Vec vin)
{
  Vec_MPI *v = (Vec_MPI *)vin->data;

  PetscFunctionBegin;
  v->array         = v->unplacedarray;
  v->unplacedarray = NULL;
  if (v->localrep) PetscCall(VecResetArray(v->localrep));
  PetscFunctionReturn(PETSC_SUCCESS);
}
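
/*
   Illustrative sketch of the place/reset pairing implemented by VecPlaceArray_MPI()
   and VecResetArray_MPI() above.  The buffer b is hypothetical and must hold at
   least the local length of x.

     PetscCall(VecPlaceArray(x, b)); // x now operates on b; the original array is saved in unplacedarray
     // ... use x ...
     PetscCall(VecResetArray(x));    // restore the original array
*/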

static PetscErrorCode VecAssemblySend_MPI_Private(MPI_Comm comm, const PetscMPIInt tag[], PetscMPIInt rankid, PetscMPIInt rank, void *sdata, MPI_Request req[], void *ctx)
{
  Vec                X   = (Vec)ctx;
  Vec_MPI           *x   = (Vec_MPI *)X->data;
  VecAssemblyHeader *hdr = (VecAssemblyHeader *)sdata;
  PetscInt           bs  = X->map->bs;

  PetscFunctionBegin;
  /* x->first_assembly_done indicates we are reusing a communication network. In that case, some
     messages can be empty, but we have to send them this time if we sent them before because the
     receiver is expecting them.
   */
  if (hdr->count || (x->first_assembly_done && x->sendptrs[rankid].ints)) {
    PetscCallMPI(MPI_Isend(x->sendptrs[rankid].ints, hdr->count, MPIU_INT, rank, tag[0], comm, &req[0]));
    PetscCallMPI(MPI_Isend(x->sendptrs[rankid].scalars, hdr->count, MPIU_SCALAR, rank, tag[1], comm, &req[1]));
  }
  if (hdr->bcount || (x->first_assembly_done && x->sendptrs[rankid].intb)) {
    PetscCallMPI(MPI_Isend(x->sendptrs[rankid].intb, hdr->bcount, MPIU_INT, rank, tag[2], comm, &req[2]));
    PetscCallMPI(MPI_Isend(x->sendptrs[rankid].scalarb, hdr->bcount * bs, MPIU_SCALAR, rank, tag[3], comm, &req[3]));
  }
  PetscFunctionReturn(PETSC_SUCCESS);
}

static PetscErrorCode VecAssemblyRecv_MPI_Private(MPI_Comm comm, const PetscMPIInt tag[], PetscMPIInt rank, void *rdata, MPI_Request req[], void *ctx)
{
  Vec                X   = (Vec)ctx;
  Vec_MPI           *x   = (Vec_MPI *)X->data;
  VecAssemblyHeader *hdr = (VecAssemblyHeader *)rdata;
  PetscInt           bs  = X->map->bs;
  VecAssemblyFrame  *frame;

  PetscFunctionBegin;
  PetscCall(PetscSegBufferGet(x->segrecvframe, 1, &frame));

  if (hdr->count) {
    PetscCall(PetscSegBufferGet(x->segrecvint, hdr->count, &frame->ints));
    PetscCallMPI(MPI_Irecv(frame->ints, hdr->count, MPIU_INT, rank, tag[0], comm, &req[0]));
    PetscCall(PetscSegBufferGet(x->segrecvscalar, hdr->count, &frame->scalars));
    PetscCallMPI(MPI_Irecv(frame->scalars, hdr->count, MPIU_SCALAR, rank, tag[1], comm, &req[1]));
    frame->pendings = 2;
  } else {
    frame->ints     = NULL;
    frame->scalars  = NULL;
    frame->pendings = 0;
  }

  if (hdr->bcount) {
    PetscCall(PetscSegBufferGet(x->segrecvint, hdr->bcount, &frame->intb));
    PetscCallMPI(MPI_Irecv(frame->intb, hdr->bcount, MPIU_INT, rank, tag[2], comm, &req[2]));
    PetscCall(PetscSegBufferGet(x->segrecvscalar, hdr->bcount * bs, &frame->scalarb));
    PetscCallMPI(MPI_Irecv(frame->scalarb, hdr->bcount * bs, MPIU_SCALAR, rank, tag[3], comm, &req[3]));
    frame->pendingb = 2;
  } else {
    frame->intb     = NULL;
    frame->scalarb  = NULL;
    frame->pendingb = 0;
  }
  PetscFunctionReturn(PETSC_SUCCESS);
}

static PetscErrorCode VecAssemblyBegin_MPI_BTS(Vec X)
{
  Vec_MPI *x = (Vec_MPI *)X->data;
  MPI_Comm comm;
  PetscInt i, j, jb, bs;

  PetscFunctionBegin;
  if (X->stash.donotstash) PetscFunctionReturn(PETSC_SUCCESS);

  PetscCall(PetscObjectGetComm((PetscObject)X, &comm));
  PetscCall(VecGetBlockSize(X, &bs));
  if (PetscDefined(USE_DEBUG)) {
    InsertMode addv;
    PetscCall(MPIU_Allreduce((PetscEnum *)&X->stash.insertmode, (PetscEnum *)&addv, 1, MPIU_ENUM, MPI_BOR, comm));
    PetscCheck(addv != (ADD_VALUES | INSERT_VALUES), comm, PETSC_ERR_ARG_NOTSAMETYPE, "Some processors inserted values while others added");
  }
  X->bstash.insertmode = X->stash.insertmode; /* Block stash implicitly tracks InsertMode of scalar stash */

  PetscCall(VecStashSortCompress_Private(&X->stash));
  PetscCall(VecStashSortCompress_Private(&X->bstash));

  if (!x->sendranks) {
    PetscMPIInt nowners, bnowners, *owners, *bowners;
    PetscInt    ntmp;
    PetscCall(VecStashGetOwnerList_Private(&X->stash, X->map, &nowners, &owners));
    PetscCall(VecStashGetOwnerList_Private(&X->bstash, X->map, &bnowners, &bowners));
    PetscCall(PetscMergeMPIIntArray(nowners, owners, bnowners, bowners, &ntmp, &x->sendranks));
    x->nsendranks = ntmp;
    PetscCall(PetscFree(owners));
    PetscCall(PetscFree(bowners));
    PetscCall(PetscMalloc1(x->nsendranks, &x->sendhdr));
    PetscCall(PetscCalloc1(x->nsendranks, &x->sendptrs));
  }
  for (i = 0, j = 0, jb = 0; i < x->nsendranks; i++) {
    PetscMPIInt rank         = x->sendranks[i];
    x->sendhdr[i].insertmode = X->stash.insertmode;
    /* Initialize pointers for non-empty stashes the first time around.  Subsequent assemblies with
     * VEC_SUBSET_OFF_PROC_ENTRIES will leave the old pointers (dangling because the stash has been collected) when
     * there is nothing new to send, so that size-zero messages get sent instead. */
    x->sendhdr[i].count = 0;
    if (X->stash.n) {
      x->sendptrs[i].ints    = &X->stash.idx[j];
      x->sendptrs[i].scalars = &X->stash.array[j];
      for (; j < X->stash.n && X->stash.idx[j] < X->map->range[rank + 1]; j++) x->sendhdr[i].count++;
    }
    x->sendhdr[i].bcount = 0;
    if (X->bstash.n) {
      x->sendptrs[i].intb    = &X->bstash.idx[jb];
      x->sendptrs[i].scalarb = &X->bstash.array[jb * bs];
      for (; jb < X->bstash.n && X->bstash.idx[jb] * bs < X->map->range[rank + 1]; jb++) x->sendhdr[i].bcount++;
    }
  }

  if (!x->segrecvint) PetscCall(PetscSegBufferCreate(sizeof(PetscInt), 1000, &x->segrecvint));
  if (!x->segrecvscalar) PetscCall(PetscSegBufferCreate(sizeof(PetscScalar), 1000, &x->segrecvscalar));
  if (!x->segrecvframe) PetscCall(PetscSegBufferCreate(sizeof(VecAssemblyFrame), 50, &x->segrecvframe));
  if (x->first_assembly_done) { /* this is not the first assembly */
    PetscMPIInt tag[4];
    for (i = 0; i < 4; i++) PetscCall(PetscCommGetNewTag(comm, &tag[i]));
    for (i = 0; i < x->nsendranks; i++) PetscCall(VecAssemblySend_MPI_Private(comm, tag, i, x->sendranks[i], x->sendhdr + i, x->sendreqs + 4 * i, X));
    for (i = 0; i < x->nrecvranks; i++) PetscCall(VecAssemblyRecv_MPI_Private(comm, tag, x->recvranks[i], x->recvhdr + i, x->recvreqs + 4 * i, X));
    x->use_status = PETSC_TRUE;
  } else { /* First time assembly */
    PetscCall(PetscCommBuildTwoSidedFReq(comm, 3, MPIU_INT, x->nsendranks, x->sendranks, (PetscInt *)x->sendhdr, &x->nrecvranks, &x->recvranks, &x->recvhdr, 4, &x->sendreqs, &x->recvreqs, VecAssemblySend_MPI_Private, VecAssemblyRecv_MPI_Private, X));
    x->use_status = PETSC_FALSE;
  }

  /* The first_assembly_done flag is only meaningful when assembly_subset is set.
     When assembly_subset is set, record that the first assembly has been done so that
     later assemblies can reuse the communication pattern built above.
   */
  x->first_assembly_done = x->assembly_subset;

  {
    PetscInt nstash, reallocs;
    PetscCall(VecStashGetInfo_Private(&X->stash, &nstash, &reallocs));
    PetscCall(PetscInfo(X, "Stash has %" PetscInt_FMT " entries, uses %" PetscInt_FMT " mallocs.\n", nstash, reallocs));
    PetscCall(VecStashGetInfo_Private(&X->bstash, &nstash, &reallocs));
    PetscCall(PetscInfo(X, "Block-Stash has %" PetscInt_FMT " entries, uses %" PetscInt_FMT " mallocs.\n", nstash, reallocs));
  }
  PetscFunctionReturn(PETSC_SUCCESS);
}

static PetscErrorCode VecAssemblyEnd_MPI_BTS(Vec X)
{
  Vec_MPI          *x  = (Vec_MPI *)X->data;
  PetscInt          bs = X->map->bs;
  PetscMPIInt       npending, *some_indices, r;
  MPI_Status       *some_statuses;
  PetscScalar      *xarray;
  VecAssemblyFrame *frame;

  PetscFunctionBegin;
  if (X->stash.donotstash) {
    X->stash.insertmode  = NOT_SET_VALUES;
    X->bstash.insertmode = NOT_SET_VALUES;
    PetscFunctionReturn(PETSC_SUCCESS);
  }

  PetscCheck(x->segrecvframe, PETSC_COMM_SELF, PETSC_ERR_PLIB, "Missing segrecvframe! Probably you forgot to call VecAssemblyBegin first");
  PetscCall(VecGetArray(X, &xarray));
  PetscCall(PetscSegBufferExtractInPlace(x->segrecvframe, &frame));
  PetscCall(PetscMalloc2(4 * x->nrecvranks, &some_indices, x->use_status ? 4 * x->nrecvranks : 0, &some_statuses));
  for (r = 0, npending = 0; r < x->nrecvranks; r++) npending += frame[r].pendings + frame[r].pendingb;
  while (npending > 0) {
    PetscMPIInt ndone = 0, ii;
    /* Filling MPI_Status fields requires some resources from the MPI library.  We skip it on the first assembly, or
     * when VEC_SUBSET_OFF_PROC_ENTRIES has not been set, because we could exchange exact sizes in the initial
     * rendezvous.  When the rendezvous is elided, however, we use MPI_Status to get actual message lengths, so that
     * subsequent assembly can set a proper subset of the values. */
    PetscCallMPI(MPI_Waitsome(4 * x->nrecvranks, x->recvreqs, &ndone, some_indices, x->use_status ? some_statuses : MPI_STATUSES_IGNORE));
    for (ii = 0; ii < ndone; ii++) {
      PetscInt     i     = some_indices[ii] / 4, j, k;
      InsertMode   imode = (InsertMode)x->recvhdr[i].insertmode;
      PetscInt    *recvint;
      PetscScalar *recvscalar;
      PetscBool    intmsg   = (PetscBool)(some_indices[ii] % 2 == 0);
      PetscBool    blockmsg = (PetscBool)((some_indices[ii] % 4) / 2 == 1);
      npending--;
      if (!blockmsg) { /* Scalar stash */
        PetscMPIInt count;
        if (--frame[i].pendings > 0) continue;
        if (x->use_status) {
          PetscCallMPI(MPI_Get_count(&some_statuses[ii], intmsg ? MPIU_INT : MPIU_SCALAR, &count));
        } else count = x->recvhdr[i].count;
        for (j = 0, recvint = frame[i].ints, recvscalar = frame[i].scalars; j < count; j++, recvint++) {
          PetscInt loc = *recvint - X->map->rstart;
          PetscCheck(*recvint >= X->map->rstart && X->map->rend > *recvint, PETSC_COMM_SELF, PETSC_ERR_PLIB, "Received vector entry %" PetscInt_FMT " out of local range [%" PetscInt_FMT ",%" PetscInt_FMT ")", *recvint, X->map->rstart, X->map->rend);
          switch (imode) {
          case ADD_VALUES:
            xarray[loc] += *recvscalar++;
            break;
          case INSERT_VALUES:
            xarray[loc] = *recvscalar++;
            break;
          default:
            SETERRQ(PETSC_COMM_SELF, PETSC_ERR_SUP, "Insert mode not supported 0x%x", imode);
          }
        }
      } else { /* Block stash */
        PetscMPIInt count;
        if (--frame[i].pendingb > 0) continue;
        if (x->use_status) {
          PetscCallMPI(MPI_Get_count(&some_statuses[ii], intmsg ? MPIU_INT : MPIU_SCALAR, &count));
          if (!intmsg) count /= bs; /* Convert from number of scalars to number of blocks */
        } else count = x->recvhdr[i].bcount;
        for (j = 0, recvint = frame[i].intb, recvscalar = frame[i].scalarb; j < count; j++, recvint++) {
          PetscInt loc = (*recvint) * bs - X->map->rstart;
          switch (imode) {
          case ADD_VALUES:
            for (k = loc; k < loc + bs; k++) xarray[k] += *recvscalar++;
            break;
          case INSERT_VALUES:
            for (k = loc; k < loc + bs; k++) xarray[k] = *recvscalar++;
            break;
          default:
            SETERRQ(PETSC_COMM_SELF, PETSC_ERR_SUP, "Insert mode not supported 0x%x", imode);
          }
        }
      }
    }
  }
  PetscCall(VecRestoreArray(X, &xarray));
  PetscCallMPI(MPI_Waitall(4 * x->nsendranks, x->sendreqs, MPI_STATUSES_IGNORE));
  PetscCall(PetscFree2(some_indices, some_statuses));
  if (x->assembly_subset) {
    PetscCall(PetscSegBufferExtractInPlace(x->segrecvint, NULL));
    PetscCall(PetscSegBufferExtractInPlace(x->segrecvscalar, NULL));
  } else {
    PetscCall(VecAssemblyReset_MPI(X));
  }

  X->stash.insertmode  = NOT_SET_VALUES;
  X->bstash.insertmode = NOT_SET_VALUES;
  PetscCall(VecStashScatterEnd_Private(&X->stash));
  PetscCall(VecStashScatterEnd_Private(&X->bstash));
  PetscFunctionReturn(PETSC_SUCCESS);
}
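
/*
   Illustrative sketch of the user-level sequence that exercises the two assembly
   routines above; the indices and values set from this rank are assumed to exist.
   Off-process entries are stashed by VecSetValues() and moved by the Isend/Irecv
   pairs posted in VecAssemblyBegin_MPI_BTS().

     PetscCall(VecSetValues(x, nidx, idx, vals, ADD_VALUES)); // may target entries owned by other ranks
     PetscCall(VecAssemblyBegin(x));                          // posts sends/receives
     PetscCall(VecAssemblyEnd(x));                            // waits and unpacks into the local array
*/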

PetscErrorCode VecAssemblyReset_MPI(Vec X)
{
  Vec_MPI *x = (Vec_MPI *)X->data;

  PetscFunctionBegin;
  PetscCall(PetscFree(x->sendreqs));
  PetscCall(PetscFree(x->recvreqs));
  PetscCall(PetscFree(x->sendranks));
  PetscCall(PetscFree(x->recvranks));
  PetscCall(PetscFree(x->sendhdr));
  PetscCall(PetscFree(x->recvhdr));
  PetscCall(PetscFree(x->sendptrs));
  PetscCall(PetscSegBufferDestroy(&x->segrecvint));
  PetscCall(PetscSegBufferDestroy(&x->segrecvscalar));
  PetscCall(PetscSegBufferDestroy(&x->segrecvframe));
  PetscFunctionReturn(PETSC_SUCCESS);
}

static PetscErrorCode VecSetFromOptions_MPI(Vec X, PetscOptionItems *PetscOptionsObject)
{
#if !defined(PETSC_HAVE_MPIUNI)
  PetscBool flg = PETSC_FALSE, set;

  PetscFunctionBegin;
  PetscOptionsHeadBegin(PetscOptionsObject, "VecMPI Options");
  PetscCall(PetscOptionsBool("-vec_assembly_legacy", "Use MPI 1 version of assembly", "", flg, &flg, &set));
  if (set) {
    X->ops->assemblybegin = flg ? VecAssemblyBegin_MPI : VecAssemblyBegin_MPI_BTS;
    X->ops->assemblyend   = flg ? VecAssemblyEnd_MPI : VecAssemblyEnd_MPI_BTS;
  }
  PetscOptionsHeadEnd();
#else
  PetscFunctionBegin;
  X->ops->assemblybegin = VecAssemblyBegin_MPI;
  X->ops->assemblyend   = VecAssemblyEnd_MPI;
#endif
  PetscFunctionReturn(PETSC_SUCCESS);
}
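
/*
   Sketch of how the option above is typically selected at run time (assumes an
   application that calls VecSetFromOptions() on its vectors):

     ./app -vec_assembly_legacy    # switch to the MPI-1 stash-based assembly path
*/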

static struct _VecOps DvOps = {PetscDesignatedInitializer(duplicate, VecDuplicate_MPI), /* 1 */
                               PetscDesignatedInitializer(duplicatevecs, VecDuplicateVecs_Default),
                               PetscDesignatedInitializer(destroyvecs, VecDestroyVecs_Default),
                               PetscDesignatedInitializer(dot, VecDot_MPI),
                               PetscDesignatedInitializer(mdot, VecMDot_MPI),
                               PetscDesignatedInitializer(norm, VecNorm_MPI),
                               PetscDesignatedInitializer(tdot, VecTDot_MPI),
                               PetscDesignatedInitializer(mtdot, VecMTDot_MPI),
                               PetscDesignatedInitializer(scale, VecScale_Seq),
                               PetscDesignatedInitializer(copy, VecCopy_Seq), /* 10 */
                               PetscDesignatedInitializer(set, VecSet_Seq),
                               PetscDesignatedInitializer(swap, VecSwap_Seq),
                               PetscDesignatedInitializer(axpy, VecAXPY_Seq),
                               PetscDesignatedInitializer(axpby, VecAXPBY_Seq),
                               PetscDesignatedInitializer(maxpy, VecMAXPY_Seq),
                               PetscDesignatedInitializer(aypx, VecAYPX_Seq),
                               PetscDesignatedInitializer(waxpy, VecWAXPY_Seq),
                               PetscDesignatedInitializer(axpbypcz, VecAXPBYPCZ_Seq),
                               PetscDesignatedInitializer(pointwisemult, VecPointwiseMult_Seq),
                               PetscDesignatedInitializer(pointwisedivide, VecPointwiseDivide_Seq),
                               PetscDesignatedInitializer(setvalues, VecSetValues_MPI), /* 20 */
                               PetscDesignatedInitializer(assemblybegin, VecAssemblyBegin_MPI_BTS),
                               PetscDesignatedInitializer(assemblyend, VecAssemblyEnd_MPI_BTS),
                               PetscDesignatedInitializer(getarray, NULL),
                               PetscDesignatedInitializer(getsize, VecGetSize_MPI),
                               PetscDesignatedInitializer(getlocalsize, VecGetSize_Seq),
                               PetscDesignatedInitializer(restorearray, NULL),
                               PetscDesignatedInitializer(max, VecMax_MPI),
                               PetscDesignatedInitializer(min, VecMin_MPI),
                               PetscDesignatedInitializer(setrandom, VecSetRandom_Seq),
                               PetscDesignatedInitializer(setoption, VecSetOption_MPI),
                               PetscDesignatedInitializer(setvaluesblocked, VecSetValuesBlocked_MPI),
                               PetscDesignatedInitializer(destroy, VecDestroy_MPI),
                               PetscDesignatedInitializer(view, VecView_MPI),
                               PetscDesignatedInitializer(placearray, VecPlaceArray_MPI),
                               PetscDesignatedInitializer(replacearray, VecReplaceArray_Seq),
                               PetscDesignatedInitializer(dot_local, VecDot_Seq),
                               PetscDesignatedInitializer(tdot_local, VecTDot_Seq),
                               PetscDesignatedInitializer(norm_local, VecNorm_Seq),
                               PetscDesignatedInitializer(mdot_local, VecMDot_Seq),
                               PetscDesignatedInitializer(mtdot_local, VecMTDot_Seq),
                               PetscDesignatedInitializer(load, VecLoad_Default),
                               PetscDesignatedInitializer(reciprocal, VecReciprocal_Default),
                               PetscDesignatedInitializer(conjugate, VecConjugate_Seq),
                               PetscDesignatedInitializer(setlocaltoglobalmapping, NULL),
                               PetscDesignatedInitializer(setvalueslocal, NULL),
                               PetscDesignatedInitializer(resetarray, VecResetArray_MPI),
                               PetscDesignatedInitializer(setfromoptions, VecSetFromOptions_MPI), /* set from options */
                               PetscDesignatedInitializer(maxpointwisedivide, VecMaxPointwiseDivide_Seq),
                               PetscDesignatedInitializer(pointwisemax, VecPointwiseMax_Seq),
                               PetscDesignatedInitializer(pointwisemaxabs, VecPointwiseMaxAbs_Seq),
                               PetscDesignatedInitializer(pointwisemin, VecPointwiseMin_Seq),
                               PetscDesignatedInitializer(getvalues, VecGetValues_MPI),
                               PetscDesignatedInitializer(sqrt, NULL),
                               PetscDesignatedInitializer(abs, NULL),
                               PetscDesignatedInitializer(exp, NULL),
                               PetscDesignatedInitializer(log, NULL),
                               PetscDesignatedInitializer(shift, NULL),
                               PetscDesignatedInitializer(create, NULL), /* really? */
                               PetscDesignatedInitializer(stridegather, VecStrideGather_Default),
                               PetscDesignatedInitializer(stridescatter, VecStrideScatter_Default),
                               PetscDesignatedInitializer(dotnorm2, NULL),
                               PetscDesignatedInitializer(getsubvector, NULL),
                               PetscDesignatedInitializer(restoresubvector, NULL),
                               PetscDesignatedInitializer(getarrayread, NULL),
                               PetscDesignatedInitializer(restorearrayread, NULL),
                               PetscDesignatedInitializer(stridesubsetgather, VecStrideSubSetGather_Default),
                               PetscDesignatedInitializer(stridesubsetscatter, VecStrideSubSetScatter_Default),
                               PetscDesignatedInitializer(viewnative, VecView_MPI),
                               PetscDesignatedInitializer(loadnative, NULL),
                               PetscDesignatedInitializer(createlocalvector, NULL),
                               PetscDesignatedInitializer(getlocalvector, NULL),
                               PetscDesignatedInitializer(restorelocalvector, NULL),
                               PetscDesignatedInitializer(getlocalvectorread, NULL),
                               PetscDesignatedInitializer(restorelocalvectorread, NULL),
                               PetscDesignatedInitializer(bindtocpu, NULL),
                               PetscDesignatedInitializer(getarraywrite, NULL),
                               PetscDesignatedInitializer(restorearraywrite, NULL),
                               PetscDesignatedInitializer(getarrayandmemtype, NULL),
                               PetscDesignatedInitializer(restorearrayandmemtype, NULL),
                               PetscDesignatedInitializer(getarrayreadandmemtype, NULL),
                               PetscDesignatedInitializer(restorearrayreadandmemtype, NULL),
                               PetscDesignatedInitializer(getarraywriteandmemtype, NULL),
                               PetscDesignatedInitializer(restorearraywriteandmemtype, NULL),
                               PetscDesignatedInitializer(concatenate, NULL),
                               PetscDesignatedInitializer(sum, NULL),
                               PetscDesignatedInitializer(setpreallocationcoo, VecSetPreallocationCOO_MPI),
                               PetscDesignatedInitializer(setvaluescoo, VecSetValuesCOO_MPI)};

/*
    VecCreate_MPI_Private - Basic create routine called by VecCreate_MPI() (i.e. VecCreateMPI()),
    VecCreateMPIWithArray(), VecCreate_Shared() (i.e. VecCreateShared()), VecCreateGhost(),
    VecCreateGhostWithArray(), VecDuplicate_MPI(), and VecDuplicate_Shared()

    If alloc is true and array is NULL then this routine allocates the space, otherwise
    no space is allocated.
*/
PetscErrorCode VecCreate_MPI_Private(Vec v, PetscBool alloc, PetscInt nghost, const PetscScalar array[])
{
  Vec_MPI *s;

  PetscFunctionBegin;
  PetscCall(PetscNew(&s));
  v->data = (void *)s;
  PetscCall(PetscMemcpy(v->ops, &DvOps, sizeof(DvOps)));
  s->nghost      = nghost;
  v->petscnative = PETSC_TRUE;
  if (array) v->offloadmask = PETSC_OFFLOAD_CPU;

  PetscCall(PetscLayoutSetUp(v->map));

  s->array           = (PetscScalar *)array;
  s->array_allocated = NULL;
  if (alloc && !array) {
    PetscInt n = v->map->n + nghost;
    PetscCall(PetscCalloc1(n, &s->array));
    s->array_allocated = s->array;
  }

  /* By default parallel vectors do not have local representation */
  s->localrep    = NULL;
  s->localupdate = NULL;

  v->stash.insertmode  = NOT_SET_VALUES;
  v->bstash.insertmode = NOT_SET_VALUES;
  /* create the stashes. The block-size for bstash is set later when
     VecSetValuesBlocked is called.
  */
  PetscCall(VecStashCreate_Private(PetscObjectComm((PetscObject)v), 1, &v->stash));
  PetscCall(VecStashCreate_Private(PetscObjectComm((PetscObject)v), PetscAbs(v->map->bs), &v->bstash));

#if defined(PETSC_HAVE_MATLAB)
  PetscCall(PetscObjectComposeFunction((PetscObject)v, "PetscMatlabEnginePut_C", VecMatlabEnginePut_Default));
  PetscCall(PetscObjectComposeFunction((PetscObject)v, "PetscMatlabEngineGet_C", VecMatlabEngineGet_Default));
#endif
  PetscCall(PetscObjectChangeTypeName((PetscObject)v, VECMPI));
  PetscFunctionReturn(PETSC_SUCCESS);
}

/*MC
   VECMPI - VECMPI = "mpi" - The basic parallel vector

   Options Database Keys:
. -vec_type mpi - sets the vector type to VECMPI during a call to VecSetFromOptions()

  Level: beginner

.seealso: `VecCreate()`, `VecSetType()`, `VecSetFromOptions()`, `VecCreateMPIWithArray()`, `VECMPI`, `VecType`, `VecCreateMPI()`
M*/
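
/*
   Illustrative sketch (hypothetical sizes): creating a VECMPI vector through the
   generic interface rather than the convenience constructors below.

     Vec x;
     PetscCall(VecCreate(PETSC_COMM_WORLD, &x));
     PetscCall(VecSetSizes(x, PETSC_DECIDE, 100));
     PetscCall(VecSetType(x, VECMPI)); // or VecSetFromOptions(x) together with -vec_type mpi
     PetscCall(VecDestroy(&x));
*/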

PetscErrorCode VecCreate_MPI(Vec vv)
{
  PetscFunctionBegin;
  PetscCall(VecCreate_MPI_Private(vv, PETSC_TRUE, 0, NULL));
  PetscFunctionReturn(PETSC_SUCCESS);
}

/*MC
   VECSTANDARD = "standard" - A VECSEQ on one process and VECMPI on more than one process

   Options Database Keys:
. -vec_type standard - sets a vector type to standard on calls to VecSetFromOptions()

  Level: beginner

.seealso: `VecCreateSeq()`, `VecCreateMPI()`
M*/

PETSC_EXTERN PetscErrorCode VecCreate_Standard(Vec v)
{
  PetscMPIInt size;

  PetscFunctionBegin;
  PetscCallMPI(MPI_Comm_size(PetscObjectComm((PetscObject)v), &size));
  if (size == 1) {
    PetscCall(VecSetType(v, VECSEQ));
  } else {
    PetscCall(VecSetType(v, VECMPI));
  }
  PetscFunctionReturn(PETSC_SUCCESS);
}

/*@C
   VecCreateMPIWithArray - Creates a parallel, array-style vector,
   where the user provides the array space to store the vector values.

   Collective

   Input Parameters:
+  comm  - the MPI communicator to use
.  bs    - block size, same meaning as VecSetBlockSize()
.  n     - local vector length, cannot be PETSC_DECIDE
.  N     - global vector length (or PETSC_DECIDE to have calculated)
-  array - the user provided array to store the vector values

   Output Parameter:
.  vv - the vector

   Notes:
   Use VecDuplicate() or VecDuplicateVecs() to form additional vectors of the
   same type as an existing vector.

   If the user-provided array is NULL, then VecPlaceArray() can be used
   at a later stage to SET the array for storing the vector values.

   PETSc does NOT free the array when the vector is destroyed via VecDestroy().
   The user should not free the array until the vector is destroyed.

   Level: intermediate

.seealso: `VecCreateSeqWithArray()`, `VecCreate()`, `VecDuplicate()`, `VecDuplicateVecs()`, `VecCreateGhost()`,
          `VecCreateMPI()`, `VecCreateGhostWithArray()`, `VecPlaceArray()`

@*/
PetscErrorCode VecCreateMPIWithArray(MPI_Comm comm, PetscInt bs, PetscInt n, PetscInt N, const PetscScalar array[], Vec *vv)
{
  PetscFunctionBegin;
  PetscCheck(n != PETSC_DECIDE, PETSC_COMM_SELF, PETSC_ERR_ARG_OUTOFRANGE, "Must set local size of vector");
  PetscCall(PetscSplitOwnership(comm, &n, &N));
  PetscCall(VecCreate(comm, vv));
  PetscCall(VecSetSizes(*vv, n, N));
  PetscCall(VecSetBlockSize(*vv, bs));
  PetscCall(VecCreate_MPI_Private(*vv, PETSC_FALSE, 0, array));
  PetscFunctionReturn(PETSC_SUCCESS);
}
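
/*
   Illustrative sketch of VecCreateMPIWithArray(); nlocal is a hypothetical local
   length.  The user-provided buffer is not freed by VecDestroy(), so it is
   released explicitly after the vector.

     Vec          x;
     PetscScalar *a;
     PetscCall(PetscMalloc1(nlocal, &a));
     PetscCall(VecCreateMPIWithArray(PETSC_COMM_WORLD, 1, nlocal, PETSC_DECIDE, a, &x));
     // ... use x ...
     PetscCall(VecDestroy(&x));
     PetscCall(PetscFree(a));
*/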

/*@C
   VecCreateGhostWithArray - Creates a parallel vector with ghost padding on each processor;
   the caller allocates the array space.

   Collective

   Input Parameters:
+  comm - the MPI communicator to use
.  n - local vector length
.  N - global vector length (or PETSC_DECIDE to have calculated if n is given)
.  nghost - number of local ghost points
.  ghosts - global indices of ghost points (or NULL if not needed), these do not need to be in increasing order (sorted)
-  array - the space to store the vector values (as long as n + nghost)

   Output Parameter:
.  vv - the global vector representation (without ghost points as part of vector)

   Notes:
   Use VecGhostGetLocalForm() to access the local, ghosted representation
   of the vector.

   This also automatically sets the ISLocalToGlobalMapping() for this vector.

   Level: advanced

.seealso: `VecCreate()`, `VecGhostGetLocalForm()`, `VecGhostRestoreLocalForm()`,
          `VecCreateGhost()`, `VecCreateSeqWithArray()`, `VecCreateMPIWithArray()`,
          `VecCreateGhostBlock()`, `VecCreateGhostBlockWithArray()`, `VecMPISetGhost()`

@*/
PetscErrorCode VecCreateGhostWithArray(MPI_Comm comm, PetscInt n, PetscInt N, PetscInt nghost, const PetscInt ghosts[], const PetscScalar array[], Vec *vv)
{
  Vec_MPI               *w;
  PetscScalar           *larray;
  IS                     from, to;
  ISLocalToGlobalMapping ltog;
  PetscInt               rstart, i, *indices;

  PetscFunctionBegin;
  *vv = NULL;

  PetscCheck(n != PETSC_DECIDE, PETSC_COMM_SELF, PETSC_ERR_ARG_OUTOFRANGE, "Must set local size");
  PetscCheck(nghost != PETSC_DECIDE, PETSC_COMM_SELF, PETSC_ERR_ARG_OUTOFRANGE, "Must set local ghost size");
  PetscCheck(nghost >= 0, PETSC_COMM_SELF, PETSC_ERR_ARG_OUTOFRANGE, "Ghost length must be >= 0");
  PetscCall(PetscSplitOwnership(comm, &n, &N));
  /* Create global representation */
  PetscCall(VecCreate(comm, vv));
  PetscCall(VecSetSizes(*vv, n, N));
  PetscCall(VecCreate_MPI_Private(*vv, PETSC_TRUE, nghost, array));
  w = (Vec_MPI *)(*vv)->data;
  /* Create local representation */
  PetscCall(VecGetArray(*vv, &larray));
  PetscCall(VecCreateSeqWithArray(PETSC_COMM_SELF, 1, n + nghost, larray, &w->localrep));
  PetscCall(VecRestoreArray(*vv, &larray));

  /*
       Create scatter context for scattering (updating) ghost values
  */
  PetscCall(ISCreateGeneral(comm, nghost, ghosts, PETSC_COPY_VALUES, &from));
  PetscCall(ISCreateStride(PETSC_COMM_SELF, nghost, n, 1, &to));
  PetscCall(VecScatterCreate(*vv, from, w->localrep, to, &w->localupdate));
  PetscCall(ISDestroy(&to));
  PetscCall(ISDestroy(&from));

  /* set local to global mapping for ghosted vector */
  PetscCall(PetscMalloc1(n + nghost, &indices));
  PetscCall(VecGetOwnershipRange(*vv, &rstart, NULL));
  for (i = 0; i < n; i++) indices[i] = rstart + i;
  for (i = 0; i < nghost; i++) indices[n + i] = ghosts[i];
  PetscCall(ISLocalToGlobalMappingCreate(comm, 1, n + nghost, indices, PETSC_OWN_POINTER, &ltog));
  PetscCall(VecSetLocalToGlobalMapping(*vv, ltog));
  PetscCall(ISLocalToGlobalMappingDestroy(&ltog));
  PetscFunctionReturn(PETSC_SUCCESS);
}
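
/*
   Illustrative sketch of VecCreateGhostWithArray(); n and the ghost indices are
   hypothetical.  The caller-provided buffer must hold n + nghost entries: owned
   values first, then the ghost values.

     Vec          x, xlocal;
     PetscScalar *a;
     PetscInt     ghosts[2] = {0, 42}; // global indices owned by other ranks
     PetscCall(PetscMalloc1(n + 2, &a));
     PetscCall(VecCreateGhostWithArray(PETSC_COMM_WORLD, n, PETSC_DECIDE, 2, ghosts, a, &x));
     PetscCall(VecGhostGetLocalForm(x, &xlocal));     // sequential view of length n + 2
     PetscCall(VecGhostRestoreLocalForm(x, &xlocal));
*/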

/*@
   VecCreateGhost - Creates a parallel vector with ghost padding on each processor.

   Collective

   Input Parameters:
+  comm - the MPI communicator to use
.  n - local vector length
.  N - global vector length (or PETSC_DECIDE to have calculated if n is given)
.  nghost - number of local ghost points
-  ghosts - global indices of ghost points, these do not need to be in increasing order (sorted)

   Output Parameter:
.  vv - the global vector representation (without ghost points as part of vector)

   Notes:
   Use VecGhostGetLocalForm() to access the local, ghosted representation
   of the vector.

   This also automatically sets the ISLocalToGlobalMapping() for this vector.

   Level: advanced

.seealso: `VecCreateSeq()`, `VecCreate()`, `VecDuplicate()`, `VecDuplicateVecs()`, `VecCreateMPI()`,
          `VecGhostGetLocalForm()`, `VecGhostRestoreLocalForm()`, `VecGhostUpdateBegin()`,
          `VecCreateGhostWithArray()`, `VecCreateMPIWithArray()`, `VecGhostUpdateEnd()`,
          `VecCreateGhostBlock()`, `VecCreateGhostBlockWithArray()`, `VecMPISetGhost()`

@*/
PetscErrorCode VecCreateGhost(MPI_Comm comm, PetscInt n, PetscInt N, PetscInt nghost, const PetscInt ghosts[], Vec *vv)
{
  PetscFunctionBegin;
  PetscCall(VecCreateGhostWithArray(comm, n, N, nghost, ghosts, NULL, vv));
  PetscFunctionReturn(PETSC_SUCCESS);
}
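
/*
   Illustrative sketch of the usual update cycle for a ghosted vector created with
   VecCreateGhost(); n and the ghost indices are hypothetical.

     Vec      x;
     PetscInt ghosts[2] = {0, 42};
     PetscCall(VecCreateGhost(PETSC_COMM_WORLD, n, PETSC_DECIDE, 2, ghosts, &x));
     // ... set locally owned entries ...
     PetscCall(VecGhostUpdateBegin(x, INSERT_VALUES, SCATTER_FORWARD)); // copy owner values into ghost slots
     PetscCall(VecGhostUpdateEnd(x, INSERT_VALUES, SCATTER_FORWARD));
*/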

/*@
   VecMPISetGhost - Sets the ghost points for an MPI ghost vector

   Collective

   Input Parameters:
+  vv - the MPI vector
.  nghost - number of local ghost points
-  ghosts - global indices of ghost points, these do not need to be in increasing order (sorted)

   Notes:
   Use VecGhostGetLocalForm() to access the local, ghosted representation
   of the vector.

   This also automatically sets the ISLocalToGlobalMapping() for this vector.

   You must call this AFTER you have set the type of the vector (with VecSetType()) and the size (with VecSetSizes()).

   Level: advanced

.seealso: `VecCreateSeq()`, `VecCreate()`, `VecDuplicate()`, `VecDuplicateVecs()`, `VecCreateMPI()`,
          `VecGhostGetLocalForm()`, `VecGhostRestoreLocalForm()`, `VecGhostUpdateBegin()`,
          `VecCreateGhostWithArray()`, `VecCreateMPIWithArray()`, `VecGhostUpdateEnd()`,
          `VecCreateGhostBlock()`, `VecCreateGhostBlockWithArray()`

@*/
PetscErrorCode VecMPISetGhost(Vec vv, PetscInt nghost, const PetscInt ghosts[])
{
  PetscBool flg;

  PetscFunctionBegin;
  PetscCall(PetscObjectTypeCompare((PetscObject)vv, VECMPI, &flg));
  /* if the vector is already a fully built VECMPI, destroy it and rebuild it with ghosting */
  if (flg) {
    PetscInt               n, N;
    Vec_MPI               *w;
    PetscScalar           *larray;
    IS                     from, to;
    ISLocalToGlobalMapping ltog;
    PetscInt               rstart, i, *indices;
    MPI_Comm               comm;

    PetscCall(PetscObjectGetComm((PetscObject)vv, &comm));
    n = vv->map->n;
    N = vv->map->N;
    PetscUseTypeMethod(vv, destroy);
    PetscCall(VecSetSizes(vv, n, N));
    PetscCall(VecCreate_MPI_Private(vv, PETSC_TRUE, nghost, NULL));
    w = (Vec_MPI *)(vv)->data;
    /* Create local representation */
    PetscCall(VecGetArray(vv, &larray));
    PetscCall(VecCreateSeqWithArray(PETSC_COMM_SELF, 1, n + nghost, larray, &w->localrep));
    PetscCall(VecRestoreArray(vv, &larray));

    /*
     Create scatter context for scattering (updating) ghost values
     */
    PetscCall(ISCreateGeneral(comm, nghost, ghosts, PETSC_COPY_VALUES, &from));
    PetscCall(ISCreateStride(PETSC_COMM_SELF, nghost, n, 1, &to));
    PetscCall(VecScatterCreate(vv, from, w->localrep, to, &w->localupdate));
    PetscCall(ISDestroy(&to));
    PetscCall(ISDestroy(&from));

    /* set local to global mapping for ghosted vector */
    PetscCall(PetscMalloc1(n + nghost, &indices));
    PetscCall(VecGetOwnershipRange(vv, &rstart, NULL));

    for (i = 0; i < n; i++) indices[i] = rstart + i;
    for (i = 0; i < nghost; i++) indices[n + i] = ghosts[i];

    PetscCall(ISLocalToGlobalMappingCreate(comm, 1, n + nghost, indices, PETSC_OWN_POINTER, &ltog));
    PetscCall(VecSetLocalToGlobalMapping(vv, ltog));
    PetscCall(ISLocalToGlobalMappingDestroy(&ltog));
  } else {
    PetscCheck(vv->ops->create != VecCreate_MPI, PetscObjectComm((PetscObject)vv), PETSC_ERR_ARG_WRONGSTATE, "Must set local or global size before setting ghosting");
    PetscCheck(((PetscObject)vv)->type_name, PetscObjectComm((PetscObject)vv), PETSC_ERR_ARG_WRONGSTATE, "Must set type to VECMPI before ghosting");
  }
  PetscFunctionReturn(PETSC_SUCCESS);
}
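
/*
   Illustrative sketch of adding ghosting to an already typed and sized VECMPI, as
   required by the Notes above; n and the ghost indices are hypothetical.

     Vec      x;
     PetscInt ghosts[2] = {0, 42};
     PetscCall(VecCreate(PETSC_COMM_WORLD, &x));
     PetscCall(VecSetSizes(x, n, PETSC_DECIDE));
     PetscCall(VecSetType(x, VECMPI));
     PetscCall(VecMPISetGhost(x, 2, ghosts));
*/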

/* ------------------------------------------------------------------------------------------*/
/*@C
   VecCreateGhostBlockWithArray - Creates a parallel vector with ghost padding on each processor;
   the caller allocates the array space. Indices in the ghost region are based on blocks.

   Collective

   Input Parameters:
+  comm - the MPI communicator to use
.  bs - block size
.  n - local vector length
.  N - global vector length (or PETSC_DECIDE to have calculated if n is given)
.  nghost - number of local ghost blocks
.  ghosts - global indices of ghost blocks (or NULL if not needed), counts are by block not by index, these do not need to be in increasing order (sorted)
-  array - the space to store the vector values (as long as n + nghost*bs)

   Output Parameter:
.  vv - the global vector representation (without ghost points as part of vector)

   Notes:
   Use VecGhostGetLocalForm() to access the local, ghosted representation
   of the vector.

   n is the local vector size (total local size not the number of blocks) while nghost
   is the number of blocks in the ghost portion, i.e. the number of elements in the ghost
   portion is bs*nghost

   Level: advanced

.seealso: `VecCreate()`, `VecGhostGetLocalForm()`, `VecGhostRestoreLocalForm()`,
          `VecCreateGhost()`, `VecCreateSeqWithArray()`, `VecCreateMPIWithArray()`,
          `VecCreateGhostWithArray()`, `VecCreateGhostBlock()`

@*/
PetscErrorCode VecCreateGhostBlockWithArray(MPI_Comm comm, PetscInt bs, PetscInt n, PetscInt N, PetscInt nghost, const PetscInt ghosts[], const PetscScalar array[], Vec *vv)
{
  Vec_MPI               *w;
  PetscScalar           *larray;
  IS                     from, to;
  ISLocalToGlobalMapping ltog;
  PetscInt               rstart, i, nb, *indices;

  PetscFunctionBegin;
  *vv = NULL;

  PetscCheck(n != PETSC_DECIDE, PETSC_COMM_SELF, PETSC_ERR_ARG_OUTOFRANGE, "Must set local size");
  PetscCheck(nghost != PETSC_DECIDE, PETSC_COMM_SELF, PETSC_ERR_ARG_OUTOFRANGE, "Must set local ghost size");
  PetscCheck(nghost >= 0, PETSC_COMM_SELF, PETSC_ERR_ARG_OUTOFRANGE, "Ghost length must be >= 0");
  PetscCheck(n % bs == 0, PETSC_COMM_SELF, PETSC_ERR_ARG_INCOMP, "Local size must be a multiple of block size");
  PetscCall(PetscSplitOwnership(comm, &n, &N));
  /* Create global representation */
  PetscCall(VecCreate(comm, vv));
  PetscCall(VecSetSizes(*vv, n, N));
  PetscCall(VecSetBlockSize(*vv, bs));
  PetscCall(VecCreate_MPI_Private(*vv, PETSC_TRUE, nghost * bs, array));
  w = (Vec_MPI *)(*vv)->data;
  /* Create local representation */
  PetscCall(VecGetArray(*vv, &larray));
  PetscCall(VecCreateSeqWithArray(PETSC_COMM_SELF, bs, n + bs * nghost, larray, &w->localrep));
  PetscCall(VecRestoreArray(*vv, &larray));

  /*
       Create scatter context for scattering (updating) ghost values
  */
  PetscCall(ISCreateBlock(comm, bs, nghost, ghosts, PETSC_COPY_VALUES, &from));
  PetscCall(ISCreateStride(PETSC_COMM_SELF, bs * nghost, n, 1, &to));
  PetscCall(VecScatterCreate(*vv, from, w->localrep, to, &w->localupdate));
  PetscCall(ISDestroy(&to));
  PetscCall(ISDestroy(&from));

  /* set local to global mapping for ghosted vector */
  nb = n / bs;
  PetscCall(PetscMalloc1(nb + nghost, &indices));
  PetscCall(VecGetOwnershipRange(*vv, &rstart, NULL));
  rstart = rstart / bs;

  for (i = 0; i < nb; i++) indices[i] = rstart + i;
  for (i = 0; i < nghost; i++) indices[nb + i] = ghosts[i];

  PetscCall(ISLocalToGlobalMappingCreate(comm, bs, nb + nghost, indices, PETSC_OWN_POINTER, &ltog));
  PetscCall(VecSetLocalToGlobalMapping(*vv, ltog));
  PetscCall(ISLocalToGlobalMappingDestroy(&ltog));
  PetscFunctionReturn(PETSC_SUCCESS);
}
/*@
   VecCreateGhostBlock - Creates a parallel vector with ghost padding on each processor.
        The indexing of the ghost points is done by blocks.

   Collective

   Input Parameters:
+  comm - the MPI communicator to use
.  bs - the block size
.  n - local vector length
.  N - global vector length (or PETSC_DECIDE to have calculated if n is given)
.  nghost - number of local ghost blocks
-  ghosts - global indices of ghost blocks, counts are by block, not by individual index, these do not need to be in increasing order (sorted)

   Output Parameter:
.  vv - the global vector representation (without ghost points as part of vector)

   Notes:
   Use VecGhostGetLocalForm() to access the local, ghosted representation
   of the vector.

   n is the local vector size (total local size not the number of blocks) while nghost
   is the number of blocks in the ghost portion, i.e. the number of elements in the ghost
   portion is bs*nghost

   Level: advanced

.seealso: `VecCreateSeq()`, `VecCreate()`, `VecDuplicate()`, `VecDuplicateVecs()`, `VecCreateMPI()`,
          `VecGhostGetLocalForm()`, `VecGhostRestoreLocalForm()`,
          `VecCreateGhostWithArray()`, `VecCreateMPIWithArray()`, `VecCreateGhostBlockWithArray()`

@*/
PetscErrorCode VecCreateGhostBlock(MPI_Comm comm, PetscInt bs, PetscInt n, PetscInt N, PetscInt nghost, const PetscInt ghosts[], Vec *vv)
{
  PetscFunctionBegin;
  PetscCall(VecCreateGhostBlockWithArray(comm, bs, n, N, nghost, ghosts, NULL, vv));
  PetscFunctionReturn(PETSC_SUCCESS);
}
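
/*
   Illustrative sketch of VecCreateGhostBlock() with block size 2; n and the ghost
   block index are hypothetical.  Note that n counts scalar entries while nghost
   counts blocks, so the local form has n + 2 entries here.

     Vec      x;
     PetscInt ghosts[1] = {5}; // ghost block 5, i.e. scalar entries 10 and 11
     PetscCall(VecCreateGhostBlock(PETSC_COMM_WORLD, 2, n, PETSC_DECIDE, 1, ghosts, &x));
*/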
910