
/*
   This file contains routines for Parallel vector operations.
 */
#include <petscsys.h>
#include <../src/vec/vec/impls/mpi/pvecimpl.h> /*I  "petscvec.h"   I*/

PETSC_INTERN PetscErrorCode VecView_MPI_Draw(Vec, PetscViewer);

PetscErrorCode VecPlaceArray_MPI(Vec vin, const PetscScalar *a)
{
  Vec_MPI *v = (Vec_MPI *)vin->data;

  PetscFunctionBegin;
  PetscCheck(!v->unplacedarray, PETSC_COMM_SELF, PETSC_ERR_ARG_WRONGSTATE, "VecPlaceArray() was already called on this vector, without a call to VecResetArray()");
  v->unplacedarray = v->array; /* save previous array so reset can bring it back */
  v->array         = (PetscScalar *)a;
  if (v->localrep) PetscCall(VecPlaceArray(v->localrep, a));
  PetscFunctionReturn(PETSC_SUCCESS);
}

PetscErrorCode VecDuplicate_MPI(Vec win, Vec *v)
{
  Vec_MPI     *vw, *w = (Vec_MPI *)win->data;
  PetscScalar *array;

  PetscFunctionBegin;
  PetscCall(VecCreateWithLayout_Private(win->map, v));

  PetscCall(VecCreate_MPI_Private(*v, PETSC_TRUE, w->nghost, NULL));
  vw           = (Vec_MPI *)(*v)->data;
  (*v)->ops[0] = win->ops[0];

  /* save local representation of the parallel vector (and scatter) if it exists */
  if (w->localrep) {
    PetscCall(VecGetArray(*v, &array));
    PetscCall(VecCreateSeqWithArray(PETSC_COMM_SELF, PetscAbs(win->map->bs), win->map->n + w->nghost, array, &vw->localrep));
    vw->localrep->ops[0] = w->localrep->ops[0];
    PetscCall(VecRestoreArray(*v, &array));

    vw->localupdate = w->localupdate;
    if (vw->localupdate) PetscCall(PetscObjectReference((PetscObject)vw->localupdate));
  }

  /* New vector should inherit stashing property of parent */
  (*v)->stash.donotstash   = win->stash.donotstash;
  (*v)->stash.ignorenegidx = win->stash.ignorenegidx;

  PetscCall(PetscObjectListDuplicate(((PetscObject)win)->olist, &((PetscObject)(*v))->olist));
  PetscCall(PetscFunctionListDuplicate(((PetscObject)win)->qlist, &((PetscObject)(*v))->qlist));

  (*v)->bstash.bs = win->bstash.bs;
  PetscFunctionReturn(PETSC_SUCCESS);
}

static PetscErrorCode VecSetOption_MPI(Vec V, VecOption op, PetscBool flag)
{
  Vec_MPI *v = (Vec_MPI *)V->data;

  PetscFunctionBegin;
  switch (op) {
  case VEC_IGNORE_OFF_PROC_ENTRIES:
    V->stash.donotstash = flag;
    break;
  case VEC_IGNORE_NEGATIVE_INDICES:
    V->stash.ignorenegidx = flag;
    break;
  case VEC_SUBSET_OFF_PROC_ENTRIES:
    v->assembly_subset = flag;              /* See the same logic in MatAssembly wrt MAT_SUBSET_OFF_PROC_ENTRIES */
    if (!v->assembly_subset) {              /* User indicates "do not reuse the communication pattern" */
      PetscCall(VecAssemblyReset_MPI(V));   /* Reset existing pattern to free memory */
      v->first_assembly_done = PETSC_FALSE; /* Mark the first assembly is not done */
    }
    break;
  }

  PetscFunctionReturn(PETSC_SUCCESS);
}
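
/*
   A minimal usage sketch for VEC_SUBSET_OFF_PROC_ENTRIES as handled above (the vector x,
   the index list offproc, the value array values, and the loop bounds are illustrative
   assumptions, not taken from any particular application):

     PetscCall(VecSetOption(x, VEC_SUBSET_OFF_PROC_ENTRIES, PETSC_TRUE));
     for (PetscInt step = 0; step < nsteps; step++) {
       // every pass stashes (a subset of) the same off-process indices as the first pass
       PetscCall(VecSetValues(x, noffproc, offproc, values, ADD_VALUES));
       PetscCall(VecAssemblyBegin(x));
       PetscCall(VecAssemblyEnd(x));
     }

   After the first assembly the send/receive ranks and buffers are kept and reused instead
   of being rebuilt by a new rendezvous on every assembly.
*/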

PetscErrorCode VecResetArray_MPI(Vec vin)
{
  Vec_MPI *v = (Vec_MPI *)vin->data;

  PetscFunctionBegin;
  v->array         = v->unplacedarray;
  v->unplacedarray = NULL;
  if (v->localrep) PetscCall(VecResetArray(v->localrep));
  PetscFunctionReturn(PETSC_SUCCESS);
}
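
/*
   A minimal usage sketch for the place/reset pair above (the vector x, its local length
   nlocal, and the user-managed buffer mybuf are illustrative assumptions; mybuf must hold
   at least nlocal entries):

     PetscScalar *mybuf;

     PetscCall(PetscMalloc1(nlocal, &mybuf));
     PetscCall(VecPlaceArray(x, mybuf));   // x now computes directly in mybuf
     PetscCall(VecSet(x, 1.0));
     PetscCall(VecResetArray(x));          // restores the array saved in unplacedarray
     PetscCall(PetscFree(mybuf));
*/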

static PetscErrorCode VecAssemblySend_MPI_Private(MPI_Comm comm, const PetscMPIInt tag[], PetscMPIInt rankid, PetscMPIInt rank, void *sdata, MPI_Request req[], void *ctx)
{
  Vec                X   = (Vec)ctx;
  Vec_MPI           *x   = (Vec_MPI *)X->data;
  VecAssemblyHeader *hdr = (VecAssemblyHeader *)sdata;
  PetscInt           bs  = X->map->bs;

  PetscFunctionBegin;
  /* x->first_assembly_done indicates we are reusing a communication network. In that case, some
     messages can be empty, but we have to send them this time if we sent them before because the
     receiver is expecting them.
   */
  if (hdr->count || (x->first_assembly_done && x->sendptrs[rankid].ints)) {
    PetscCallMPI(MPI_Isend(x->sendptrs[rankid].ints, hdr->count, MPIU_INT, rank, tag[0], comm, &req[0]));
    PetscCallMPI(MPI_Isend(x->sendptrs[rankid].scalars, hdr->count, MPIU_SCALAR, rank, tag[1], comm, &req[1]));
  }
  if (hdr->bcount || (x->first_assembly_done && x->sendptrs[rankid].intb)) {
    PetscCallMPI(MPI_Isend(x->sendptrs[rankid].intb, hdr->bcount, MPIU_INT, rank, tag[2], comm, &req[2]));
    PetscCallMPI(MPI_Isend(x->sendptrs[rankid].scalarb, hdr->bcount * bs, MPIU_SCALAR, rank, tag[3], comm, &req[3]));
  }
  PetscFunctionReturn(PETSC_SUCCESS);
}

static PetscErrorCode VecAssemblyRecv_MPI_Private(MPI_Comm comm, const PetscMPIInt tag[], PetscMPIInt rank, void *rdata, MPI_Request req[], void *ctx)
{
  Vec                X   = (Vec)ctx;
  Vec_MPI           *x   = (Vec_MPI *)X->data;
  VecAssemblyHeader *hdr = (VecAssemblyHeader *)rdata;
  PetscInt           bs  = X->map->bs;
  VecAssemblyFrame  *frame;

  PetscFunctionBegin;
  PetscCall(PetscSegBufferGet(x->segrecvframe, 1, &frame));

  if (hdr->count) {
    PetscCall(PetscSegBufferGet(x->segrecvint, hdr->count, &frame->ints));
    PetscCallMPI(MPI_Irecv(frame->ints, hdr->count, MPIU_INT, rank, tag[0], comm, &req[0]));
    PetscCall(PetscSegBufferGet(x->segrecvscalar, hdr->count, &frame->scalars));
    PetscCallMPI(MPI_Irecv(frame->scalars, hdr->count, MPIU_SCALAR, rank, tag[1], comm, &req[1]));
    frame->pendings = 2;
  } else {
    frame->ints     = NULL;
    frame->scalars  = NULL;
    frame->pendings = 0;
  }

  if (hdr->bcount) {
    PetscCall(PetscSegBufferGet(x->segrecvint, hdr->bcount, &frame->intb));
    PetscCallMPI(MPI_Irecv(frame->intb, hdr->bcount, MPIU_INT, rank, tag[2], comm, &req[2]));
    PetscCall(PetscSegBufferGet(x->segrecvscalar, hdr->bcount * bs, &frame->scalarb));
    PetscCallMPI(MPI_Irecv(frame->scalarb, hdr->bcount * bs, MPIU_SCALAR, rank, tag[3], comm, &req[3]));
    frame->pendingb = 2;
  } else {
    frame->intb     = NULL;
    frame->scalarb  = NULL;
    frame->pendingb = 0;
  }
  PetscFunctionReturn(PETSC_SUCCESS);
}

static PetscErrorCode VecAssemblyBegin_MPI_BTS(Vec X)
{
  Vec_MPI *x = (Vec_MPI *)X->data;
  MPI_Comm comm;
  PetscInt i, j, jb, bs;

  PetscFunctionBegin;
  if (X->stash.donotstash) PetscFunctionReturn(PETSC_SUCCESS);

  PetscCall(PetscObjectGetComm((PetscObject)X, &comm));
  PetscCall(VecGetBlockSize(X, &bs));
  if (PetscDefined(USE_DEBUG)) {
    InsertMode addv;
    PetscCall(MPIU_Allreduce((PetscEnum *)&X->stash.insertmode, (PetscEnum *)&addv, 1, MPIU_ENUM, MPI_BOR, comm));
    PetscCheck(addv != (ADD_VALUES | INSERT_VALUES), comm, PETSC_ERR_ARG_NOTSAMETYPE, "Some processors inserted values while others added");
  }
  X->bstash.insertmode = X->stash.insertmode; /* Block stash implicitly tracks InsertMode of scalar stash */

  PetscCall(VecStashSortCompress_Private(&X->stash));
  PetscCall(VecStashSortCompress_Private(&X->bstash));

  if (!x->sendranks) {
    PetscMPIInt nowners, bnowners, *owners, *bowners;
    PetscInt    ntmp;
    PetscCall(VecStashGetOwnerList_Private(&X->stash, X->map, &nowners, &owners));
    PetscCall(VecStashGetOwnerList_Private(&X->bstash, X->map, &bnowners, &bowners));
    PetscCall(PetscMergeMPIIntArray(nowners, owners, bnowners, bowners, &ntmp, &x->sendranks));
    x->nsendranks = ntmp;
    PetscCall(PetscFree(owners));
    PetscCall(PetscFree(bowners));
    PetscCall(PetscMalloc1(x->nsendranks, &x->sendhdr));
    PetscCall(PetscCalloc1(x->nsendranks, &x->sendptrs));
  }
  for (i = 0, j = 0, jb = 0; i < x->nsendranks; i++) {
    PetscMPIInt rank         = x->sendranks[i];
    x->sendhdr[i].insertmode = X->stash.insertmode;
    /* Initialize pointers for non-empty stashes the first time around.  Subsequent assemblies with
     * VEC_SUBSET_OFF_PROC_ENTRIES will leave the old pointers (dangling because the stash has been collected) when
     * there is nothing new to send, so that size-zero messages get sent instead. */
    x->sendhdr[i].count = 0;
    if (X->stash.n) {
      x->sendptrs[i].ints    = &X->stash.idx[j];
      x->sendptrs[i].scalars = &X->stash.array[j];
      for (; j < X->stash.n && X->stash.idx[j] < X->map->range[rank + 1]; j++) x->sendhdr[i].count++;
    }
    x->sendhdr[i].bcount = 0;
    if (X->bstash.n) {
      x->sendptrs[i].intb    = &X->bstash.idx[jb];
      x->sendptrs[i].scalarb = &X->bstash.array[jb * bs];
      for (; jb < X->bstash.n && X->bstash.idx[jb] * bs < X->map->range[rank + 1]; jb++) x->sendhdr[i].bcount++;
    }
  }

  if (!x->segrecvint) PetscCall(PetscSegBufferCreate(sizeof(PetscInt), 1000, &x->segrecvint));
  if (!x->segrecvscalar) PetscCall(PetscSegBufferCreate(sizeof(PetscScalar), 1000, &x->segrecvscalar));
  if (!x->segrecvframe) PetscCall(PetscSegBufferCreate(sizeof(VecAssemblyFrame), 50, &x->segrecvframe));
  if (x->first_assembly_done) { /* this is not the first assembly */
    PetscMPIInt tag[4];
    for (i = 0; i < 4; i++) PetscCall(PetscCommGetNewTag(comm, &tag[i]));
    for (i = 0; i < x->nsendranks; i++) PetscCall(VecAssemblySend_MPI_Private(comm, tag, i, x->sendranks[i], x->sendhdr + i, x->sendreqs + 4 * i, X));
    for (i = 0; i < x->nrecvranks; i++) PetscCall(VecAssemblyRecv_MPI_Private(comm, tag, x->recvranks[i], x->recvhdr + i, x->recvreqs + 4 * i, X));
    x->use_status = PETSC_TRUE;
  } else { /* First time assembly */
    PetscCall(PetscCommBuildTwoSidedFReq(comm, 3, MPIU_INT, x->nsendranks, x->sendranks, (PetscInt *)x->sendhdr, &x->nrecvranks, &x->recvranks, &x->recvhdr, 4, &x->sendreqs, &x->recvreqs, VecAssemblySend_MPI_Private, VecAssemblyRecv_MPI_Private, X));
    x->use_status = PETSC_FALSE;
  }

  /* The first_assembly_done flag is only meaningful when x->assembly_subset is set:
     in that case, record that the first assembly has now been done.
   */
  x->first_assembly_done = x->assembly_subset;

  {
    PetscInt nstash, reallocs;
    PetscCall(VecStashGetInfo_Private(&X->stash, &nstash, &reallocs));
    PetscCall(PetscInfo(X, "Stash has %" PetscInt_FMT " entries, uses %" PetscInt_FMT " mallocs.\n", nstash, reallocs));
    PetscCall(VecStashGetInfo_Private(&X->bstash, &nstash, &reallocs));
    PetscCall(PetscInfo(X, "Block-Stash has %" PetscInt_FMT " entries, uses %" PetscInt_FMT " mallocs.\n", nstash, reallocs));
  }
  PetscFunctionReturn(PETSC_SUCCESS);
}

static PetscErrorCode VecAssemblyEnd_MPI_BTS(Vec X)
{
  Vec_MPI          *x  = (Vec_MPI *)X->data;
  PetscInt          bs = X->map->bs;
  PetscMPIInt       npending, *some_indices, r;
  MPI_Status       *some_statuses;
  PetscScalar      *xarray;
  VecAssemblyFrame *frame;

  PetscFunctionBegin;
  if (X->stash.donotstash) {
    X->stash.insertmode  = NOT_SET_VALUES;
    X->bstash.insertmode = NOT_SET_VALUES;
    PetscFunctionReturn(PETSC_SUCCESS);
  }

  PetscCheck(x->segrecvframe, PETSC_COMM_SELF, PETSC_ERR_PLIB, "Missing segrecvframe! Probably you forgot to call VecAssemblyBegin first");
  PetscCall(VecGetArray(X, &xarray));
  PetscCall(PetscSegBufferExtractInPlace(x->segrecvframe, &frame));
  PetscCall(PetscMalloc2(4 * x->nrecvranks, &some_indices, x->use_status ? 4 * x->nrecvranks : 0, &some_statuses));
  for (r = 0, npending = 0; r < x->nrecvranks; r++) npending += frame[r].pendings + frame[r].pendingb;
  while (npending > 0) {
    PetscMPIInt ndone = 0, ii;
    /* Filling MPI_Status fields requires some resources from the MPI library.  We skip it on the first assembly, or
     * when VEC_SUBSET_OFF_PROC_ENTRIES has not been set, because we could exchange exact sizes in the initial
     * rendezvous.  When the rendezvous is elided, however, we use MPI_Status to get actual message lengths, so that
     * subsequent assembly can set a proper subset of the values. */
    PetscCallMPI(MPI_Waitsome(4 * x->nrecvranks, x->recvreqs, &ndone, some_indices, x->use_status ? some_statuses : MPI_STATUSES_IGNORE));
    for (ii = 0; ii < ndone; ii++) {
      PetscInt     i     = some_indices[ii] / 4, j, k;
      InsertMode   imode = (InsertMode)x->recvhdr[i].insertmode;
      PetscInt    *recvint;
      PetscScalar *recvscalar;
      PetscBool    intmsg   = (PetscBool)(some_indices[ii] % 2 == 0);
      PetscBool    blockmsg = (PetscBool)((some_indices[ii] % 4) / 2 == 1);
      npending--;
      if (!blockmsg) { /* Scalar stash */
        PetscMPIInt count;
        if (--frame[i].pendings > 0) continue;
        if (x->use_status) {
          PetscCallMPI(MPI_Get_count(&some_statuses[ii], intmsg ? MPIU_INT : MPIU_SCALAR, &count));
        } else count = x->recvhdr[i].count;
        for (j = 0, recvint = frame[i].ints, recvscalar = frame[i].scalars; j < count; j++, recvint++) {
          PetscInt loc = *recvint - X->map->rstart;
          PetscCheck(*recvint >= X->map->rstart && X->map->rend > *recvint, PETSC_COMM_SELF, PETSC_ERR_PLIB, "Received vector entry %" PetscInt_FMT " out of local range [%" PetscInt_FMT ",%" PetscInt_FMT ")", *recvint, X->map->rstart, X->map->rend);
          switch (imode) {
          case ADD_VALUES:
            xarray[loc] += *recvscalar++;
            break;
          case INSERT_VALUES:
            xarray[loc] = *recvscalar++;
            break;
          default:
            SETERRQ(PETSC_COMM_SELF, PETSC_ERR_SUP, "Insert mode not supported 0x%x", imode);
          }
        }
      } else { /* Block stash */
        PetscMPIInt count;
        if (--frame[i].pendingb > 0) continue;
        if (x->use_status) {
          PetscCallMPI(MPI_Get_count(&some_statuses[ii], intmsg ? MPIU_INT : MPIU_SCALAR, &count));
          if (!intmsg) count /= bs; /* Convert from number of scalars to number of blocks */
        } else count = x->recvhdr[i].bcount;
        for (j = 0, recvint = frame[i].intb, recvscalar = frame[i].scalarb; j < count; j++, recvint++) {
          PetscInt loc = (*recvint) * bs - X->map->rstart;
          switch (imode) {
          case ADD_VALUES:
            for (k = loc; k < loc + bs; k++) xarray[k] += *recvscalar++;
            break;
          case INSERT_VALUES:
            for (k = loc; k < loc + bs; k++) xarray[k] = *recvscalar++;
            break;
          default:
            SETERRQ(PETSC_COMM_SELF, PETSC_ERR_SUP, "Insert mode not supported 0x%x", imode);
          }
        }
      }
    }
  }
  PetscCall(VecRestoreArray(X, &xarray));
  PetscCallMPI(MPI_Waitall(4 * x->nsendranks, x->sendreqs, MPI_STATUSES_IGNORE));
  PetscCall(PetscFree2(some_indices, some_statuses));
  if (x->assembly_subset) {
    PetscCall(PetscSegBufferExtractInPlace(x->segrecvint, NULL));
    PetscCall(PetscSegBufferExtractInPlace(x->segrecvscalar, NULL));
  } else {
    PetscCall(VecAssemblyReset_MPI(X));
  }

  X->stash.insertmode  = NOT_SET_VALUES;
  X->bstash.insertmode = NOT_SET_VALUES;
  PetscCall(VecStashScatterEnd_Private(&X->stash));
  PetscCall(VecStashScatterEnd_Private(&X->bstash));
  PetscFunctionReturn(PETSC_SUCCESS);
}
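
/*
   A minimal sketch of the code path served by the two routines above: entries owned by
   other processes are stashed by VecSetValues() and exchanged during assembly.  The local
   size and the choice of target index are illustrative assumptions.

     Vec         x;
     PetscInt    rend, N, target;
     PetscScalar one = 1.0;

     PetscCall(VecCreateMPI(PETSC_COMM_WORLD, 4, PETSC_DETERMINE, &x));
     PetscCall(VecGetSize(x, &N));
     PetscCall(VecGetOwnershipRange(x, NULL, &rend));
     target = rend % N;                           // first entry owned by the next process
     PetscCall(VecSetValues(x, 1, &target, &one, ADD_VALUES));
     PetscCall(VecAssemblyBegin(x));              // posts the sends/receives built above
     PetscCall(VecAssemblyEnd(x));                // waits and applies ADD_VALUES locally
     PetscCall(VecDestroy(&x));
*/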

PetscErrorCode VecAssemblyReset_MPI(Vec X)
{
  Vec_MPI *x = (Vec_MPI *)X->data;

  PetscFunctionBegin;
  PetscCall(PetscFree(x->sendreqs));
  PetscCall(PetscFree(x->recvreqs));
  PetscCall(PetscFree(x->sendranks));
  PetscCall(PetscFree(x->recvranks));
  PetscCall(PetscFree(x->sendhdr));
  PetscCall(PetscFree(x->recvhdr));
  PetscCall(PetscFree(x->sendptrs));
  PetscCall(PetscSegBufferDestroy(&x->segrecvint));
  PetscCall(PetscSegBufferDestroy(&x->segrecvscalar));
  PetscCall(PetscSegBufferDestroy(&x->segrecvframe));
  PetscFunctionReturn(PETSC_SUCCESS);
}

static PetscErrorCode VecSetFromOptions_MPI(Vec X, PetscOptionItems *PetscOptionsObject)
{
#if !defined(PETSC_HAVE_MPIUNI)
  PetscBool flg = PETSC_FALSE, set;

  PetscFunctionBegin;
  PetscOptionsHeadBegin(PetscOptionsObject, "VecMPI Options");
  PetscCall(PetscOptionsBool("-vec_assembly_legacy", "Use MPI 1 version of assembly", "", flg, &flg, &set));
  if (set) {
    X->ops->assemblybegin = flg ? VecAssemblyBegin_MPI : VecAssemblyBegin_MPI_BTS;
    X->ops->assemblyend   = flg ? VecAssemblyEnd_MPI : VecAssemblyEnd_MPI_BTS;
  }
  PetscOptionsHeadEnd();
#else
  PetscFunctionBegin;
  X->ops->assemblybegin = VecAssemblyBegin_MPI;
  X->ops->assemblyend   = VecAssemblyEnd_MPI;
#endif
  PetscFunctionReturn(PETSC_SUCCESS);
}

static struct _VecOps DvOps = {PetscDesignatedInitializer(duplicate, VecDuplicate_MPI), /* 1 */
                               PetscDesignatedInitializer(duplicatevecs, VecDuplicateVecs_Default),
                               PetscDesignatedInitializer(destroyvecs, VecDestroyVecs_Default),
                               PetscDesignatedInitializer(dot, VecDot_MPI),
                               PetscDesignatedInitializer(mdot, VecMDot_MPI),
                               PetscDesignatedInitializer(norm, VecNorm_MPI),
                               PetscDesignatedInitializer(tdot, VecTDot_MPI),
                               PetscDesignatedInitializer(mtdot, VecMTDot_MPI),
                               PetscDesignatedInitializer(scale, VecScale_Seq),
                               PetscDesignatedInitializer(copy, VecCopy_Seq), /* 10 */
                               PetscDesignatedInitializer(set, VecSet_Seq),
                               PetscDesignatedInitializer(swap, VecSwap_Seq),
                               PetscDesignatedInitializer(axpy, VecAXPY_Seq),
                               PetscDesignatedInitializer(axpby, VecAXPBY_Seq),
                               PetscDesignatedInitializer(maxpy, VecMAXPY_Seq),
                               PetscDesignatedInitializer(aypx, VecAYPX_Seq),
                               PetscDesignatedInitializer(waxpy, VecWAXPY_Seq),
                               PetscDesignatedInitializer(axpbypcz, VecAXPBYPCZ_Seq),
                               PetscDesignatedInitializer(pointwisemult, VecPointwiseMult_Seq),
                               PetscDesignatedInitializer(pointwisedivide, VecPointwiseDivide_Seq),
                               PetscDesignatedInitializer(setvalues, VecSetValues_MPI), /* 20 */
                               PetscDesignatedInitializer(assemblybegin, VecAssemblyBegin_MPI_BTS),
                               PetscDesignatedInitializer(assemblyend, VecAssemblyEnd_MPI_BTS),
                               PetscDesignatedInitializer(getarray, NULL),
                               PetscDesignatedInitializer(getsize, VecGetSize_MPI),
                               PetscDesignatedInitializer(getlocalsize, VecGetSize_Seq),
                               PetscDesignatedInitializer(restorearray, NULL),
                               PetscDesignatedInitializer(max, VecMax_MPI),
                               PetscDesignatedInitializer(min, VecMin_MPI),
                               PetscDesignatedInitializer(setrandom, VecSetRandom_Seq),
                               PetscDesignatedInitializer(setoption, VecSetOption_MPI),
                               PetscDesignatedInitializer(setvaluesblocked, VecSetValuesBlocked_MPI),
                               PetscDesignatedInitializer(destroy, VecDestroy_MPI),
                               PetscDesignatedInitializer(view, VecView_MPI),
                               PetscDesignatedInitializer(placearray, VecPlaceArray_MPI),
                               PetscDesignatedInitializer(replacearray, VecReplaceArray_Seq),
                               PetscDesignatedInitializer(dot_local, VecDot_Seq),
                               PetscDesignatedInitializer(tdot_local, VecTDot_Seq),
                               PetscDesignatedInitializer(norm_local, VecNorm_Seq),
                               PetscDesignatedInitializer(mdot_local, VecMDot_Seq),
                               PetscDesignatedInitializer(mtdot_local, VecMTDot_Seq),
                               PetscDesignatedInitializer(load, VecLoad_Default),
                               PetscDesignatedInitializer(reciprocal, VecReciprocal_Default),
                               PetscDesignatedInitializer(conjugate, VecConjugate_Seq),
                               PetscDesignatedInitializer(setlocaltoglobalmapping, NULL),
                               PetscDesignatedInitializer(setvalueslocal, NULL),
                               PetscDesignatedInitializer(resetarray, VecResetArray_MPI),
                               PetscDesignatedInitializer(setfromoptions, VecSetFromOptions_MPI), /* set from options */
                               PetscDesignatedInitializer(maxpointwisedivide, VecMaxPointwiseDivide_Seq),
                               PetscDesignatedInitializer(pointwisemax, VecPointwiseMax_Seq),
                               PetscDesignatedInitializer(pointwisemaxabs, VecPointwiseMaxAbs_Seq),
                               PetscDesignatedInitializer(pointwisemin, VecPointwiseMin_Seq),
                               PetscDesignatedInitializer(getvalues, VecGetValues_MPI),
                               PetscDesignatedInitializer(sqrt, NULL),
                               PetscDesignatedInitializer(abs, NULL),
                               PetscDesignatedInitializer(exp, NULL),
                               PetscDesignatedInitializer(log, NULL),
                               PetscDesignatedInitializer(shift, NULL),
                               PetscDesignatedInitializer(create, NULL), /* really? */
                               PetscDesignatedInitializer(stridegather, VecStrideGather_Default),
                               PetscDesignatedInitializer(stridescatter, VecStrideScatter_Default),
                               PetscDesignatedInitializer(dotnorm2, NULL),
                               PetscDesignatedInitializer(getsubvector, NULL),
                               PetscDesignatedInitializer(restoresubvector, NULL),
                               PetscDesignatedInitializer(getarrayread, NULL),
                               PetscDesignatedInitializer(restorearrayread, NULL),
                               PetscDesignatedInitializer(stridesubsetgather, VecStrideSubSetGather_Default),
                               PetscDesignatedInitializer(stridesubsetscatter, VecStrideSubSetScatter_Default),
                               PetscDesignatedInitializer(viewnative, VecView_MPI),
                               PetscDesignatedInitializer(loadnative, NULL),
                               PetscDesignatedInitializer(createlocalvector, NULL),
                               PetscDesignatedInitializer(getlocalvector, NULL),
                               PetscDesignatedInitializer(restorelocalvector, NULL),
                               PetscDesignatedInitializer(getlocalvectorread, NULL),
                               PetscDesignatedInitializer(restorelocalvectorread, NULL),
                               PetscDesignatedInitializer(bindtocpu, NULL),
                               PetscDesignatedInitializer(getarraywrite, NULL),
                               PetscDesignatedInitializer(restorearraywrite, NULL),
                               PetscDesignatedInitializer(getarrayandmemtype, NULL),
                               PetscDesignatedInitializer(restorearrayandmemtype, NULL),
                               PetscDesignatedInitializer(getarrayreadandmemtype, NULL),
                               PetscDesignatedInitializer(restorearrayreadandmemtype, NULL),
                               PetscDesignatedInitializer(getarraywriteandmemtype, NULL),
                               PetscDesignatedInitializer(restorearraywriteandmemtype, NULL),
                               PetscDesignatedInitializer(concatenate, NULL),
                               PetscDesignatedInitializer(sum, NULL),
                               PetscDesignatedInitializer(setpreallocationcoo, VecSetPreallocationCOO_MPI),
                               PetscDesignatedInitializer(setvaluescoo, VecSetValuesCOO_MPI),
                               PetscDesignatedInitializer(errorwnorm, NULL)};

/*
    VecCreate_MPI_Private - Basic create routine called by VecCreate_MPI() (i.e. VecCreateMPI()),
    VecCreateMPIWithArray(), VecCreate_Shared() (i.e. VecCreateShared()), VecCreateGhost(),
    VecCreateGhostWithArray(), VecDuplicate_MPI(), and VecDuplicate_Shared()

    If alloc is true and array is NULL then this routine allocates the space, otherwise
    no space is allocated.
*/
PetscErrorCode VecCreate_MPI_Private(Vec v, PetscBool alloc, PetscInt nghost, const PetscScalar array[])
{
  Vec_MPI *s;

  PetscFunctionBegin;
  PetscCall(PetscNew(&s));
  v->data        = (void *)s;
  v->ops[0]      = DvOps;
  s->nghost      = nghost;
  v->petscnative = PETSC_TRUE;
  if (array) v->offloadmask = PETSC_OFFLOAD_CPU;

  PetscCall(PetscLayoutSetUp(v->map));

  s->array           = (PetscScalar *)array;
  s->array_allocated = NULL;
  if (alloc && !array) {
    PetscInt n = v->map->n + nghost;
    PetscCall(PetscCalloc1(n, &s->array));
    s->array_allocated = s->array;
    PetscCall(PetscObjectComposedDataSetReal((PetscObject)v, NormIds[NORM_2], 0));
    PetscCall(PetscObjectComposedDataSetReal((PetscObject)v, NormIds[NORM_1], 0));
    PetscCall(PetscObjectComposedDataSetReal((PetscObject)v, NormIds[NORM_INFINITY], 0));
  }

  /* By default parallel vectors do not have local representation */
  s->localrep    = NULL;
  s->localupdate = NULL;

  v->stash.insertmode  = NOT_SET_VALUES;
  v->bstash.insertmode = NOT_SET_VALUES;
  /* create the stashes. The block-size for bstash is set later when
     VecSetValuesBlocked is called.
  */
  PetscCall(VecStashCreate_Private(PetscObjectComm((PetscObject)v), 1, &v->stash));
  PetscCall(VecStashCreate_Private(PetscObjectComm((PetscObject)v), PetscAbs(v->map->bs), &v->bstash));

#if defined(PETSC_HAVE_MATLAB)
  PetscCall(PetscObjectComposeFunction((PetscObject)v, "PetscMatlabEnginePut_C", VecMatlabEnginePut_Default));
  PetscCall(PetscObjectComposeFunction((PetscObject)v, "PetscMatlabEngineGet_C", VecMatlabEngineGet_Default));
#endif
  PetscCall(PetscObjectChangeTypeName((PetscObject)v, VECMPI));
  PetscFunctionReturn(PETSC_SUCCESS);
}

/*MC
   VECMPI - VECMPI = "mpi" - The basic parallel vector

   Options Database Key:
. -vec_type mpi - sets the vector type to `VECMPI` during a call to `VecSetFromOptions()`

  Level: beginner

.seealso: [](ch_vectors), `Vec`, `VecType`, `VecCreate()`, `VecSetType()`, `VecSetFromOptions()`, `VecCreateMPIWithArray()`, `VecCreateMPI()`
M*/
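
/*
   A minimal sketch of creating a VECMPI explicitly (the global size is illustrative;
   with VecSetFromOptions() the type could instead be selected with -vec_type mpi):

     Vec x;

     PetscCall(VecCreate(PETSC_COMM_WORLD, &x));
     PetscCall(VecSetSizes(x, PETSC_DECIDE, 100));
     PetscCall(VecSetType(x, VECMPI));
     PetscCall(VecDestroy(&x));
*/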

PetscErrorCode VecCreate_MPI(Vec vv)
{
  PetscFunctionBegin;
  PetscCall(VecCreate_MPI_Private(vv, PETSC_TRUE, 0, NULL));
  PetscFunctionReturn(PETSC_SUCCESS);
}

/*MC
   VECSTANDARD = "standard" - A `VECSEQ` on one process and `VECMPI` on more than one process

   Options Database Key:
. -vec_type standard - sets a vector type to standard on calls to `VecSetFromOptions()`

  Level: beginner

.seealso: [](ch_vectors), `Vec`, `VecType`, `VecCreateSeq()`, `VecCreateMPI()`
M*/

PETSC_EXTERN PetscErrorCode VecCreate_Standard(Vec v)
{
  PetscMPIInt size;

  PetscFunctionBegin;
  PetscCallMPI(MPI_Comm_size(PetscObjectComm((PetscObject)v), &size));
  if (size == 1) {
    PetscCall(VecSetType(v, VECSEQ));
  } else {
    PetscCall(VecSetType(v, VECMPI));
  }
  PetscFunctionReturn(PETSC_SUCCESS);
}

/*@C
  VecCreateMPIWithArray - Creates a parallel, array-style vector,
  where the user provides the array space to store the vector values.

  Collective

  Input Parameters:
+ comm  - the MPI communicator to use
. bs    - block size, same meaning as `VecSetBlockSize()`
. n     - local vector length, cannot be `PETSC_DECIDE`
. N     - global vector length (or `PETSC_DETERMINE` to have calculated)
- array - the user provided array to store the vector values

  Output Parameter:
. vv - the vector

  Level: intermediate

  Notes:
  Use `VecDuplicate()` or `VecDuplicateVecs()` to form additional vectors of the
  same type as an existing vector.

  If the user-provided array is `NULL`, then `VecPlaceArray()` can be used
  at a later stage to SET the array for storing the vector values.

  PETSc does NOT free `array` when the vector is destroyed via `VecDestroy()`.

  The user should not free `array` until the vector is destroyed.

.seealso: [](ch_vectors), `Vec`, `VecType`, `VecCreateSeqWithArray()`, `VecCreate()`, `VecDuplicate()`, `VecDuplicateVecs()`, `VecCreateGhost()`,
          `VecCreateMPI()`, `VecCreateGhostWithArray()`, `VecPlaceArray()`
@*/
PetscErrorCode VecCreateMPIWithArray(MPI_Comm comm, PetscInt bs, PetscInt n, PetscInt N, const PetscScalar array[], Vec *vv)
{
  PetscFunctionBegin;
  PetscCheck(n != PETSC_DECIDE, PETSC_COMM_SELF, PETSC_ERR_ARG_OUTOFRANGE, "Must set local size of vector");
  PetscCall(PetscSplitOwnership(comm, &n, &N));
  PetscCall(VecCreate(comm, vv));
  PetscCall(VecSetSizes(*vv, n, N));
  PetscCall(VecSetBlockSize(*vv, bs));
  PetscCall(VecCreate_MPI_Private(*vv, PETSC_FALSE, 0, array));
  PetscFunctionReturn(PETSC_SUCCESS);
}
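
/*
   A minimal usage sketch for VecCreateMPIWithArray() (the local length and block size
   are illustrative; the caller keeps ownership of the buffer):

     PetscScalar *storage;
     Vec          x;

     PetscCall(PetscMalloc1(4, &storage));
     PetscCall(VecCreateMPIWithArray(PETSC_COMM_WORLD, 1, 4, PETSC_DETERMINE, storage, &x));
     PetscCall(VecSet(x, 2.0));                   // writes into storage
     PetscCall(VecDestroy(&x));                   // storage is NOT freed here
     PetscCall(PetscFree(storage));
*/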

/*@C
  VecCreateGhostWithArray - Creates a parallel vector with ghost padding on each processor;
  the caller allocates the array space.

  Collective

  Input Parameters:
+ comm   - the MPI communicator to use
. n      - local vector length
. N      - global vector length (or `PETSC_DETERMINE` to have calculated if `n` is given)
. nghost - number of local ghost points
. ghosts - global indices of ghost points (or `NULL` if not needed), these do not need to be in increasing order (sorted)
- array  - the space to store the vector values (as long as `n + nghost`)

  Output Parameter:
. vv - the global vector representation (without ghost points as part of vector)

  Level: advanced

  Notes:
  Use `VecGhostGetLocalForm()` to access the local, ghosted representation
  of the vector.

  This also automatically sets the `ISLocalToGlobalMapping()` for this vector.

.seealso: [](ch_vectors), `Vec`, `VecType`, `VecCreate()`, `VecGhostGetLocalForm()`, `VecGhostRestoreLocalForm()`,
          `VecCreateGhost()`, `VecCreateSeqWithArray()`, `VecCreateMPIWithArray()`,
          `VecCreateGhostBlock()`, `VecCreateGhostBlockWithArray()`, `VecMPISetGhost()`
@*/
PetscErrorCode VecCreateGhostWithArray(MPI_Comm comm, PetscInt n, PetscInt N, PetscInt nghost, const PetscInt ghosts[], const PetscScalar array[], Vec *vv)
{
  Vec_MPI               *w;
  PetscScalar           *larray;
  IS                     from, to;
  ISLocalToGlobalMapping ltog;
  PetscInt               rstart, i, *indices;

  PetscFunctionBegin;
  *vv = NULL;

  PetscCheck(n != PETSC_DECIDE, PETSC_COMM_SELF, PETSC_ERR_ARG_OUTOFRANGE, "Must set local size");
  PetscCheck(nghost != PETSC_DECIDE, PETSC_COMM_SELF, PETSC_ERR_ARG_OUTOFRANGE, "Must set local ghost size");
  PetscCheck(nghost >= 0, PETSC_COMM_SELF, PETSC_ERR_ARG_OUTOFRANGE, "Ghost length must be >= 0");
  PetscCall(PetscSplitOwnership(comm, &n, &N));
  /* Create global representation */
  PetscCall(VecCreate(comm, vv));
  PetscCall(VecSetSizes(*vv, n, N));
  PetscCall(VecCreate_MPI_Private(*vv, PETSC_TRUE, nghost, array));
  w = (Vec_MPI *)(*vv)->data;
  /* Create local representation */
  PetscCall(VecGetArray(*vv, &larray));
  PetscCall(VecCreateSeqWithArray(PETSC_COMM_SELF, 1, n + nghost, larray, &w->localrep));
  PetscCall(VecRestoreArray(*vv, &larray));

  /*
       Create scatter context for scattering (updating) ghost values
  */
  PetscCall(ISCreateGeneral(comm, nghost, ghosts, PETSC_COPY_VALUES, &from));
  PetscCall(ISCreateStride(PETSC_COMM_SELF, nghost, n, 1, &to));
  PetscCall(VecScatterCreate(*vv, from, w->localrep, to, &w->localupdate));
  PetscCall(ISDestroy(&to));
  PetscCall(ISDestroy(&from));

  /* set local to global mapping for ghosted vector */
  PetscCall(PetscMalloc1(n + nghost, &indices));
  PetscCall(VecGetOwnershipRange(*vv, &rstart, NULL));
  for (i = 0; i < n; i++) indices[i] = rstart + i;
  for (i = 0; i < nghost; i++) indices[n + i] = ghosts[i];
  PetscCall(ISLocalToGlobalMappingCreate(comm, 1, n + nghost, indices, PETSC_OWN_POINTER, &ltog));
  PetscCall(VecSetLocalToGlobalMapping(*vv, ltog));
  PetscCall(ISLocalToGlobalMappingDestroy(&ltog));
  PetscFunctionReturn(PETSC_SUCCESS);
}

/*@
  VecCreateGhost - Creates a parallel vector with ghost padding on each processor.

  Collective

  Input Parameters:
+ comm   - the MPI communicator to use
. n      - local vector length
. N      - global vector length (or `PETSC_DETERMINE` to have calculated if `n` is given)
. nghost - number of local ghost points
- ghosts - global indices of ghost points, these do not need to be in increasing order (sorted)

  Output Parameter:
. vv - the global vector representation (without ghost points as part of vector)

  Level: advanced

  Notes:
  Use `VecGhostGetLocalForm()` to access the local, ghosted representation
  of the vector.

  This also automatically sets the `ISLocalToGlobalMapping()` for this vector.

.seealso: [](ch_vectors), `Vec`, `VecType`, `VecCreateSeq()`, `VecCreate()`, `VecDuplicate()`, `VecDuplicateVecs()`, `VecCreateMPI()`,
          `VecGhostGetLocalForm()`, `VecGhostRestoreLocalForm()`, `VecGhostUpdateBegin()`,
          `VecCreateGhostWithArray()`, `VecCreateMPIWithArray()`, `VecGhostUpdateEnd()`,
          `VecCreateGhostBlock()`, `VecCreateGhostBlockWithArray()`, `VecMPISetGhost()`

@*/
PetscErrorCode VecCreateGhost(MPI_Comm comm, PetscInt n, PetscInt N, PetscInt nghost, const PetscInt ghosts[], Vec *vv)
{
  PetscFunctionBegin;
  PetscCall(VecCreateGhostWithArray(comm, n, N, nghost, ghosts, NULL, vv));
  PetscFunctionReturn(PETSC_SUCCESS);
}
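
/*
   A minimal usage sketch for VecCreateGhost() (the uniform local size n and the single
   ghost entry per process are illustrative assumptions):

     Vec         x, xlocal;
     PetscInt    n = 4, ghosts[1];
     PetscMPIInt rank, size;

     PetscCallMPI(MPI_Comm_rank(PETSC_COMM_WORLD, &rank));
     PetscCallMPI(MPI_Comm_size(PETSC_COMM_WORLD, &size));
     ghosts[0] = ((rank + 1) % size) * n;         // first entry owned by the next process
     PetscCall(VecCreateGhost(PETSC_COMM_WORLD, n, PETSC_DETERMINE, 1, ghosts, &x));
     PetscCall(VecSet(x, (PetscScalar)rank));
     PetscCall(VecGhostUpdateBegin(x, INSERT_VALUES, SCATTER_FORWARD));
     PetscCall(VecGhostUpdateEnd(x, INSERT_VALUES, SCATTER_FORWARD));
     PetscCall(VecGhostGetLocalForm(x, &xlocal)); // sequential vector of length n + nghost
     PetscCall(VecGhostRestoreLocalForm(x, &xlocal));
     PetscCall(VecDestroy(&x));
*/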

/*@
  VecMPISetGhost - Sets the ghost points for an MPI ghost vector

  Collective

  Input Parameters:
+ vv     - the MPI vector
. nghost - number of local ghost points
- ghosts - global indices of ghost points, these do not need to be in increasing order (sorted)

  Level: advanced

  Notes:
  Use `VecGhostGetLocalForm()` to access the local, ghosted representation
  of the vector.

  This also automatically sets the `ISLocalToGlobalMapping()` for this vector.

  You must call this AFTER you have set the type of the vector (with `VecSetType()`) and the size (with `VecSetSizes()`).

.seealso: [](ch_vectors), `Vec`, `VecType`, `VecCreateSeq()`, `VecCreate()`, `VecDuplicate()`, `VecDuplicateVecs()`, `VecCreateMPI()`,
          `VecGhostGetLocalForm()`, `VecGhostRestoreLocalForm()`, `VecGhostUpdateBegin()`,
          `VecCreateGhostWithArray()`, `VecCreateMPIWithArray()`, `VecGhostUpdateEnd()`,
          `VecCreateGhostBlock()`, `VecCreateGhostBlockWithArray()`
@*/
PetscErrorCode VecMPISetGhost(Vec vv, PetscInt nghost, const PetscInt ghosts[])
{
  PetscBool flg;

  PetscFunctionBegin;
  PetscCall(PetscObjectTypeCompare((PetscObject)vv, VECMPI, &flg));
  /* if already fully existent VECMPI then basically destroy it and rebuild with ghosting */
  if (flg) {
    PetscInt               n, N;
    Vec_MPI               *w;
    PetscScalar           *larray;
    IS                     from, to;
    ISLocalToGlobalMapping ltog;
    PetscInt               rstart, i, *indices;
    MPI_Comm               comm;

    PetscCall(PetscObjectGetComm((PetscObject)vv, &comm));
    n = vv->map->n;
    N = vv->map->N;
    PetscUseTypeMethod(vv, destroy);
    PetscCall(VecSetSizes(vv, n, N));
    PetscCall(VecCreate_MPI_Private(vv, PETSC_TRUE, nghost, NULL));
    w = (Vec_MPI *)(vv)->data;
    /* Create local representation */
    PetscCall(VecGetArray(vv, &larray));
    PetscCall(VecCreateSeqWithArray(PETSC_COMM_SELF, 1, n + nghost, larray, &w->localrep));
    PetscCall(VecRestoreArray(vv, &larray));

    /*
     Create scatter context for scattering (updating) ghost values
     */
    PetscCall(ISCreateGeneral(comm, nghost, ghosts, PETSC_COPY_VALUES, &from));
    PetscCall(ISCreateStride(PETSC_COMM_SELF, nghost, n, 1, &to));
    PetscCall(VecScatterCreate(vv, from, w->localrep, to, &w->localupdate));
    PetscCall(ISDestroy(&to));
    PetscCall(ISDestroy(&from));

    /* set local to global mapping for ghosted vector */
    PetscCall(PetscMalloc1(n + nghost, &indices));
    PetscCall(VecGetOwnershipRange(vv, &rstart, NULL));

    for (i = 0; i < n; i++) indices[i] = rstart + i;
    for (i = 0; i < nghost; i++) indices[n + i] = ghosts[i];

    PetscCall(ISLocalToGlobalMappingCreate(comm, 1, n + nghost, indices, PETSC_OWN_POINTER, &ltog));
    PetscCall(VecSetLocalToGlobalMapping(vv, ltog));
    PetscCall(ISLocalToGlobalMappingDestroy(&ltog));
  } else {
    PetscCheck(vv->ops->create != VecCreate_MPI, PetscObjectComm((PetscObject)vv), PETSC_ERR_ARG_WRONGSTATE, "Must set local or global size before setting ghosting");
    PetscCheck(((PetscObject)vv)->type_name, PetscObjectComm((PetscObject)vv), PETSC_ERR_ARG_WRONGSTATE, "Must set type to VECMPI before ghosting");
  }
  PetscFunctionReturn(PETSC_SUCCESS);
}
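
/*
   A minimal usage sketch for VecMPISetGhost() (the local size and the ghosted index are
   illustrative assumptions): the sizes and the type must be set before the ghosts.

     Vec      x;
     PetscInt ghosts[] = {0};                     // global entry 0 ghosted on every process

     PetscCall(VecCreate(PETSC_COMM_WORLD, &x));
     PetscCall(VecSetSizes(x, 4, PETSC_DETERMINE));
     PetscCall(VecSetType(x, VECMPI));
     PetscCall(VecMPISetGhost(x, 1, ghosts));
     PetscCall(VecDestroy(&x));
*/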

/* ------------------------------------------------------------------------------------------*/
/*@C
  VecCreateGhostBlockWithArray - Creates a parallel vector with ghost padding on each processor;
  the caller allocates the array space. Indices in the ghost region are based on blocks.

  Collective

  Input Parameters:
+ comm   - the MPI communicator to use
. bs     - block size
. n      - local vector length
. N      - global vector length (or `PETSC_DETERMINE` to have calculated if `n` is given)
. nghost - number of local ghost blocks
. ghosts - global indices of ghost blocks (or `NULL` if not needed), counts are by block not by index, these do not need to be in increasing order (sorted)
- array  - the space to store the vector values (as long as `n + nghost*bs`)

  Output Parameter:
. vv - the global vector representation (without ghost points as part of vector)

  Level: advanced

  Notes:
  Use `VecGhostGetLocalForm()` to access the local, ghosted representation
  of the vector.

  `n` is the local vector size (total local size not the number of blocks) while `nghost`
  is the number of blocks in the ghost portion, i.e. the number of elements in the ghost
  portion is `bs*nghost`

.seealso: [](ch_vectors), `Vec`, `VecType`, `VecCreate()`, `VecGhostGetLocalForm()`, `VecGhostRestoreLocalForm()`,
          `VecCreateGhost()`, `VecCreateSeqWithArray()`, `VecCreateMPIWithArray()`,
          `VecCreateGhostWithArray()`, `VecCreateGhostBlock()`
@*/
PetscErrorCode VecCreateGhostBlockWithArray(MPI_Comm comm, PetscInt bs, PetscInt n, PetscInt N, PetscInt nghost, const PetscInt ghosts[], const PetscScalar array[], Vec *vv)
{
  Vec_MPI               *w;
  PetscScalar           *larray;
  IS                     from, to;
  ISLocalToGlobalMapping ltog;
  PetscInt               rstart, i, nb, *indices;

  PetscFunctionBegin;
  *vv = NULL;

  PetscCheck(n != PETSC_DECIDE, PETSC_COMM_SELF, PETSC_ERR_ARG_OUTOFRANGE, "Must set local size");
  PetscCheck(nghost != PETSC_DECIDE, PETSC_COMM_SELF, PETSC_ERR_ARG_OUTOFRANGE, "Must set local ghost size");
  PetscCheck(nghost >= 0, PETSC_COMM_SELF, PETSC_ERR_ARG_OUTOFRANGE, "Ghost length must be >= 0");
  PetscCheck(n % bs == 0, PETSC_COMM_SELF, PETSC_ERR_ARG_INCOMP, "Local size must be a multiple of block size");
  PetscCall(PetscSplitOwnership(comm, &n, &N));
  /* Create global representation */
  PetscCall(VecCreate(comm, vv));
  PetscCall(VecSetSizes(*vv, n, N));
  PetscCall(VecSetBlockSize(*vv, bs));
  PetscCall(VecCreate_MPI_Private(*vv, PETSC_TRUE, nghost * bs, array));
  w = (Vec_MPI *)(*vv)->data;
  /* Create local representation */
  PetscCall(VecGetArray(*vv, &larray));
  PetscCall(VecCreateSeqWithArray(PETSC_COMM_SELF, bs, n + bs * nghost, larray, &w->localrep));
  PetscCall(VecRestoreArray(*vv, &larray));

  /*
       Create scatter context for scattering (updating) ghost values
  */
  PetscCall(ISCreateBlock(comm, bs, nghost, ghosts, PETSC_COPY_VALUES, &from));
  PetscCall(ISCreateStride(PETSC_COMM_SELF, bs * nghost, n, 1, &to));
  PetscCall(VecScatterCreate(*vv, from, w->localrep, to, &w->localupdate));
  PetscCall(ISDestroy(&to));
  PetscCall(ISDestroy(&from));

  /* set local to global mapping for ghosted vector */
  nb = n / bs;
  PetscCall(PetscMalloc1(nb + nghost, &indices));
  PetscCall(VecGetOwnershipRange(*vv, &rstart, NULL));
  rstart = rstart / bs;

  for (i = 0; i < nb; i++) indices[i] = rstart + i;
  for (i = 0; i < nghost; i++) indices[nb + i] = ghosts[i];

  PetscCall(ISLocalToGlobalMappingCreate(comm, bs, nb + nghost, indices, PETSC_OWN_POINTER, &ltog));
  PetscCall(VecSetLocalToGlobalMapping(*vv, ltog));
  PetscCall(ISLocalToGlobalMappingDestroy(&ltog));
  PetscFunctionReturn(PETSC_SUCCESS);
}

/*@
  VecCreateGhostBlock - Creates a parallel vector with ghost padding on each processor.
  The indexing of the ghost points is done with blocks.

  Collective

  Input Parameters:
+ comm   - the MPI communicator to use
. bs     - the block size
. n      - local vector length
. N      - global vector length (or `PETSC_DETERMINE` to have calculated if `n` is given)
. nghost - number of local ghost blocks
- ghosts - global indices of ghost blocks, counts are by block, not by individual index, these do not need to be in increasing order (sorted)

  Output Parameter:
. vv - the global vector representation (without ghost points as part of vector)

  Level: advanced

  Notes:
  Use `VecGhostGetLocalForm()` to access the local, ghosted representation
  of the vector.

  `n` is the local vector size (total local size not the number of blocks) while `nghost`
  is the number of blocks in the ghost portion, i.e. the number of elements in the ghost
  portion is `bs*nghost`

.seealso: [](ch_vectors), `Vec`, `VecType`, `VecCreateSeq()`, `VecCreate()`, `VecDuplicate()`, `VecDuplicateVecs()`, `VecCreateMPI()`,
          `VecGhostGetLocalForm()`, `VecGhostRestoreLocalForm()`,
          `VecCreateGhostWithArray()`, `VecCreateMPIWithArray()`, `VecCreateGhostBlockWithArray()`
@*/
PetscErrorCode VecCreateGhostBlock(MPI_Comm comm, PetscInt bs, PetscInt n, PetscInt N, PetscInt nghost, const PetscInt ghosts[], Vec *vv)
{
  PetscFunctionBegin;
  PetscCall(VecCreateGhostBlockWithArray(comm, bs, n, N, nghost, ghosts, NULL, vv));
  PetscFunctionReturn(PETSC_SUCCESS);
}
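
/*
   A minimal usage sketch for VecCreateGhostBlock() (the block size, local length, and the
   ghosted block are illustrative; note that n counts entries while nghost counts blocks):

     Vec      x;
     PetscInt bs = 2, n = 4, ghosts[] = {0};      // ghost block 0, i.e. entries 0 and 1

     PetscCall(VecCreateGhostBlock(PETSC_COMM_WORLD, bs, n, PETSC_DETERMINE, 1, ghosts, &x));
     PetscCall(VecGhostUpdateBegin(x, INSERT_VALUES, SCATTER_FORWARD));
     PetscCall(VecGhostUpdateEnd(x, INSERT_VALUES, SCATTER_FORWARD));
     PetscCall(VecDestroy(&x));
*/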
908