xref: /petsc/src/vec/vec/impls/mpi/pbvec.c (revision cf17694fb418ca56ab8df99b423a1cf147068433)
1 
2 /*
3    This file contains routines for parallel vector operations.
4  */
5 #include <petscsys.h>
6 #include <../src/vec/vec/impls/mpi/pvecimpl.h> /*I  "petscvec.h"   I*/
7 
8 PetscErrorCode VecDot_MPI(Vec xin, Vec yin, PetscScalar *z)
9 {
10   PetscScalar sum, work;
11 
12   PetscFunctionBegin;
13   PetscCall(VecDot_Seq(xin, yin, &work));
14   PetscCall(MPIU_Allreduce(&work, &sum, 1, MPIU_SCALAR, MPIU_SUM, PetscObjectComm((PetscObject)xin)));
15   *z = sum;
16   PetscFunctionReturn(0);
17 }
18 
19 PetscErrorCode VecTDot_MPI(Vec xin, Vec yin, PetscScalar *z)
20 {
21   PetscScalar sum, work;
22 
23   PetscFunctionBegin;
24   PetscCall(VecTDot_Seq(xin, yin, &work));
25   PetscCall(MPIU_Allreduce(&work, &sum, 1, MPIU_SCALAR, MPIU_SUM, PetscObjectComm((PetscObject)xin)));
26   *z = sum;
27   PetscFunctionReturn(0);
28 }
29 
30 extern PetscErrorCode VecView_MPI_Draw(Vec, PetscViewer);
31 
32 static PetscErrorCode VecPlaceArray_MPI(Vec vin, const PetscScalar *a)
33 {
34   Vec_MPI *v = (Vec_MPI *)vin->data;
35 
36   PetscFunctionBegin;
37   PetscCheck(!v->unplacedarray, PETSC_COMM_SELF, PETSC_ERR_ARG_WRONGSTATE, "VecPlaceArray() was already called on this vector, without a call to VecResetArray()");
38   v->unplacedarray = v->array; /* save previous array so reset can bring it back */
39   v->array         = (PetscScalar *)a;
40   if (v->localrep) PetscCall(VecPlaceArray(v->localrep, a));
41   PetscFunctionReturn(0);
42 }
43 
44 PetscErrorCode VecDuplicate_MPI(Vec win, Vec *v)
45 {
46   Vec_MPI     *vw, *w = (Vec_MPI *)win->data;
47   PetscScalar *array;
48 
49   PetscFunctionBegin;
50   PetscCall(VecCreate(PetscObjectComm((PetscObject)win), v));
51   PetscCall(PetscLayoutReference(win->map, &(*v)->map));
52 
53   PetscCall(VecCreate_MPI_Private(*v, PETSC_TRUE, w->nghost, NULL));
54   vw = (Vec_MPI *)(*v)->data;
55   PetscCall(PetscMemcpy((*v)->ops, win->ops, sizeof(struct _VecOps)));
56 
57   /* save local representation of the parallel vector (and scatter) if it exists */
58   if (w->localrep) {
59     PetscCall(VecGetArray(*v, &array));
60     PetscCall(VecCreateSeqWithArray(PETSC_COMM_SELF, PetscAbs(win->map->bs), win->map->n + w->nghost, array, &vw->localrep));
61     PetscCall(PetscMemcpy(vw->localrep->ops, w->localrep->ops, sizeof(struct _VecOps)));
62     PetscCall(VecRestoreArray(*v, &array));
63 
64     vw->localupdate = w->localupdate;
65     if (vw->localupdate) PetscCall(PetscObjectReference((PetscObject)vw->localupdate));
66   }
67 
68   /* New vector should inherit stashing property of parent */
69   (*v)->stash.donotstash   = win->stash.donotstash;
70   (*v)->stash.ignorenegidx = win->stash.ignorenegidx;
71 
72   PetscCall(PetscObjectListDuplicate(((PetscObject)win)->olist, &((PetscObject)(*v))->olist));
73   PetscCall(PetscFunctionListDuplicate(((PetscObject)win)->qlist, &((PetscObject)(*v))->qlist));
74 
75   (*v)->map->bs   = PetscAbs(win->map->bs);
76   (*v)->bstash.bs = win->bstash.bs;
77   PetscFunctionReturn(0);
78 }
79 
80 static PetscErrorCode VecSetOption_MPI(Vec V, VecOption op, PetscBool flag)
81 {
82   Vec_MPI *v = (Vec_MPI *)V->data;
83 
84   PetscFunctionBegin;
85   switch (op) {
86   case VEC_IGNORE_OFF_PROC_ENTRIES:
87     V->stash.donotstash = flag;
88     break;
89   case VEC_IGNORE_NEGATIVE_INDICES:
90     V->stash.ignorenegidx = flag;
91     break;
92   case VEC_SUBSET_OFF_PROC_ENTRIES:
93     v->assembly_subset = flag;              /* See the same logic in MatAssembly wrt MAT_SUBSET_OFF_PROC_ENTRIES */
94     if (!v->assembly_subset) {              /* User indicates "do not reuse the communication pattern" */
95       PetscCall(VecAssemblyReset_MPI(V));   /* Reset existing pattern to free memory */
96       v->first_assembly_done = PETSC_FALSE; /* Mark the first assembly is not done */
97     }
98     break;
99   }
100 
101   PetscFunctionReturn(0);
102 }
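
/* A minimal usage sketch (illustrative only; X, nsteps, nidx, idx, and vals are placeholder names):
   when every assembly sets the same set of off-process entries (or a subset of it), setting
   VEC_SUBSET_OFF_PROC_ENTRIES lets the BTS assembly below reuse its communication pattern:

     PetscCall(VecSetOption(X, VEC_SUBSET_OFF_PROC_ENTRIES, PETSC_TRUE));
     for (PetscInt step = 0; step < nsteps; step++) {
       PetscCall(VecSetValues(X, nidx, idx, vals, ADD_VALUES));
       PetscCall(VecAssemblyBegin(X));
       PetscCall(VecAssemblyEnd(X));
     }
*/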
103 
104 static PetscErrorCode VecResetArray_MPI(Vec vin)
105 {
106   Vec_MPI *v = (Vec_MPI *)vin->data;
107 
108   PetscFunctionBegin;
109   v->array         = v->unplacedarray;
110   v->unplacedarray = NULL;
111   if (v->localrep) PetscCall(VecResetArray(v->localrep));
112   PetscFunctionReturn(0);
113 }
114 
115 static PetscErrorCode VecAssemblySend_MPI_Private(MPI_Comm comm, const PetscMPIInt tag[], PetscMPIInt rankid, PetscMPIInt rank, void *sdata, MPI_Request req[], void *ctx)
116 {
117   Vec                X   = (Vec)ctx;
118   Vec_MPI           *x   = (Vec_MPI *)X->data;
119   VecAssemblyHeader *hdr = (VecAssemblyHeader *)sdata;
120   PetscInt           bs  = X->map->bs;
121 
122   PetscFunctionBegin;
123   /* x->first_assembly_done indicates we are reusing a communication network. In that case, some
124      messages can be empty, but we have to send them this time if we sent them before because the
125      receiver is expecting them.
126    */
127   if (hdr->count || (x->first_assembly_done && x->sendptrs[rankid].ints)) {
128     PetscCallMPI(MPI_Isend(x->sendptrs[rankid].ints, hdr->count, MPIU_INT, rank, tag[0], comm, &req[0]));
129     PetscCallMPI(MPI_Isend(x->sendptrs[rankid].scalars, hdr->count, MPIU_SCALAR, rank, tag[1], comm, &req[1]));
130   }
131   if (hdr->bcount || (x->first_assembly_done && x->sendptrs[rankid].intb)) {
132     PetscCallMPI(MPI_Isend(x->sendptrs[rankid].intb, hdr->bcount, MPIU_INT, rank, tag[2], comm, &req[2]));
133     PetscCallMPI(MPI_Isend(x->sendptrs[rankid].scalarb, hdr->bcount * bs, MPIU_SCALAR, rank, tag[3], comm, &req[3]));
134   }
135   PetscFunctionReturn(0);
136 }
137 
138 static PetscErrorCode VecAssemblyRecv_MPI_Private(MPI_Comm comm, const PetscMPIInt tag[], PetscMPIInt rank, void *rdata, MPI_Request req[], void *ctx)
139 {
140   Vec                X   = (Vec)ctx;
141   Vec_MPI           *x   = (Vec_MPI *)X->data;
142   VecAssemblyHeader *hdr = (VecAssemblyHeader *)rdata;
143   PetscInt           bs  = X->map->bs;
144   VecAssemblyFrame  *frame;
145 
146   PetscFunctionBegin;
147   PetscCall(PetscSegBufferGet(x->segrecvframe, 1, &frame));
148 
149   if (hdr->count) {
150     PetscCall(PetscSegBufferGet(x->segrecvint, hdr->count, &frame->ints));
151     PetscCallMPI(MPI_Irecv(frame->ints, hdr->count, MPIU_INT, rank, tag[0], comm, &req[0]));
152     PetscCall(PetscSegBufferGet(x->segrecvscalar, hdr->count, &frame->scalars));
153     PetscCallMPI(MPI_Irecv(frame->scalars, hdr->count, MPIU_SCALAR, rank, tag[1], comm, &req[1]));
154     frame->pendings = 2;
155   } else {
156     frame->ints     = NULL;
157     frame->scalars  = NULL;
158     frame->pendings = 0;
159   }
160 
161   if (hdr->bcount) {
162     PetscCall(PetscSegBufferGet(x->segrecvint, hdr->bcount, &frame->intb));
163     PetscCallMPI(MPI_Irecv(frame->intb, hdr->bcount, MPIU_INT, rank, tag[2], comm, &req[2]));
164     PetscCall(PetscSegBufferGet(x->segrecvscalar, hdr->bcount * bs, &frame->scalarb));
165     PetscCallMPI(MPI_Irecv(frame->scalarb, hdr->bcount * bs, MPIU_SCALAR, rank, tag[3], comm, &req[3]));
166     frame->pendingb = 2;
167   } else {
168     frame->intb     = NULL;
169     frame->scalarb  = NULL;
170     frame->pendingb = 0;
171   }
172   PetscFunctionReturn(0);
173 }
174 
175 static PetscErrorCode VecAssemblyBegin_MPI_BTS(Vec X)
176 {
177   Vec_MPI *x = (Vec_MPI *)X->data;
178   MPI_Comm comm;
179   PetscInt i, j, jb, bs;
180 
181   PetscFunctionBegin;
182   if (X->stash.donotstash) PetscFunctionReturn(0);
183 
184   PetscCall(PetscObjectGetComm((PetscObject)X, &comm));
185   PetscCall(VecGetBlockSize(X, &bs));
186   if (PetscDefined(USE_DEBUG)) {
187     InsertMode addv;
188     PetscCall(MPIU_Allreduce((PetscEnum *)&X->stash.insertmode, (PetscEnum *)&addv, 1, MPIU_ENUM, MPI_BOR, comm));
189     PetscCheck(addv != (ADD_VALUES | INSERT_VALUES), comm, PETSC_ERR_ARG_NOTSAMETYPE, "Some processors inserted values while others added");
190   }
191   X->bstash.insertmode = X->stash.insertmode; /* Block stash implicitly tracks InsertMode of scalar stash */
192 
193   PetscCall(VecStashSortCompress_Private(&X->stash));
194   PetscCall(VecStashSortCompress_Private(&X->bstash));
195 
196   if (!x->sendranks) {
197     PetscMPIInt nowners, bnowners, *owners, *bowners;
198     PetscInt    ntmp;
199     PetscCall(VecStashGetOwnerList_Private(&X->stash, X->map, &nowners, &owners));
200     PetscCall(VecStashGetOwnerList_Private(&X->bstash, X->map, &bnowners, &bowners));
201     PetscCall(PetscMergeMPIIntArray(nowners, owners, bnowners, bowners, &ntmp, &x->sendranks));
202     x->nsendranks = ntmp;
203     PetscCall(PetscFree(owners));
204     PetscCall(PetscFree(bowners));
205     PetscCall(PetscMalloc1(x->nsendranks, &x->sendhdr));
206     PetscCall(PetscCalloc1(x->nsendranks, &x->sendptrs));
207   }
208   for (i = 0, j = 0, jb = 0; i < x->nsendranks; i++) {
209     PetscMPIInt rank         = x->sendranks[i];
210     x->sendhdr[i].insertmode = X->stash.insertmode;
211     /* Initialize pointers for non-empty stashes the first time around.  Subsequent assemblies with
212      * VEC_SUBSET_OFF_PROC_ENTRIES will leave the old pointers (dangling because the stash has been collected) when
213      * there is nothing new to send, so that size-zero messages get sent instead. */
214     x->sendhdr[i].count = 0;
215     if (X->stash.n) {
216       x->sendptrs[i].ints    = &X->stash.idx[j];
217       x->sendptrs[i].scalars = &X->stash.array[j];
218       for (; j < X->stash.n && X->stash.idx[j] < X->map->range[rank + 1]; j++) x->sendhdr[i].count++;
219     }
220     x->sendhdr[i].bcount = 0;
221     if (X->bstash.n) {
222       x->sendptrs[i].intb    = &X->bstash.idx[jb];
223       x->sendptrs[i].scalarb = &X->bstash.array[jb * bs];
224       for (; jb < X->bstash.n && X->bstash.idx[jb] * bs < X->map->range[rank + 1]; jb++) x->sendhdr[i].bcount++;
225     }
226   }
227 
228   if (!x->segrecvint) PetscCall(PetscSegBufferCreate(sizeof(PetscInt), 1000, &x->segrecvint));
229   if (!x->segrecvscalar) PetscCall(PetscSegBufferCreate(sizeof(PetscScalar), 1000, &x->segrecvscalar));
230   if (!x->segrecvframe) PetscCall(PetscSegBufferCreate(sizeof(VecAssemblyFrame), 50, &x->segrecvframe));
231   if (x->first_assembly_done) { /* this is not the first assembly */
232     PetscMPIInt tag[4];
233     for (i = 0; i < 4; i++) PetscCall(PetscCommGetNewTag(comm, &tag[i]));
234     for (i = 0; i < x->nsendranks; i++) PetscCall(VecAssemblySend_MPI_Private(comm, tag, i, x->sendranks[i], x->sendhdr + i, x->sendreqs + 4 * i, X));
235     for (i = 0; i < x->nrecvranks; i++) PetscCall(VecAssemblyRecv_MPI_Private(comm, tag, x->recvranks[i], x->recvhdr + i, x->recvreqs + 4 * i, X));
236     x->use_status = PETSC_TRUE;
237   } else { /* First time assembly */
238     PetscCall(PetscCommBuildTwoSidedFReq(comm, 3, MPIU_INT, x->nsendranks, x->sendranks, (PetscInt *)x->sendhdr, &x->nrecvranks, &x->recvranks, &x->recvhdr, 4, &x->sendreqs, &x->recvreqs, VecAssemblySend_MPI_Private, VecAssemblyRecv_MPI_Private, X));
239     x->use_status = PETSC_FALSE;
240   }
241 
242   /* The first_assembly_done flag is only meaningful when x->assembly_subset is set.
243      When assembly_subset is set, we mark the first assembly as done so that subsequent assemblies can reuse the communication pattern.
244    */
245   x->first_assembly_done = x->assembly_subset;
246 
247   {
248     PetscInt nstash, reallocs;
249     PetscCall(VecStashGetInfo_Private(&X->stash, &nstash, &reallocs));
250     PetscCall(PetscInfo(X, "Stash has %" PetscInt_FMT " entries, uses %" PetscInt_FMT " mallocs.\n", nstash, reallocs));
251     PetscCall(VecStashGetInfo_Private(&X->bstash, &nstash, &reallocs));
252     PetscCall(PetscInfo(X, "Block-Stash has %" PetscInt_FMT " entries, uses %" PetscInt_FMT " mallocs.\n", nstash, reallocs));
253   }
254   PetscFunctionReturn(0);
255 }
256 
257 static PetscErrorCode VecAssemblyEnd_MPI_BTS(Vec X)
258 {
259   Vec_MPI          *x  = (Vec_MPI *)X->data;
260   PetscInt          bs = X->map->bs;
261   PetscMPIInt       npending, *some_indices, r;
262   MPI_Status       *some_statuses;
263   PetscScalar      *xarray;
264   VecAssemblyFrame *frame;
265 
266   PetscFunctionBegin;
267   if (X->stash.donotstash) {
268     X->stash.insertmode  = NOT_SET_VALUES;
269     X->bstash.insertmode = NOT_SET_VALUES;
270     PetscFunctionReturn(0);
271   }
272 
273   PetscCheck(x->segrecvframe, PETSC_COMM_SELF, PETSC_ERR_PLIB, "Missing segrecvframe! Probably you forgot to call VecAssemblyBegin first");
274   PetscCall(VecGetArray(X, &xarray));
275   PetscCall(PetscSegBufferExtractInPlace(x->segrecvframe, &frame));
276   PetscCall(PetscMalloc2(4 * x->nrecvranks, &some_indices, x->use_status ? 4 * x->nrecvranks : 0, &some_statuses));
277   for (r = 0, npending = 0; r < x->nrecvranks; r++) npending += frame[r].pendings + frame[r].pendingb;
278   while (npending > 0) {
279     PetscMPIInt ndone = 0, ii;
280     /* Filling MPI_Status fields requires some resources from the MPI library.  We skip it on the first assembly, or
281      * when VEC_SUBSET_OFF_PROC_ENTRIES has not been set, because in those cases the rendezvous exchanges the exact
282      * message sizes.  When the rendezvous is elided on later assemblies, however, we use MPI_Status to obtain the
283      * actual message lengths, so that a subsequent assembly may set only a subset of the previously sent values. */
284     PetscCallMPI(MPI_Waitsome(4 * x->nrecvranks, x->recvreqs, &ndone, some_indices, x->use_status ? some_statuses : MPI_STATUSES_IGNORE));
285     for (ii = 0; ii < ndone; ii++) {
286       PetscInt     i     = some_indices[ii] / 4, j, k;
287       InsertMode   imode = (InsertMode)x->recvhdr[i].insertmode;
288       PetscInt    *recvint;
289       PetscScalar *recvscalar;
290       PetscBool    intmsg   = (PetscBool)(some_indices[ii] % 2 == 0);
291       PetscBool    blockmsg = (PetscBool)((some_indices[ii] % 4) / 2 == 1);
292       npending--;
293       if (!blockmsg) { /* Scalar stash */
294         PetscMPIInt count;
295         if (--frame[i].pendings > 0) continue;
296         if (x->use_status) {
297           PetscCallMPI(MPI_Get_count(&some_statuses[ii], intmsg ? MPIU_INT : MPIU_SCALAR, &count));
298         } else count = x->recvhdr[i].count;
299         for (j = 0, recvint = frame[i].ints, recvscalar = frame[i].scalars; j < count; j++, recvint++) {
300           PetscInt loc = *recvint - X->map->rstart;
301           PetscCheck(*recvint >= X->map->rstart && X->map->rend > *recvint, PETSC_COMM_SELF, PETSC_ERR_PLIB, "Received vector entry %" PetscInt_FMT " out of local range [%" PetscInt_FMT ",%" PetscInt_FMT ")", *recvint, X->map->rstart, X->map->rend);
302           switch (imode) {
303           case ADD_VALUES:
304             xarray[loc] += *recvscalar++;
305             break;
306           case INSERT_VALUES:
307             xarray[loc] = *recvscalar++;
308             break;
309           default:
310             SETERRQ(PETSC_COMM_SELF, PETSC_ERR_SUP, "Insert mode not supported 0x%x", imode);
311           }
312         }
313       } else { /* Block stash */
314         PetscMPIInt count;
315         if (--frame[i].pendingb > 0) continue;
316         if (x->use_status) {
317           PetscCallMPI(MPI_Get_count(&some_statuses[ii], intmsg ? MPIU_INT : MPIU_SCALAR, &count));
318           if (!intmsg) count /= bs; /* Convert from number of scalars to number of blocks */
319         } else count = x->recvhdr[i].bcount;
320         for (j = 0, recvint = frame[i].intb, recvscalar = frame[i].scalarb; j < count; j++, recvint++) {
321           PetscInt loc = (*recvint) * bs - X->map->rstart;
322           switch (imode) {
323           case ADD_VALUES:
324             for (k = loc; k < loc + bs; k++) xarray[k] += *recvscalar++;
325             break;
326           case INSERT_VALUES:
327             for (k = loc; k < loc + bs; k++) xarray[k] = *recvscalar++;
328             break;
329           default:
330             SETERRQ(PETSC_COMM_SELF, PETSC_ERR_SUP, "Insert mode not supported 0x%x", imode);
331           }
332         }
333       }
334     }
335   }
336   PetscCall(VecRestoreArray(X, &xarray));
337   PetscCallMPI(MPI_Waitall(4 * x->nsendranks, x->sendreqs, MPI_STATUSES_IGNORE));
338   PetscCall(PetscFree2(some_indices, some_statuses));
339   if (x->assembly_subset) {
340     PetscCall(PetscSegBufferExtractInPlace(x->segrecvint, NULL));
341     PetscCall(PetscSegBufferExtractInPlace(x->segrecvscalar, NULL));
342   } else {
343     PetscCall(VecAssemblyReset_MPI(X));
344   }
345 
346   X->stash.insertmode  = NOT_SET_VALUES;
347   X->bstash.insertmode = NOT_SET_VALUES;
348   PetscCall(VecStashScatterEnd_Private(&X->stash));
349   PetscCall(VecStashScatterEnd_Private(&X->bstash));
350   PetscFunctionReturn(0);
351 }
352 
353 PetscErrorCode VecAssemblyReset_MPI(Vec X)
354 {
355   Vec_MPI *x = (Vec_MPI *)X->data;
356 
357   PetscFunctionBegin;
358   PetscCall(PetscFree(x->sendreqs));
359   PetscCall(PetscFree(x->recvreqs));
360   PetscCall(PetscFree(x->sendranks));
361   PetscCall(PetscFree(x->recvranks));
362   PetscCall(PetscFree(x->sendhdr));
363   PetscCall(PetscFree(x->recvhdr));
364   PetscCall(PetscFree(x->sendptrs));
365   PetscCall(PetscSegBufferDestroy(&x->segrecvint));
366   PetscCall(PetscSegBufferDestroy(&x->segrecvscalar));
367   PetscCall(PetscSegBufferDestroy(&x->segrecvframe));
368   PetscFunctionReturn(0);
369 }
370 
371 static PetscErrorCode VecSetFromOptions_MPI(Vec X, PetscOptionItems *PetscOptionsObject)
372 {
373 #if !defined(PETSC_HAVE_MPIUNI)
374   PetscBool flg = PETSC_FALSE, set;
375 
376   PetscFunctionBegin;
377   PetscOptionsHeadBegin(PetscOptionsObject, "VecMPI Options");
378   PetscCall(PetscOptionsBool("-vec_assembly_legacy", "Use MPI 1 version of assembly", "", flg, &flg, &set));
379   if (set) {
380     X->ops->assemblybegin = flg ? VecAssemblyBegin_MPI : VecAssemblyBegin_MPI_BTS;
381     X->ops->assemblyend   = flg ? VecAssemblyEnd_MPI : VecAssemblyEnd_MPI_BTS;
382   }
383   PetscOptionsHeadEnd();
384 #else
385   PetscFunctionBegin;
386   X->ops->assemblybegin = VecAssemblyBegin_MPI;
387   X->ops->assemblyend   = VecAssemblyEnd_MPI;
388 #endif
389   PetscFunctionReturn(0);
390 }
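
/* Run-time selection sketch: passing the option
     -vec_assembly_legacy
   (on the command line or in the options database) makes this routine switch the vector back to the
   MPI-1 point-to-point assembly (VecAssemblyBegin_MPI/VecAssemblyEnd_MPI) instead of the default
   build-two-sided (BTS) assembly implemented above. */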
391 
392 static struct _VecOps DvOps = {PetscDesignatedInitializer(duplicate, VecDuplicate_MPI), /* 1 */
393                                PetscDesignatedInitializer(duplicatevecs, VecDuplicateVecs_Default),
394                                PetscDesignatedInitializer(destroyvecs, VecDestroyVecs_Default),
395                                PetscDesignatedInitializer(dot, VecDot_MPI),
396                                PetscDesignatedInitializer(mdot, VecMDot_MPI),
397                                PetscDesignatedInitializer(norm, VecNorm_MPI),
398                                PetscDesignatedInitializer(tdot, VecTDot_MPI),
399                                PetscDesignatedInitializer(mtdot, VecMTDot_MPI),
400                                PetscDesignatedInitializer(scale, VecScale_Seq),
401                                PetscDesignatedInitializer(copy, VecCopy_Seq), /* 10 */
402                                PetscDesignatedInitializer(set, VecSet_Seq),
403                                PetscDesignatedInitializer(swap, VecSwap_Seq),
404                                PetscDesignatedInitializer(axpy, VecAXPY_Seq),
405                                PetscDesignatedInitializer(axpby, VecAXPBY_Seq),
406                                PetscDesignatedInitializer(maxpy, VecMAXPY_Seq),
407                                PetscDesignatedInitializer(aypx, VecAYPX_Seq),
408                                PetscDesignatedInitializer(waxpy, VecWAXPY_Seq),
409                                PetscDesignatedInitializer(axpbypcz, VecAXPBYPCZ_Seq),
410                                PetscDesignatedInitializer(pointwisemult, VecPointwiseMult_Seq),
411                                PetscDesignatedInitializer(pointwisedivide, VecPointwiseDivide_Seq),
412                                PetscDesignatedInitializer(setvalues, VecSetValues_MPI), /* 20 */
413                                PetscDesignatedInitializer(assemblybegin, VecAssemblyBegin_MPI_BTS),
414                                PetscDesignatedInitializer(assemblyend, VecAssemblyEnd_MPI_BTS),
415                                PetscDesignatedInitializer(getarray, NULL),
416                                PetscDesignatedInitializer(getsize, VecGetSize_MPI),
417                                PetscDesignatedInitializer(getlocalsize, VecGetSize_Seq),
418                                PetscDesignatedInitializer(restorearray, NULL),
419                                PetscDesignatedInitializer(max, VecMax_MPI),
420                                PetscDesignatedInitializer(min, VecMin_MPI),
421                                PetscDesignatedInitializer(setrandom, VecSetRandom_Seq),
422                                PetscDesignatedInitializer(setoption, VecSetOption_MPI),
423                                PetscDesignatedInitializer(setvaluesblocked, VecSetValuesBlocked_MPI),
424                                PetscDesignatedInitializer(destroy, VecDestroy_MPI),
425                                PetscDesignatedInitializer(view, VecView_MPI),
426                                PetscDesignatedInitializer(placearray, VecPlaceArray_MPI),
427                                PetscDesignatedInitializer(replacearray, VecReplaceArray_Seq),
428                                PetscDesignatedInitializer(dot_local, VecDot_Seq),
429                                PetscDesignatedInitializer(tdot_local, VecTDot_Seq),
430                                PetscDesignatedInitializer(norm_local, VecNorm_Seq),
431                                PetscDesignatedInitializer(mdot_local, VecMDot_Seq),
432                                PetscDesignatedInitializer(mtdot_local, VecMTDot_Seq),
433                                PetscDesignatedInitializer(load, VecLoad_Default),
434                                PetscDesignatedInitializer(reciprocal, VecReciprocal_Default),
435                                PetscDesignatedInitializer(conjugate, VecConjugate_Seq),
436                                PetscDesignatedInitializer(setlocaltoglobalmapping, NULL),
437                                PetscDesignatedInitializer(setvalueslocal, NULL),
438                                PetscDesignatedInitializer(resetarray, VecResetArray_MPI),
439                                PetscDesignatedInitializer(setfromoptions, VecSetFromOptions_MPI), /* set from options */
440                                PetscDesignatedInitializer(maxpointwisedivide, VecMaxPointwiseDivide_Seq),
441                                PetscDesignatedInitializer(pointwisemax, VecPointwiseMax_Seq),
442                                PetscDesignatedInitializer(pointwisemaxabs, VecPointwiseMaxAbs_Seq),
443                                PetscDesignatedInitializer(pointwisemin, VecPointwiseMin_Seq),
444                                PetscDesignatedInitializer(getvalues, VecGetValues_MPI),
445                                PetscDesignatedInitializer(sqrt, NULL),
446                                PetscDesignatedInitializer(abs, NULL),
447                                PetscDesignatedInitializer(exp, NULL),
448                                PetscDesignatedInitializer(log, NULL),
449                                PetscDesignatedInitializer(shift, NULL),
450                                PetscDesignatedInitializer(create, NULL), /* really? */
451                                PetscDesignatedInitializer(stridegather, VecStrideGather_Default),
452                                PetscDesignatedInitializer(stridescatter, VecStrideScatter_Default),
453                                PetscDesignatedInitializer(dotnorm2, NULL),
454                                PetscDesignatedInitializer(getsubvector, NULL),
455                                PetscDesignatedInitializer(restoresubvector, NULL),
456                                PetscDesignatedInitializer(getarrayread, NULL),
457                                PetscDesignatedInitializer(restorearrayread, NULL),
458                                PetscDesignatedInitializer(stridesubsetgather, VecStrideSubSetGather_Default),
459                                PetscDesignatedInitializer(stridesubsetscatter, VecStrideSubSetScatter_Default),
460                                PetscDesignatedInitializer(viewnative, VecView_MPI),
461                                PetscDesignatedInitializer(loadnative, NULL),
462                                PetscDesignatedInitializer(createlocalvector, NULL),
463                                PetscDesignatedInitializer(getlocalvector, NULL),
464                                PetscDesignatedInitializer(restorelocalvector, NULL),
465                                PetscDesignatedInitializer(getlocalvectorread, NULL),
466                                PetscDesignatedInitializer(restorelocalvectorread, NULL),
467                                PetscDesignatedInitializer(bindtocpu, NULL),
468                                PetscDesignatedInitializer(getarraywrite, NULL),
469                                PetscDesignatedInitializer(restorearraywrite, NULL),
470                                PetscDesignatedInitializer(getarrayandmemtype, NULL),
471                                PetscDesignatedInitializer(restorearrayandmemtype, NULL),
472                                PetscDesignatedInitializer(getarrayreadandmemtype, NULL),
473                                PetscDesignatedInitializer(restorearrayreadandmemtype, NULL),
474                                PetscDesignatedInitializer(getarraywriteandmemtype, NULL),
475                                PetscDesignatedInitializer(restorearraywriteandmemtype, NULL),
476                                PetscDesignatedInitializer(concatenate, NULL),
477                                PetscDesignatedInitializer(sum, NULL),
478                                PetscDesignatedInitializer(setpreallocationcoo, VecSetPreallocationCOO_MPI),
479                                PetscDesignatedInitializer(setvaluescoo, VecSetValuesCOO_MPI)};
480 
481 /*
482     VecCreate_MPI_Private - Basic create routine called by VecCreate_MPI() (i.e. VecCreateMPI()),
483     VecCreateMPIWithArray(), VecCreate_Shared() (i.e. VecCreateShared()), VecCreateGhost(),
484     VecDuplicate_MPI(), VecCreateGhostWithArray(), and VecDuplicate_Shared()
485 
486     If alloc is true and array is NULL then this routine allocates the space, otherwise
487     no space is allocated.
488 */
489 PetscErrorCode VecCreate_MPI_Private(Vec v, PetscBool alloc, PetscInt nghost, const PetscScalar array[])
490 {
491   Vec_MPI *s;
492 
493   PetscFunctionBegin;
494   PetscCall(PetscNew(&s));
495   v->data = (void *)s;
496   PetscCall(PetscMemcpy(v->ops, &DvOps, sizeof(DvOps)));
497   s->nghost      = nghost;
498   v->petscnative = PETSC_TRUE;
499   if (array) v->offloadmask = PETSC_OFFLOAD_CPU;
500 
501   PetscCall(PetscLayoutSetUp(v->map));
502 
503   s->array           = (PetscScalar *)array;
504   s->array_allocated = NULL;
505   if (alloc && !array) {
506     PetscInt n = v->map->n + nghost;
507     PetscCall(PetscCalloc1(n, &s->array));
508     s->array_allocated = s->array;
509   }
510 
511   /* By default parallel vectors do not have local representation */
512   s->localrep    = NULL;
513   s->localupdate = NULL;
514 
515   v->stash.insertmode  = NOT_SET_VALUES;
516   v->bstash.insertmode = NOT_SET_VALUES;
517   /* create the stashes. The block-size for bstash is set later when
518      VecSetValuesBlocked is called.
519   */
520   PetscCall(VecStashCreate_Private(PetscObjectComm((PetscObject)v), 1, &v->stash));
521   PetscCall(VecStashCreate_Private(PetscObjectComm((PetscObject)v), PetscAbs(v->map->bs), &v->bstash));
522 
523 #if defined(PETSC_HAVE_MATLAB)
524   PetscCall(PetscObjectComposeFunction((PetscObject)v, "PetscMatlabEnginePut_C", VecMatlabEnginePut_Default));
525   PetscCall(PetscObjectComposeFunction((PetscObject)v, "PetscMatlabEngineGet_C", VecMatlabEngineGet_Default));
526 #endif
527   PetscCall(PetscObjectChangeTypeName((PetscObject)v, VECMPI));
528   PetscFunctionReturn(0);
529 }
530 
531 /*MC
532    VECMPI - VECMPI = "mpi" - The basic parallel vector
533 
534    Options Database Keys:
535 . -vec_type mpi - sets the vector type to VECMPI during a call to VecSetFromOptions()
536 
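   Example Usage (a minimal sketch; error handling through PetscCall() as usual):
.vb
   Vec v;
   PetscCall(VecCreate(PETSC_COMM_WORLD, &v));
   PetscCall(VecSetSizes(v, PETSC_DECIDE, 100));
   PetscCall(VecSetType(v, VECMPI));
   PetscCall(VecSet(v, 1.0));
   PetscCall(VecDestroy(&v));
.ve
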
537   Level: beginner
538 
539 .seealso: `VecCreate()`, `VecSetType()`, `VecSetFromOptions()`, `VecCreateMPIWithArray()`, `VecType`, `VecCreateMPI()`
540 M*/
541 
542 PetscErrorCode VecCreate_MPI(Vec vv)
543 {
544   PetscFunctionBegin;
545   PetscCall(VecCreate_MPI_Private(vv, PETSC_TRUE, 0, NULL));
546   PetscFunctionReturn(0);
547 }
548 
549 /*MC
550    VECSTANDARD = "standard" - A VECSEQ on one process and VECMPI on more than one process
551 
552    Options Database Keys:
553 . -vec_type standard - sets the vector type to VECSTANDARD during a call to VecSetFromOptions()
554 
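   Example Usage (a minimal sketch; the type is taken from the options database):
.vb
   Vec v;
   PetscCall(VecCreate(PETSC_COMM_WORLD, &v));
   PetscCall(VecSetSizes(v, PETSC_DECIDE, 100));
   PetscCall(VecSetFromOptions(v)); /* -vec_type standard gives VECSEQ on one rank, VECMPI otherwise */
   PetscCall(VecDestroy(&v));
.ve
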
555   Level: beginner
556 
557 .seealso: `VecCreateSeq()`, `VecCreateMPI()`
558 M*/
559 
560 PETSC_EXTERN PetscErrorCode VecCreate_Standard(Vec v)
561 {
562   PetscMPIInt size;
563 
564   PetscFunctionBegin;
565   PetscCallMPI(MPI_Comm_size(PetscObjectComm((PetscObject)v), &size));
566   if (size == 1) {
567     PetscCall(VecSetType(v, VECSEQ));
568   } else {
569     PetscCall(VecSetType(v, VECMPI));
570   }
571   PetscFunctionReturn(0);
572 }
573 
574 /*@C
575    VecCreateMPIWithArray - Creates a parallel, array-style vector,
576    where the user provides the array space to store the vector values.
577 
578    Collective
579 
580    Input Parameters:
581 +  comm  - the MPI communicator to use
582 .  bs    - block size, same meaning as VecSetBlockSize()
583 .  n     - local vector length, cannot be PETSC_DECIDE
584 .  N     - global vector length (or PETSC_DECIDE to have it calculated)
585 -  array - the user provided array to store the vector values
586 
587    Output Parameter:
588 .  vv - the vector
589 
590    Notes:
591    Use VecDuplicate() or VecDuplicateVecs() to form additional vectors of the
592    same type as an existing vector.
593 
594    If the user-provided array is NULL, then VecPlaceArray() can be used
595    at a later stage to SET the array for storing the vector values.
596 
597    PETSc does NOT free the array when the vector is destroyed via VecDestroy().
598    The user should not free the array until the vector is destroyed.
599 
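   Example Usage (a minimal sketch; each rank supplies its own array of length at least n):
.vb
   PetscInt    n = 5;
   PetscScalar array[5];
   Vec         v;
   PetscCall(VecCreateMPIWithArray(PETSC_COMM_WORLD, 1, n, PETSC_DECIDE, array, &v));
   PetscCall(VecSet(v, 1.0));
   PetscCall(VecDestroy(&v)); /* the array is NOT freed here; it remains owned by the caller */
.ve
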
600    Level: intermediate
601 
602 .seealso: `VecCreateSeqWithArray()`, `VecCreate()`, `VecDuplicate()`, `VecDuplicateVecs()`, `VecCreateGhost()`,
603           `VecCreateMPI()`, `VecCreateGhostWithArray()`, `VecPlaceArray()`
604 
605 @*/
606 PetscErrorCode VecCreateMPIWithArray(MPI_Comm comm, PetscInt bs, PetscInt n, PetscInt N, const PetscScalar array[], Vec *vv)
607 {
608   PetscFunctionBegin;
609   PetscCheck(n != PETSC_DECIDE, PETSC_COMM_SELF, PETSC_ERR_ARG_OUTOFRANGE, "Must set local size of vector");
610   PetscCall(PetscSplitOwnership(comm, &n, &N));
611   PetscCall(VecCreate(comm, vv));
612   PetscCall(VecSetSizes(*vv, n, N));
613   PetscCall(VecSetBlockSize(*vv, bs));
614   PetscCall(VecCreate_MPI_Private(*vv, PETSC_FALSE, 0, array));
615   PetscFunctionReturn(0);
616 }
617 
618 /*@C
619    VecCreateGhostWithArray - Creates a parallel vector with ghost padding on each processor;
620    the caller allocates the array space.
621 
622    Collective
623 
624    Input Parameters:
625 +  comm - the MPI communicator to use
626 .  n - local vector length
627 .  N - global vector length (or PETSC_DECIDE to have it calculated if n is given)
628 .  nghost - number of local ghost points
629 .  ghosts - global indices of ghost points (or NULL if not needed), these do not need to be in increasing order (sorted)
630 -  array - the space to store the vector values (must be of length at least n + nghost)
631 
632    Output Parameter:
633 .  vv - the global vector representation (without ghost points as part of vector)
634 
635    Notes:
636    Use VecGhostGetLocalForm() to access the local, ghosted representation
637    of the vector.
638 
639    This also automatically sets the ISLocalToGlobalMapping() for this vector.
640 
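   Example Usage (a minimal sketch; n, nghost, ghosts[], and the caller-provided array of length n + nghost are placeholders):
.vb
   Vec v;
   PetscCall(VecCreateGhostWithArray(PETSC_COMM_WORLD, n, PETSC_DECIDE, nghost, ghosts, array, &v));
   PetscCall(VecGhostUpdateBegin(v, INSERT_VALUES, SCATTER_FORWARD));
   PetscCall(VecGhostUpdateEnd(v, INSERT_VALUES, SCATTER_FORWARD));
.ve
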
641    Level: advanced
642 
643 .seealso: `VecCreate()`, `VecGhostGetLocalForm()`, `VecGhostRestoreLocalForm()`,
644           `VecCreateGhost()`, `VecCreateSeqWithArray()`, `VecCreateMPIWithArray()`,
645           `VecCreateGhostBlock()`, `VecCreateGhostBlockWithArray()`, `VecMPISetGhost()`
646 
647 @*/
648 PetscErrorCode VecCreateGhostWithArray(MPI_Comm comm, PetscInt n, PetscInt N, PetscInt nghost, const PetscInt ghosts[], const PetscScalar array[], Vec *vv)
649 {
650   Vec_MPI               *w;
651   PetscScalar           *larray;
652   IS                     from, to;
653   ISLocalToGlobalMapping ltog;
654   PetscInt               rstart, i, *indices;
655 
656   PetscFunctionBegin;
657   *vv = NULL;
658 
659   PetscCheck(n != PETSC_DECIDE, PETSC_COMM_SELF, PETSC_ERR_ARG_OUTOFRANGE, "Must set local size");
660   PetscCheck(nghost != PETSC_DECIDE, PETSC_COMM_SELF, PETSC_ERR_ARG_OUTOFRANGE, "Must set local ghost size");
661   PetscCheck(nghost >= 0, PETSC_COMM_SELF, PETSC_ERR_ARG_OUTOFRANGE, "Ghost length must be >= 0");
662   PetscCall(PetscSplitOwnership(comm, &n, &N));
663   /* Create global representation */
664   PetscCall(VecCreate(comm, vv));
665   PetscCall(VecSetSizes(*vv, n, N));
666   PetscCall(VecCreate_MPI_Private(*vv, PETSC_TRUE, nghost, array));
667   w = (Vec_MPI *)(*vv)->data;
668   /* Create local representation */
669   PetscCall(VecGetArray(*vv, &larray));
670   PetscCall(VecCreateSeqWithArray(PETSC_COMM_SELF, 1, n + nghost, larray, &w->localrep));
671   PetscCall(VecRestoreArray(*vv, &larray));
672 
673   /*
674        Create scatter context for scattering (updating) ghost values
675   */
676   PetscCall(ISCreateGeneral(comm, nghost, ghosts, PETSC_COPY_VALUES, &from));
677   PetscCall(ISCreateStride(PETSC_COMM_SELF, nghost, n, 1, &to));
678   PetscCall(VecScatterCreate(*vv, from, w->localrep, to, &w->localupdate));
679   PetscCall(ISDestroy(&to));
680   PetscCall(ISDestroy(&from));
681 
682   /* set local to global mapping for ghosted vector */
683   PetscCall(PetscMalloc1(n + nghost, &indices));
684   PetscCall(VecGetOwnershipRange(*vv, &rstart, NULL));
685   for (i = 0; i < n; i++) indices[i] = rstart + i;
686   for (i = 0; i < nghost; i++) indices[n + i] = ghosts[i];
687   PetscCall(ISLocalToGlobalMappingCreate(comm, 1, n + nghost, indices, PETSC_OWN_POINTER, &ltog));
688   PetscCall(VecSetLocalToGlobalMapping(*vv, ltog));
689   PetscCall(ISLocalToGlobalMappingDestroy(&ltog));
690   PetscFunctionReturn(0);
691 }
692 
693 /*@
694    VecCreateGhost - Creates a parallel vector with ghost padding on each processor.
695 
696    Collective
697 
698    Input Parameters:
699 +  comm - the MPI communicator to use
700 .  n - local vector length
701 .  N - global vector length (or PETSC_DECIDE to have it calculated if n is given)
702 .  nghost - number of local ghost points
703 -  ghosts - global indices of ghost points, these do not need to be in increasing order (sorted)
704 
705    Output Parameter:
706 .  vv - the global vector representation (without ghost points as part of vector)
707 
708    Notes:
709    Use VecGhostGetLocalForm() to access the local, ghosted representation
710    of the vector.
711 
712    This also automatically sets the ISLocalToGlobalMapping() for this vector.
713 
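   Example Usage (a minimal sketch; assumes exactly two MPI ranks, each owning two entries and ghosting one entry of the other rank):
.vb
   PetscInt    ghosts[1];
   PetscMPIInt rank;
   Vec         v, vlocal;

   PetscCallMPI(MPI_Comm_rank(PETSC_COMM_WORLD, &rank));
   ghosts[0] = rank ? 0 : 2; /* first entry owned by the other rank */
   PetscCall(VecCreateGhost(PETSC_COMM_WORLD, 2, PETSC_DECIDE, 1, ghosts, &v));
   PetscCall(VecSet(v, (PetscScalar)rank));
   PetscCall(VecGhostUpdateBegin(v, INSERT_VALUES, SCATTER_FORWARD));
   PetscCall(VecGhostUpdateEnd(v, INSERT_VALUES, SCATTER_FORWARD));
   PetscCall(VecGhostGetLocalForm(v, &vlocal));    /* entries 0..1 are owned, entry 2 is the ghost */
   PetscCall(VecGhostRestoreLocalForm(v, &vlocal));
   PetscCall(VecDestroy(&v));
.ve
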
714    Level: advanced
715 
716 .seealso: `VecCreateSeq()`, `VecCreate()`, `VecDuplicate()`, `VecDuplicateVecs()`, `VecCreateMPI()`,
717           `VecGhostGetLocalForm()`, `VecGhostRestoreLocalForm()`, `VecGhostUpdateBegin()`,
718           `VecCreateGhostWithArray()`, `VecCreateMPIWithArray()`, `VecGhostUpdateEnd()`,
719           `VecCreateGhostBlock()`, `VecCreateGhostBlockWithArray()`, `VecMPISetGhost()`
720 
721 @*/
722 PetscErrorCode VecCreateGhost(MPI_Comm comm, PetscInt n, PetscInt N, PetscInt nghost, const PetscInt ghosts[], Vec *vv)
723 {
724   PetscFunctionBegin;
725   PetscCall(VecCreateGhostWithArray(comm, n, N, nghost, ghosts, NULL, vv));
726   PetscFunctionReturn(0);
727 }
728 
729 /*@
730    VecMPISetGhost - Sets the ghost points for an MPI ghost vector
731 
732    Collective on Vec
733 
734    Input Parameters:
735 +  vv - the MPI vector
736 .  nghost - number of local ghost points
737 -  ghosts - global indices of ghost points, these do not need to be in increasing order (sorted)
738 
739    Notes:
740    Use VecGhostGetLocalForm() to access the local, ghosted representation
741    of the vector.
742 
743    This also automatically sets the ISLocalToGlobalMapping() for this vector.
744 
745    You must call this AFTER you have set the type of the vector (with VecSetType()) and the size (with VecSetSizes()).
746 
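   Example Usage (a minimal sketch; n, nghost, and ghosts[] are placeholders chosen by the caller):
.vb
   Vec v;
   PetscCall(VecCreate(PETSC_COMM_WORLD, &v));
   PetscCall(VecSetSizes(v, n, PETSC_DECIDE));
   PetscCall(VecSetType(v, VECMPI));
   PetscCall(VecMPISetGhost(v, nghost, ghosts));
.ve
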
747    Level: advanced
748 
749 .seealso: `VecCreateSeq()`, `VecCreate()`, `VecDuplicate()`, `VecDuplicateVecs()`, `VecCreateMPI()`,
750           `VecGhostGetLocalForm()`, `VecGhostRestoreLocalForm()`, `VecGhostUpdateBegin()`,
751           `VecCreateGhostWithArray()`, `VecCreateMPIWithArray()`, `VecGhostUpdateEnd()`,
752           `VecCreateGhostBlock()`, `VecCreateGhostBlockWithArray()`
753 
754 @*/
755 PetscErrorCode VecMPISetGhost(Vec vv, PetscInt nghost, const PetscInt ghosts[])
756 {
757   PetscBool flg;
758 
759   PetscFunctionBegin;
760   PetscCall(PetscObjectTypeCompare((PetscObject)vv, VECMPI, &flg));
761   /* if this is already a fully created VECMPI, destroy its internals and rebuild it with ghosting */
762   if (flg) {
763     PetscInt               n, N;
764     Vec_MPI               *w;
765     PetscScalar           *larray;
766     IS                     from, to;
767     ISLocalToGlobalMapping ltog;
768     PetscInt               rstart, i, *indices;
769     MPI_Comm               comm;
770 
771     PetscCall(PetscObjectGetComm((PetscObject)vv, &comm));
772     n = vv->map->n;
773     N = vv->map->N;
774     PetscUseTypeMethod(vv, destroy);
775     PetscCall(VecSetSizes(vv, n, N));
776     PetscCall(VecCreate_MPI_Private(vv, PETSC_TRUE, nghost, NULL));
777     w = (Vec_MPI *)(vv)->data;
778     /* Create local representation */
779     PetscCall(VecGetArray(vv, &larray));
780     PetscCall(VecCreateSeqWithArray(PETSC_COMM_SELF, 1, n + nghost, larray, &w->localrep));
781     PetscCall(VecRestoreArray(vv, &larray));
782 
783     /*
784      Create scatter context for scattering (updating) ghost values
785      */
786     PetscCall(ISCreateGeneral(comm, nghost, ghosts, PETSC_COPY_VALUES, &from));
787     PetscCall(ISCreateStride(PETSC_COMM_SELF, nghost, n, 1, &to));
788     PetscCall(VecScatterCreate(vv, from, w->localrep, to, &w->localupdate));
789     PetscCall(ISDestroy(&to));
790     PetscCall(ISDestroy(&from));
791 
792     /* set local to global mapping for ghosted vector */
793     PetscCall(PetscMalloc1(n + nghost, &indices));
794     PetscCall(VecGetOwnershipRange(vv, &rstart, NULL));
795 
796     for (i = 0; i < n; i++) indices[i] = rstart + i;
797     for (i = 0; i < nghost; i++) indices[n + i] = ghosts[i];
798 
799     PetscCall(ISLocalToGlobalMappingCreate(comm, 1, n + nghost, indices, PETSC_OWN_POINTER, &ltog));
800     PetscCall(VecSetLocalToGlobalMapping(vv, ltog));
801     PetscCall(ISLocalToGlobalMappingDestroy(&ltog));
802   } else {
803     PetscCheck(vv->ops->create != VecCreate_MPI, PetscObjectComm((PetscObject)vv), PETSC_ERR_ARG_WRONGSTATE, "Must set local or global size before setting ghosting");
804     PetscCheck(((PetscObject)vv)->type_name, PetscObjectComm((PetscObject)vv), PETSC_ERR_ARG_WRONGSTATE, "Must set type to VECMPI before ghosting");
805   }
806   PetscFunctionReturn(0);
807 }
808 
809 /* ------------------------------------------------------------------------------------------*/
810 /*@C
811    VecCreateGhostBlockWithArray - Creates a parallel vector with ghost padding on each processor;
812    the caller allocates the array space. Indices in the ghost region are based on blocks.
813 
814    Collective
815 
816    Input Parameters:
817 +  comm - the MPI communicator to use
818 .  bs - block size
819 .  n - local vector length
820 .  N - global vector length (or PETSC_DECIDE to have it calculated if n is given)
821 .  nghost - number of local ghost blocks
822 .  ghosts - global indices of ghost blocks (or NULL if not needed), counts are by block not by index, these do not need to be in increasing order (sorted)
823 -  array - the space to store the vector values (must be of length at least n + nghost*bs)
824 
825    Output Parameter:
826 .  vv - the global vector representation (without ghost points as part of vector)
827 
828    Notes:
829    Use VecGhostGetLocalForm() to access the local, ghosted representation
830    of the vector.
831 
832    n is the local vector size (the total local size, not the number of blocks), while nghost
833    is the number of blocks in the ghost portion, i.e., the number of elements in the ghost
834    portion is bs*nghost
835 
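   Example Usage (a minimal sketch; bs, n, nghost, ghosts[], and the array of length at least n + bs*nghost are placeholders):
.vb
   Vec v;
   PetscCall(VecCreateGhostBlockWithArray(PETSC_COMM_WORLD, bs, n, PETSC_DECIDE, nghost, ghosts, array, &v));
   PetscCall(VecGhostUpdateBegin(v, INSERT_VALUES, SCATTER_FORWARD));
   PetscCall(VecGhostUpdateEnd(v, INSERT_VALUES, SCATTER_FORWARD));
.ve
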
836    Level: advanced
837 
838 .seealso: `VecCreate()`, `VecGhostGetLocalForm()`, `VecGhostRestoreLocalForm()`,
839           `VecCreateGhost()`, `VecCreateSeqWithArray()`, `VecCreateMPIWithArray()`,
840           `VecCreateGhostWithArray()`, `VecCreateGhostBlock()`
841 
842 @*/
843 PetscErrorCode VecCreateGhostBlockWithArray(MPI_Comm comm, PetscInt bs, PetscInt n, PetscInt N, PetscInt nghost, const PetscInt ghosts[], const PetscScalar array[], Vec *vv)
844 {
845   Vec_MPI               *w;
846   PetscScalar           *larray;
847   IS                     from, to;
848   ISLocalToGlobalMapping ltog;
849   PetscInt               rstart, i, nb, *indices;
850 
851   PetscFunctionBegin;
852   *vv = NULL;
853 
854   PetscCheck(n != PETSC_DECIDE, PETSC_COMM_SELF, PETSC_ERR_ARG_OUTOFRANGE, "Must set local size");
855   PetscCheck(nghost != PETSC_DECIDE, PETSC_COMM_SELF, PETSC_ERR_ARG_OUTOFRANGE, "Must set local ghost size");
856   PetscCheck(nghost >= 0, PETSC_COMM_SELF, PETSC_ERR_ARG_OUTOFRANGE, "Ghost length must be >= 0");
857   PetscCheck(n % bs == 0, PETSC_COMM_SELF, PETSC_ERR_ARG_INCOMP, "Local size must be a multiple of block size");
858   PetscCall(PetscSplitOwnership(comm, &n, &N));
859   /* Create global representation */
860   PetscCall(VecCreate(comm, vv));
861   PetscCall(VecSetSizes(*vv, n, N));
862   PetscCall(VecSetBlockSize(*vv, bs));
863   PetscCall(VecCreate_MPI_Private(*vv, PETSC_TRUE, nghost * bs, array));
864   w = (Vec_MPI *)(*vv)->data;
865   /* Create local representation */
866   PetscCall(VecGetArray(*vv, &larray));
867   PetscCall(VecCreateSeqWithArray(PETSC_COMM_SELF, bs, n + bs * nghost, larray, &w->localrep));
868   PetscCall(VecRestoreArray(*vv, &larray));
869 
870   /*
871        Create scatter context for scattering (updating) ghost values
872   */
873   PetscCall(ISCreateBlock(comm, bs, nghost, ghosts, PETSC_COPY_VALUES, &from));
874   PetscCall(ISCreateStride(PETSC_COMM_SELF, bs * nghost, n, 1, &to));
875   PetscCall(VecScatterCreate(*vv, from, w->localrep, to, &w->localupdate));
876   PetscCall(ISDestroy(&to));
877   PetscCall(ISDestroy(&from));
878 
879   /* set local to global mapping for ghosted vector */
880   nb = n / bs;
881   PetscCall(PetscMalloc1(nb + nghost, &indices));
882   PetscCall(VecGetOwnershipRange(*vv, &rstart, NULL));
883   rstart = rstart / bs;
884 
885   for (i = 0; i < nb; i++) indices[i] = rstart + i;
886   for (i = 0; i < nghost; i++) indices[nb + i] = ghosts[i];
887 
888   PetscCall(ISLocalToGlobalMappingCreate(comm, bs, nb + nghost, indices, PETSC_OWN_POINTER, &ltog));
889   PetscCall(VecSetLocalToGlobalMapping(*vv, ltog));
890   PetscCall(ISLocalToGlobalMappingDestroy(&ltog));
891   PetscFunctionReturn(0);
892 }
893 
894 /*@
895    VecCreateGhostBlock - Creates a parallel vector with ghost padding on each processor.
896         The indexing of the ghost points is done by blocks.
897 
898    Collective
899 
900    Input Parameters:
901 +  comm - the MPI communicator to use
902 .  bs - the block size
903 .  n - local vector length
904 .  N - global vector length (or PETSC_DECIDE to have it calculated if n is given)
905 .  nghost - number of local ghost blocks
906 -  ghosts - global indices of ghost blocks, counts are by block, not by individual index, these do not need to be in increasing order (sorted)
907 
908    Output Parameter:
909 .  vv - the global vector representation (without ghost points as part of vector)
910 
911    Notes:
912    Use VecGhostGetLocalForm() to access the local, ghosted representation
913    of the vector.
914 
915    n is the local vector size (the total local size, not the number of blocks), while nghost
916    is the number of blocks in the ghost portion, i.e., the number of elements in the ghost
917    portion is bs*nghost
918 
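   Example Usage (a minimal sketch; bs, n, nghost, and the nghost global block indices in ghosts[] are placeholders, with n a multiple of bs):
.vb
   Vec v;
   PetscCall(VecCreateGhostBlock(PETSC_COMM_WORLD, bs, n, PETSC_DECIDE, nghost, ghosts, &v));
   PetscCall(VecGhostUpdateBegin(v, INSERT_VALUES, SCATTER_FORWARD));
   PetscCall(VecGhostUpdateEnd(v, INSERT_VALUES, SCATTER_FORWARD));
   PetscCall(VecDestroy(&v));
.ve
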
919    Level: advanced
920 
921 .seealso: `VecCreateSeq()`, `VecCreate()`, `VecDuplicate()`, `VecDuplicateVecs()`, `VecCreateMPI()`,
922           `VecGhostGetLocalForm()`, `VecGhostRestoreLocalForm()`,
923           `VecCreateGhostWithArray()`, `VecCreateMPIWithArray()`, `VecCreateGhostBlockWithArray()`
924 
925 @*/
926 PetscErrorCode VecCreateGhostBlock(MPI_Comm comm, PetscInt bs, PetscInt n, PetscInt N, PetscInt nghost, const PetscInt ghosts[], Vec *vv)
927 {
928   PetscFunctionBegin;
929   PetscCall(VecCreateGhostBlockWithArray(comm, bs, n, N, nghost, ghosts, NULL, vv));
930   PetscFunctionReturn(0);
931 }
932