
/*
   This file contains routines for Parallel vector operations.
 */
#include <petscsys.h>
#include <../src/vec/vec/impls/mpi/pvecimpl.h> /*I  "petscvec.h"   I*/

PetscErrorCode VecDot_MPI(Vec xin, Vec yin, PetscScalar *z) {
  PetscScalar sum, work;

  PetscFunctionBegin;
  PetscCall(VecDot_Seq(xin, yin, &work));
  PetscCall(MPIU_Allreduce(&work, &sum, 1, MPIU_SCALAR, MPIU_SUM, PetscObjectComm((PetscObject)xin)));
  *z = sum;
  PetscFunctionReturn(0);
}

PetscErrorCode VecTDot_MPI(Vec xin, Vec yin, PetscScalar *z) {
  PetscScalar sum, work;

  PetscFunctionBegin;
  PetscCall(VecTDot_Seq(xin, yin, &work));
  PetscCall(MPIU_Allreduce(&work, &sum, 1, MPIU_SCALAR, MPIU_SUM, PetscObjectComm((PetscObject)xin)));
  *z = sum;
  PetscFunctionReturn(0);
}

extern PetscErrorCode VecView_MPI_Draw(Vec, PetscViewer);

static PetscErrorCode VecPlaceArray_MPI(Vec vin, const PetscScalar *a) {
  Vec_MPI *v = (Vec_MPI *)vin->data;

  PetscFunctionBegin;
  PetscCheck(!v->unplacedarray, PETSC_COMM_SELF, PETSC_ERR_ARG_WRONGSTATE, "VecPlaceArray() was already called on this vector, without a call to VecResetArray()");
  v->unplacedarray = v->array; /* save previous array so reset can bring it back */
  v->array         = (PetscScalar *)a;
  if (v->localrep) PetscCall(VecPlaceArray(v->localrep, a));
  PetscFunctionReturn(0);
}

PetscErrorCode VecDuplicate_MPI(Vec win, Vec *v) {
  Vec_MPI     *vw, *w = (Vec_MPI *)win->data;
  PetscScalar *array;

  PetscFunctionBegin;
  PetscCall(VecCreate(PetscObjectComm((PetscObject)win), v));
  PetscCall(PetscLayoutReference(win->map, &(*v)->map));

  PetscCall(VecCreate_MPI_Private(*v, PETSC_TRUE, w->nghost, NULL));
  vw = (Vec_MPI *)(*v)->data;
  PetscCall(PetscMemcpy((*v)->ops, win->ops, sizeof(struct _VecOps)));

  /* save local representation of the parallel vector (and scatter) if it exists */
  if (w->localrep) {
    PetscCall(VecGetArray(*v, &array));
    PetscCall(VecCreateSeqWithArray(PETSC_COMM_SELF, PetscAbs(win->map->bs), win->map->n + w->nghost, array, &vw->localrep));
    PetscCall(PetscMemcpy(vw->localrep->ops, w->localrep->ops, sizeof(struct _VecOps)));
    PetscCall(VecRestoreArray(*v, &array));
    PetscCall(PetscLogObjectParent((PetscObject)*v, (PetscObject)vw->localrep));

    vw->localupdate = w->localupdate;
    if (vw->localupdate) PetscCall(PetscObjectReference((PetscObject)vw->localupdate));
  }

  /* New vector should inherit stashing property of parent */
  (*v)->stash.donotstash   = win->stash.donotstash;
  (*v)->stash.ignorenegidx = win->stash.ignorenegidx;

  PetscCall(PetscObjectListDuplicate(((PetscObject)win)->olist, &((PetscObject)(*v))->olist));
  PetscCall(PetscFunctionListDuplicate(((PetscObject)win)->qlist, &((PetscObject)(*v))->qlist));

  (*v)->map->bs   = PetscAbs(win->map->bs);
  (*v)->bstash.bs = win->bstash.bs;
  PetscFunctionReturn(0);
}

static PetscErrorCode VecSetOption_MPI(Vec V, VecOption op, PetscBool flag) {
  Vec_MPI *v = (Vec_MPI *)V->data;

  PetscFunctionBegin;
  switch (op) {
  case VEC_IGNORE_OFF_PROC_ENTRIES: V->stash.donotstash = flag; break;
  case VEC_IGNORE_NEGATIVE_INDICES: V->stash.ignorenegidx = flag; break;
  case VEC_SUBSET_OFF_PROC_ENTRIES:
    v->assembly_subset = flag;              /* See the same logic in MatAssembly wrt MAT_SUBSET_OFF_PROC_ENTRIES */
    if (!v->assembly_subset) {              /* User indicates "do not reuse the communication pattern" */
      PetscCall(VecAssemblyReset_MPI(V));   /* Reset existing pattern to free memory */
      v->first_assembly_done = PETSC_FALSE; /* Mark the first assembly is not done */
    }
    break;
  }

  PetscFunctionReturn(0);
}

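/*
   Example (a sketch; the vector X, the index row, and the loop bounds are
   illustrative): a user who repeatedly assembles the same, or a subset of the
   same, off-process entries can opt in to reusing the communication pattern:

     PetscCall(VecSetOption(X, VEC_SUBSET_OFF_PROC_ENTRIES, PETSC_TRUE));
     for (PetscInt it = 0; it < nits; it++) {
       PetscCall(VecSetValue(X, row, 1.0, ADD_VALUES)); // same off-process rows each iteration
       PetscCall(VecAssemblyBegin(X));
       PetscCall(VecAssemblyEnd(X));
     }
*/
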
static PetscErrorCode VecResetArray_MPI(Vec vin) {
  Vec_MPI *v = (Vec_MPI *)vin->data;

  PetscFunctionBegin;
  v->array         = v->unplacedarray;
  v->unplacedarray = NULL;
  if (v->localrep) PetscCall(VecResetArray(v->localrep));
  PetscFunctionReturn(0);
}

static PetscErrorCode VecAssemblySend_MPI_Private(MPI_Comm comm, const PetscMPIInt tag[], PetscMPIInt rankid, PetscMPIInt rank, void *sdata, MPI_Request req[], void *ctx) {
  Vec                X   = (Vec)ctx;
  Vec_MPI           *x   = (Vec_MPI *)X->data;
  VecAssemblyHeader *hdr = (VecAssemblyHeader *)sdata;
  PetscInt           bs  = X->map->bs;

  PetscFunctionBegin;
  /* x->first_assembly_done indicates we are reusing a communication network. In that case, some
     messages can be empty, but we have to send them this time if we sent them before because the
     receiver is expecting them.
   */
  if (hdr->count || (x->first_assembly_done && x->sendptrs[rankid].ints)) {
    PetscCallMPI(MPI_Isend(x->sendptrs[rankid].ints, hdr->count, MPIU_INT, rank, tag[0], comm, &req[0]));
    PetscCallMPI(MPI_Isend(x->sendptrs[rankid].scalars, hdr->count, MPIU_SCALAR, rank, tag[1], comm, &req[1]));
  }
  if (hdr->bcount || (x->first_assembly_done && x->sendptrs[rankid].intb)) {
    PetscCallMPI(MPI_Isend(x->sendptrs[rankid].intb, hdr->bcount, MPIU_INT, rank, tag[2], comm, &req[2]));
    PetscCallMPI(MPI_Isend(x->sendptrs[rankid].scalarb, hdr->bcount * bs, MPIU_SCALAR, rank, tag[3], comm, &req[3]));
  }
  PetscFunctionReturn(0);
}

static PetscErrorCode VecAssemblyRecv_MPI_Private(MPI_Comm comm, const PetscMPIInt tag[], PetscMPIInt rank, void *rdata, MPI_Request req[], void *ctx) {
  Vec                X   = (Vec)ctx;
  Vec_MPI           *x   = (Vec_MPI *)X->data;
  VecAssemblyHeader *hdr = (VecAssemblyHeader *)rdata;
  PetscInt           bs  = X->map->bs;
  VecAssemblyFrame  *frame;

  PetscFunctionBegin;
  PetscCall(PetscSegBufferGet(x->segrecvframe, 1, &frame));

  if (hdr->count) {
    PetscCall(PetscSegBufferGet(x->segrecvint, hdr->count, &frame->ints));
    PetscCallMPI(MPI_Irecv(frame->ints, hdr->count, MPIU_INT, rank, tag[0], comm, &req[0]));
    PetscCall(PetscSegBufferGet(x->segrecvscalar, hdr->count, &frame->scalars));
    PetscCallMPI(MPI_Irecv(frame->scalars, hdr->count, MPIU_SCALAR, rank, tag[1], comm, &req[1]));
    frame->pendings = 2;
  } else {
    frame->ints     = NULL;
    frame->scalars  = NULL;
    frame->pendings = 0;
  }

  if (hdr->bcount) {
    PetscCall(PetscSegBufferGet(x->segrecvint, hdr->bcount, &frame->intb));
    PetscCallMPI(MPI_Irecv(frame->intb, hdr->bcount, MPIU_INT, rank, tag[2], comm, &req[2]));
    PetscCall(PetscSegBufferGet(x->segrecvscalar, hdr->bcount * bs, &frame->scalarb));
    PetscCallMPI(MPI_Irecv(frame->scalarb, hdr->bcount * bs, MPIU_SCALAR, rank, tag[3], comm, &req[3]));
    frame->pendingb = 2;
  } else {
    frame->intb     = NULL;
    frame->scalarb  = NULL;
    frame->pendingb = 0;
  }
  PetscFunctionReturn(0);
}

static PetscErrorCode VecAssemblyBegin_MPI_BTS(Vec X) {
  Vec_MPI *x = (Vec_MPI *)X->data;
  MPI_Comm comm;
  PetscInt i, j, jb, bs;

  PetscFunctionBegin;
  if (X->stash.donotstash) PetscFunctionReturn(0);

  PetscCall(PetscObjectGetComm((PetscObject)X, &comm));
  PetscCall(VecGetBlockSize(X, &bs));
  if (PetscDefined(USE_DEBUG)) {
    InsertMode addv;
    PetscCall(MPIU_Allreduce((PetscEnum *)&X->stash.insertmode, (PetscEnum *)&addv, 1, MPIU_ENUM, MPI_BOR, comm));
    PetscCheck(addv != (ADD_VALUES | INSERT_VALUES), comm, PETSC_ERR_ARG_NOTSAMETYPE, "Some processors inserted values while others added");
  }
  X->bstash.insertmode = X->stash.insertmode; /* Block stash implicitly tracks InsertMode of scalar stash */

  PetscCall(VecStashSortCompress_Private(&X->stash));
  PetscCall(VecStashSortCompress_Private(&X->bstash));

  if (!x->sendranks) {
    PetscMPIInt nowners, bnowners, *owners, *bowners;
    PetscInt    ntmp;
    PetscCall(VecStashGetOwnerList_Private(&X->stash, X->map, &nowners, &owners));
    PetscCall(VecStashGetOwnerList_Private(&X->bstash, X->map, &bnowners, &bowners));
    PetscCall(PetscMergeMPIIntArray(nowners, owners, bnowners, bowners, &ntmp, &x->sendranks));
    x->nsendranks = ntmp;
    PetscCall(PetscFree(owners));
    PetscCall(PetscFree(bowners));
    PetscCall(PetscMalloc1(x->nsendranks, &x->sendhdr));
    PetscCall(PetscCalloc1(x->nsendranks, &x->sendptrs));
  }
  for (i = 0, j = 0, jb = 0; i < x->nsendranks; i++) {
    PetscMPIInt rank         = x->sendranks[i];
    x->sendhdr[i].insertmode = X->stash.insertmode;
    /* Initialize pointers for non-empty stashes the first time around.  Subsequent assemblies with
     * VEC_SUBSET_OFF_PROC_ENTRIES will leave the old pointers (dangling because the stash has been collected) when
     * there is nothing new to send, so that size-zero messages get sent instead. */
    x->sendhdr[i].count      = 0;
    if (X->stash.n) {
      x->sendptrs[i].ints    = &X->stash.idx[j];
      x->sendptrs[i].scalars = &X->stash.array[j];
      for (; j < X->stash.n && X->stash.idx[j] < X->map->range[rank + 1]; j++) x->sendhdr[i].count++;
    }
    x->sendhdr[i].bcount = 0;
    if (X->bstash.n) {
      x->sendptrs[i].intb    = &X->bstash.idx[jb];
      x->sendptrs[i].scalarb = &X->bstash.array[jb * bs];
      for (; jb < X->bstash.n && X->bstash.idx[jb] * bs < X->map->range[rank + 1]; jb++) x->sendhdr[i].bcount++;
    }
  }

  if (!x->segrecvint) PetscCall(PetscSegBufferCreate(sizeof(PetscInt), 1000, &x->segrecvint));
  if (!x->segrecvscalar) PetscCall(PetscSegBufferCreate(sizeof(PetscScalar), 1000, &x->segrecvscalar));
  if (!x->segrecvframe) PetscCall(PetscSegBufferCreate(sizeof(VecAssemblyFrame), 50, &x->segrecvframe));
  if (x->first_assembly_done) { /* this is not the first assembly */
    PetscMPIInt tag[4];
    for (i = 0; i < 4; i++) PetscCall(PetscCommGetNewTag(comm, &tag[i]));
    for (i = 0; i < x->nsendranks; i++) PetscCall(VecAssemblySend_MPI_Private(comm, tag, i, x->sendranks[i], x->sendhdr + i, x->sendreqs + 4 * i, X));
    for (i = 0; i < x->nrecvranks; i++) PetscCall(VecAssemblyRecv_MPI_Private(comm, tag, x->recvranks[i], x->recvhdr + i, x->recvreqs + 4 * i, X));
    x->use_status = PETSC_TRUE;
  } else { /* First time assembly */
    PetscCall(PetscCommBuildTwoSidedFReq(comm, 3, MPIU_INT, x->nsendranks, x->sendranks, (PetscInt *)x->sendhdr, &x->nrecvranks, &x->recvranks, &x->recvhdr, 4, &x->sendreqs, &x->recvreqs, VecAssemblySend_MPI_Private, VecAssemblyRecv_MPI_Private, X));
    x->use_status = PETSC_FALSE;
  }

  /* The first_assembly_done flag is only meaningful when x->assembly_subset is set.
     This line says when assembly_subset is set, then we mark that the first assembly is done.
   */
  x->first_assembly_done = x->assembly_subset;

  {
    PetscInt nstash, reallocs;
    PetscCall(VecStashGetInfo_Private(&X->stash, &nstash, &reallocs));
    PetscCall(PetscInfo(X, "Stash has %" PetscInt_FMT " entries, uses %" PetscInt_FMT " mallocs.\n", nstash, reallocs));
    PetscCall(VecStashGetInfo_Private(&X->bstash, &nstash, &reallocs));
    PetscCall(PetscInfo(X, "Block-Stash has %" PetscInt_FMT " entries, uses %" PetscInt_FMT " mallocs.\n", nstash, reallocs));
  }
  PetscFunctionReturn(0);
}

static PetscErrorCode VecAssemblyEnd_MPI_BTS(Vec X) {
  Vec_MPI          *x  = (Vec_MPI *)X->data;
  PetscInt          bs = X->map->bs;
  PetscMPIInt       npending, *some_indices, r;
  MPI_Status       *some_statuses;
  PetscScalar      *xarray;
  VecAssemblyFrame *frame;

  PetscFunctionBegin;
  if (X->stash.donotstash) {
    X->stash.insertmode  = NOT_SET_VALUES;
    X->bstash.insertmode = NOT_SET_VALUES;
    PetscFunctionReturn(0);
  }

  PetscCheck(x->segrecvframe, PETSC_COMM_SELF, PETSC_ERR_PLIB, "Missing segrecvframe! Probably you forgot to call VecAssemblyBegin() first");
  PetscCall(VecGetArray(X, &xarray));
  PetscCall(PetscSegBufferExtractInPlace(x->segrecvframe, &frame));
  PetscCall(PetscMalloc2(4 * x->nrecvranks, &some_indices, x->use_status ? 4 * x->nrecvranks : 0, &some_statuses));
  for (r = 0, npending = 0; r < x->nrecvranks; r++) npending += frame[r].pendings + frame[r].pendingb;
  while (npending > 0) {
    PetscMPIInt ndone = 0, ii;
    /* Filling MPI_Status fields requires some resources from the MPI library.  We skip it on the first assembly, or
     * when VEC_SUBSET_OFF_PROC_ENTRIES has not been set, because we could exchange exact sizes in the initial
     * rendezvous.  When the rendezvous is elided, however, we use MPI_Status to get actual message lengths, so that
     * subsequent assembly can set a proper subset of the values. */
    PetscCallMPI(MPI_Waitsome(4 * x->nrecvranks, x->recvreqs, &ndone, some_indices, x->use_status ? some_statuses : MPI_STATUSES_IGNORE));
    for (ii = 0; ii < ndone; ii++) {
      PetscInt     i     = some_indices[ii] / 4, j, k;
      InsertMode   imode = (InsertMode)x->recvhdr[i].insertmode;
      PetscInt    *recvint;
      PetscScalar *recvscalar;
      PetscBool    intmsg   = (PetscBool)(some_indices[ii] % 2 == 0);
      PetscBool    blockmsg = (PetscBool)((some_indices[ii] % 4) / 2 == 1);
      npending--;
      if (!blockmsg) { /* Scalar stash */
        PetscMPIInt count;
        if (--frame[i].pendings > 0) continue;
        if (x->use_status) {
          PetscCallMPI(MPI_Get_count(&some_statuses[ii], intmsg ? MPIU_INT : MPIU_SCALAR, &count));
        } else count = x->recvhdr[i].count;
        for (j = 0, recvint = frame[i].ints, recvscalar = frame[i].scalars; j < count; j++, recvint++) {
          PetscInt loc = *recvint - X->map->rstart;
          PetscCheck(*recvint >= X->map->rstart && X->map->rend > *recvint, PETSC_COMM_SELF, PETSC_ERR_PLIB, "Received vector entry %" PetscInt_FMT " out of local range [%" PetscInt_FMT ",%" PetscInt_FMT ")", *recvint, X->map->rstart, X->map->rend);
          switch (imode) {
          case ADD_VALUES: xarray[loc] += *recvscalar++; break;
          case INSERT_VALUES: xarray[loc] = *recvscalar++; break;
          default: SETERRQ(PETSC_COMM_SELF, PETSC_ERR_SUP, "Insert mode not supported 0x%x", imode);
          }
        }
      } else { /* Block stash */
        PetscMPIInt count;
        if (--frame[i].pendingb > 0) continue;
        if (x->use_status) {
          PetscCallMPI(MPI_Get_count(&some_statuses[ii], intmsg ? MPIU_INT : MPIU_SCALAR, &count));
          if (!intmsg) count /= bs; /* Convert from number of scalars to number of blocks */
        } else count = x->recvhdr[i].bcount;
        for (j = 0, recvint = frame[i].intb, recvscalar = frame[i].scalarb; j < count; j++, recvint++) {
          PetscInt loc = (*recvint) * bs - X->map->rstart;
          switch (imode) {
          case ADD_VALUES:
            for (k = loc; k < loc + bs; k++) xarray[k] += *recvscalar++;
            break;
          case INSERT_VALUES:
            for (k = loc; k < loc + bs; k++) xarray[k] = *recvscalar++;
            break;
          default: SETERRQ(PETSC_COMM_SELF, PETSC_ERR_SUP, "Insert mode not supported 0x%x", imode);
          }
        }
      }
    }
  }
  PetscCall(VecRestoreArray(X, &xarray));
  PetscCallMPI(MPI_Waitall(4 * x->nsendranks, x->sendreqs, MPI_STATUSES_IGNORE));
  PetscCall(PetscFree2(some_indices, some_statuses));
  if (x->assembly_subset) {
    PetscCall(PetscSegBufferExtractInPlace(x->segrecvint, NULL));
    PetscCall(PetscSegBufferExtractInPlace(x->segrecvscalar, NULL));
  } else {
    PetscCall(VecAssemblyReset_MPI(X));
  }

  X->stash.insertmode  = NOT_SET_VALUES;
  X->bstash.insertmode = NOT_SET_VALUES;
  PetscCall(VecStashScatterEnd_Private(&X->stash));
  PetscCall(VecStashScatterEnd_Private(&X->bstash));
  PetscFunctionReturn(0);
}

PetscErrorCode VecAssemblyReset_MPI(Vec X) {
  Vec_MPI *x = (Vec_MPI *)X->data;

  PetscFunctionBegin;
  PetscCall(PetscFree(x->sendreqs));
  PetscCall(PetscFree(x->recvreqs));
  PetscCall(PetscFree(x->sendranks));
  PetscCall(PetscFree(x->recvranks));
  PetscCall(PetscFree(x->sendhdr));
  PetscCall(PetscFree(x->recvhdr));
  PetscCall(PetscFree(x->sendptrs));
  PetscCall(PetscSegBufferDestroy(&x->segrecvint));
  PetscCall(PetscSegBufferDestroy(&x->segrecvscalar));
  PetscCall(PetscSegBufferDestroy(&x->segrecvframe));
  PetscFunctionReturn(0);
}

static PetscErrorCode VecSetFromOptions_MPI(Vec X, PetscOptionItems *PetscOptionsObject) {
#if !defined(PETSC_HAVE_MPIUNI)
  PetscBool flg = PETSC_FALSE, set;

  PetscFunctionBegin;
  PetscOptionsHeadBegin(PetscOptionsObject, "VecMPI Options");
  PetscCall(PetscOptionsBool("-vec_assembly_legacy", "Use MPI 1 version of assembly", "", flg, &flg, &set));
  if (set) {
    X->ops->assemblybegin = flg ? VecAssemblyBegin_MPI : VecAssemblyBegin_MPI_BTS;
    X->ops->assemblyend   = flg ? VecAssemblyEnd_MPI : VecAssemblyEnd_MPI_BTS;
  }
  PetscOptionsHeadEnd();
#else
  PetscFunctionBegin;
  X->ops->assemblybegin = VecAssemblyBegin_MPI;
  X->ops->assemblyend   = VecAssemblyEnd_MPI;
#endif
  PetscFunctionReturn(0);
}

static struct _VecOps DvOps = {PetscDesignatedInitializer(duplicate, VecDuplicate_MPI), /* 1 */
                               PetscDesignatedInitializer(duplicatevecs, VecDuplicateVecs_Default),
                               PetscDesignatedInitializer(destroyvecs, VecDestroyVecs_Default),
                               PetscDesignatedInitializer(dot, VecDot_MPI),
                               PetscDesignatedInitializer(mdot, VecMDot_MPI),
                               PetscDesignatedInitializer(norm, VecNorm_MPI),
                               PetscDesignatedInitializer(tdot, VecTDot_MPI),
                               PetscDesignatedInitializer(mtdot, VecMTDot_MPI),
                               PetscDesignatedInitializer(scale, VecScale_Seq),
                               PetscDesignatedInitializer(copy, VecCopy_Seq), /* 10 */
                               PetscDesignatedInitializer(set, VecSet_Seq),
                               PetscDesignatedInitializer(swap, VecSwap_Seq),
                               PetscDesignatedInitializer(axpy, VecAXPY_Seq),
                               PetscDesignatedInitializer(axpby, VecAXPBY_Seq),
                               PetscDesignatedInitializer(maxpy, VecMAXPY_Seq),
                               PetscDesignatedInitializer(aypx, VecAYPX_Seq),
                               PetscDesignatedInitializer(waxpy, VecWAXPY_Seq),
                               PetscDesignatedInitializer(axpbypcz, VecAXPBYPCZ_Seq),
                               PetscDesignatedInitializer(pointwisemult, VecPointwiseMult_Seq),
                               PetscDesignatedInitializer(pointwisedivide, VecPointwiseDivide_Seq),
                               PetscDesignatedInitializer(setvalues, VecSetValues_MPI), /* 20 */
                               PetscDesignatedInitializer(assemblybegin, VecAssemblyBegin_MPI_BTS),
                               PetscDesignatedInitializer(assemblyend, VecAssemblyEnd_MPI_BTS),
                               PetscDesignatedInitializer(getarray, NULL),
                               PetscDesignatedInitializer(getsize, VecGetSize_MPI),
                               PetscDesignatedInitializer(getlocalsize, VecGetSize_Seq),
                               PetscDesignatedInitializer(restorearray, NULL),
                               PetscDesignatedInitializer(max, VecMax_MPI),
                               PetscDesignatedInitializer(min, VecMin_MPI),
                               PetscDesignatedInitializer(setrandom, VecSetRandom_Seq),
                               PetscDesignatedInitializer(setoption, VecSetOption_MPI),
                               PetscDesignatedInitializer(setvaluesblocked, VecSetValuesBlocked_MPI),
                               PetscDesignatedInitializer(destroy, VecDestroy_MPI),
                               PetscDesignatedInitializer(view, VecView_MPI),
                               PetscDesignatedInitializer(placearray, VecPlaceArray_MPI),
                               PetscDesignatedInitializer(replacearray, VecReplaceArray_Seq),
                               PetscDesignatedInitializer(dot_local, VecDot_Seq),
                               PetscDesignatedInitializer(tdot_local, VecTDot_Seq),
                               PetscDesignatedInitializer(norm_local, VecNorm_Seq),
                               PetscDesignatedInitializer(mdot_local, VecMDot_Seq),
                               PetscDesignatedInitializer(mtdot_local, VecMTDot_Seq),
                               PetscDesignatedInitializer(load, VecLoad_Default),
                               PetscDesignatedInitializer(reciprocal, VecReciprocal_Default),
                               PetscDesignatedInitializer(conjugate, VecConjugate_Seq),
                               PetscDesignatedInitializer(setlocaltoglobalmapping, NULL),
                               PetscDesignatedInitializer(setvalueslocal, NULL),
                               PetscDesignatedInitializer(resetarray, VecResetArray_MPI),
                               PetscDesignatedInitializer(setfromoptions, VecSetFromOptions_MPI), /* set from options */
                               PetscDesignatedInitializer(maxpointwisedivide, VecMaxPointwiseDivide_Seq),
                               PetscDesignatedInitializer(pointwisemax, VecPointwiseMax_Seq),
                               PetscDesignatedInitializer(pointwisemaxabs, VecPointwiseMaxAbs_Seq),
                               PetscDesignatedInitializer(pointwisemin, VecPointwiseMin_Seq),
                               PetscDesignatedInitializer(getvalues, VecGetValues_MPI),
                               PetscDesignatedInitializer(sqrt, NULL),
                               PetscDesignatedInitializer(abs, NULL),
                               PetscDesignatedInitializer(exp, NULL),
                               PetscDesignatedInitializer(log, NULL),
                               PetscDesignatedInitializer(shift, NULL),
                               PetscDesignatedInitializer(create, NULL), /* really? */
                               PetscDesignatedInitializer(stridegather, VecStrideGather_Default),
                               PetscDesignatedInitializer(stridescatter, VecStrideScatter_Default),
                               PetscDesignatedInitializer(dotnorm2, NULL),
                               PetscDesignatedInitializer(getsubvector, NULL),
                               PetscDesignatedInitializer(restoresubvector, NULL),
                               PetscDesignatedInitializer(getarrayread, NULL),
                               PetscDesignatedInitializer(restorearrayread, NULL),
                               PetscDesignatedInitializer(stridesubsetgather, VecStrideSubSetGather_Default),
                               PetscDesignatedInitializer(stridesubsetscatter, VecStrideSubSetScatter_Default),
                               PetscDesignatedInitializer(viewnative, VecView_MPI),
                               PetscDesignatedInitializer(loadnative, NULL),
                               PetscDesignatedInitializer(createlocalvector, NULL),
                               PetscDesignatedInitializer(getlocalvector, NULL),
                               PetscDesignatedInitializer(restorelocalvector, NULL),
                               PetscDesignatedInitializer(getlocalvectorread, NULL),
                               PetscDesignatedInitializer(restorelocalvectorread, NULL),
                               PetscDesignatedInitializer(bindtocpu, NULL),
                               PetscDesignatedInitializer(getarraywrite, NULL),
                               PetscDesignatedInitializer(restorearraywrite, NULL),
                               PetscDesignatedInitializer(getarrayandmemtype, NULL),
                               PetscDesignatedInitializer(restorearrayandmemtype, NULL),
                               PetscDesignatedInitializer(getarrayreadandmemtype, NULL),
                               PetscDesignatedInitializer(restorearrayreadandmemtype, NULL),
                               PetscDesignatedInitializer(getarraywriteandmemtype, NULL),
                               PetscDesignatedInitializer(restorearraywriteandmemtype, NULL),
                               PetscDesignatedInitializer(concatenate, NULL),
                               PetscDesignatedInitializer(sum, NULL),
                               PetscDesignatedInitializer(setpreallocationcoo, VecSetPreallocationCOO_MPI),
                               PetscDesignatedInitializer(setvaluescoo, VecSetValuesCOO_MPI)};

/*
    VecCreate_MPI_Private - Basic create routine called by VecCreate_MPI() (i.e. VecCreateMPI()),
    VecCreateMPIWithArray(), VecCreate_Shared() (i.e. VecCreateShared()), VecCreateGhost(),
    VecDuplicate_MPI(), VecCreateGhostWithArray(), and VecDuplicate_Shared()

    If alloc is true and array is NULL then this routine allocates the space, otherwise
    no space is allocated.
*/
PetscErrorCode VecCreate_MPI_Private(Vec v, PetscBool alloc, PetscInt nghost, const PetscScalar array[]) {
  Vec_MPI *s;

  PetscFunctionBegin;
  PetscCall(PetscNewLog(v, &s));
  v->data = (void *)s;
  PetscCall(PetscMemcpy(v->ops, &DvOps, sizeof(DvOps)));
  s->nghost      = nghost;
  v->petscnative = PETSC_TRUE;
  if (array) v->offloadmask = PETSC_OFFLOAD_CPU;

  PetscCall(PetscLayoutSetUp(v->map));

  s->array           = (PetscScalar *)array;
  s->array_allocated = NULL;
  if (alloc && !array) {
    PetscInt n = v->map->n + nghost;
    PetscCall(PetscCalloc1(n, &s->array));
    PetscCall(PetscLogObjectMemory((PetscObject)v, n * sizeof(PetscScalar)));
    s->array_allocated = s->array;
  }

  /* By default parallel vectors do not have local representation */
  s->localrep    = NULL;
  s->localupdate = NULL;

  v->stash.insertmode  = NOT_SET_VALUES;
  v->bstash.insertmode = NOT_SET_VALUES;
  /* create the stashes. The block-size for bstash is set later when
     VecSetValuesBlocked is called.
  */
  PetscCall(VecStashCreate_Private(PetscObjectComm((PetscObject)v), 1, &v->stash));
  PetscCall(VecStashCreate_Private(PetscObjectComm((PetscObject)v), PetscAbs(v->map->bs), &v->bstash));

#if defined(PETSC_HAVE_MATLAB_ENGINE)
  PetscCall(PetscObjectComposeFunction((PetscObject)v, "PetscMatlabEnginePut_C", VecMatlabEnginePut_Default));
  PetscCall(PetscObjectComposeFunction((PetscObject)v, "PetscMatlabEngineGet_C", VecMatlabEngineGet_Default));
#endif
  PetscCall(PetscObjectChangeTypeName((PetscObject)v, VECMPI));
  PetscFunctionReturn(0);
}

/*MC
   VECMPI - VECMPI = "mpi" - The basic parallel vector

   Options Database Keys:
. -vec_type mpi - sets the vector type to VECMPI during a call to VecSetFromOptions()

  Level: beginner

.seealso: `VecCreate()`, `VecSetType()`, `VecSetFromOptions()`, `VecCreateMPIWithArray()`, `VECMPI`, `VecType`, `VecCreateMPI()`
M*/

PetscErrorCode VecCreate_MPI(Vec vv) {
  PetscFunctionBegin;
  PetscCall(VecCreate_MPI_Private(vv, PETSC_TRUE, 0, NULL));
  PetscFunctionReturn(0);
}

/*MC
   VECSTANDARD = "standard" - A VECSEQ on one process and VECMPI on more than one process

   Options Database Keys:
. -vec_type standard - sets the vector type to VECSTANDARD during a call to VecSetFromOptions()

  Level: beginner

.seealso: `VecCreateSeq()`, `VecCreateMPI()`
M*/

PETSC_EXTERN PetscErrorCode VecCreate_Standard(Vec v) {
  PetscMPIInt size;

  PetscFunctionBegin;
  PetscCallMPI(MPI_Comm_size(PetscObjectComm((PetscObject)v), &size));
  if (size == 1) {
    PetscCall(VecSetType(v, VECSEQ));
  } else {
    PetscCall(VecSetType(v, VECMPI));
  }
  PetscFunctionReturn(0);
}

/*@C
   VecCreateMPIWithArray - Creates a parallel, array-style vector,
   where the user provides the array space to store the vector values.

   Collective

   Input Parameters:
+  comm  - the MPI communicator to use
.  bs    - block size, same meaning as VecSetBlockSize()
.  n     - local vector length, cannot be PETSC_DECIDE
.  N     - global vector length (or PETSC_DECIDE to have calculated)
-  array - the user provided array to store the vector values

   Output Parameter:
.  vv - the vector

   Notes:
   Use VecDuplicate() or VecDuplicateVecs() to form additional vectors of the
   same type as an existing vector.

   If the user-provided array is NULL, then VecPlaceArray() can be used
   at a later stage to SET the array for storing the vector values.

   PETSc does NOT free the array when the vector is destroyed via VecDestroy().
   The user should not free the array until the vector is destroyed.

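   Example Usage:
   A minimal sketch (the sizes and values here are illustrative only):
.vb
   PetscScalar array[4] = {1.0, 2.0, 3.0, 4.0};
   Vec         x;
   PetscCall(VecCreateMPIWithArray(PETSC_COMM_WORLD, 1, 4, PETSC_DECIDE, array, &x));
   /* ... use x; VecDestroy() will not free array, the caller owns it ... */
   PetscCall(VecDestroy(&x));
.ve
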
   Level: intermediate

.seealso: `VecCreateSeqWithArray()`, `VecCreate()`, `VecDuplicate()`, `VecDuplicateVecs()`, `VecCreateGhost()`,
          `VecCreateMPI()`, `VecCreateGhostWithArray()`, `VecPlaceArray()`

@*/
PetscErrorCode VecCreateMPIWithArray(MPI_Comm comm, PetscInt bs, PetscInt n, PetscInt N, const PetscScalar array[], Vec *vv) {
  PetscFunctionBegin;
  PetscCheck(n != PETSC_DECIDE, PETSC_COMM_SELF, PETSC_ERR_ARG_OUTOFRANGE, "Must set local size of vector");
  PetscCall(PetscSplitOwnership(comm, &n, &N));
  PetscCall(VecCreate(comm, vv));
  PetscCall(VecSetSizes(*vv, n, N));
  PetscCall(VecSetBlockSize(*vv, bs));
  PetscCall(VecCreate_MPI_Private(*vv, PETSC_FALSE, 0, array));
  PetscFunctionReturn(0);
}

/*@C
   VecCreateGhostWithArray - Creates a parallel vector with ghost padding on each processor;
   the caller allocates the array space.

   Collective

   Input Parameters:
+  comm - the MPI communicator to use
.  n - local vector length
.  N - global vector length (or PETSC_DECIDE to have calculated if n is given)
.  nghost - number of local ghost points
.  ghosts - global indices of ghost points (or NULL if not needed), these do not need to be in increasing order (sorted)
-  array - the space to store the vector values (as long as n + nghost)

   Output Parameter:
.  vv - the global vector representation (without ghost points as part of vector)

   Notes:
   Use VecGhostGetLocalForm() to access the local, ghosted representation
   of the vector.

   This also automatically sets the ISLocalToGlobalMapping() for this vector.

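   Example Usage:
   A minimal sketch (the local size and ghost indices are illustrative; array
   must provide room for n + nghost entries):
.vb
   PetscInt    ghosts[2] = {0, 42}; /* hypothetical global indices owned by other ranks */
   PetscScalar array[6];            /* 4 owned entries followed by 2 ghost slots */
   Vec         x;
   PetscCall(VecCreateGhostWithArray(PETSC_COMM_WORLD, 4, PETSC_DECIDE, 2, ghosts, array, &x));
.ve
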
   Level: advanced

.seealso: `VecCreate()`, `VecGhostGetLocalForm()`, `VecGhostRestoreLocalForm()`,
          `VecCreateGhost()`, `VecCreateSeqWithArray()`, `VecCreateMPIWithArray()`,
          `VecCreateGhostBlock()`, `VecCreateGhostBlockWithArray()`, `VecMPISetGhost()`

@*/
PetscErrorCode VecCreateGhostWithArray(MPI_Comm comm, PetscInt n, PetscInt N, PetscInt nghost, const PetscInt ghosts[], const PetscScalar array[], Vec *vv) {
  Vec_MPI               *w;
  PetscScalar           *larray;
  IS                     from, to;
  ISLocalToGlobalMapping ltog;
  PetscInt               rstart, i, *indices;

  PetscFunctionBegin;
  *vv = NULL;

  PetscCheck(n != PETSC_DECIDE, PETSC_COMM_SELF, PETSC_ERR_ARG_OUTOFRANGE, "Must set local size");
  PetscCheck(nghost != PETSC_DECIDE, PETSC_COMM_SELF, PETSC_ERR_ARG_OUTOFRANGE, "Must set local ghost size");
  PetscCheck(nghost >= 0, PETSC_COMM_SELF, PETSC_ERR_ARG_OUTOFRANGE, "Ghost length must be >= 0");
  PetscCall(PetscSplitOwnership(comm, &n, &N));
  /* Create global representation */
  PetscCall(VecCreate(comm, vv));
  PetscCall(VecSetSizes(*vv, n, N));
  PetscCall(VecCreate_MPI_Private(*vv, PETSC_TRUE, nghost, array));
  w = (Vec_MPI *)(*vv)->data;
  /* Create local representation */
  PetscCall(VecGetArray(*vv, &larray));
  PetscCall(VecCreateSeqWithArray(PETSC_COMM_SELF, 1, n + nghost, larray, &w->localrep));
  PetscCall(PetscLogObjectParent((PetscObject)*vv, (PetscObject)w->localrep));
  PetscCall(VecRestoreArray(*vv, &larray));

  /*
       Create scatter context for scattering (updating) ghost values
  */
  PetscCall(ISCreateGeneral(comm, nghost, ghosts, PETSC_COPY_VALUES, &from));
  PetscCall(ISCreateStride(PETSC_COMM_SELF, nghost, n, 1, &to));
  PetscCall(VecScatterCreate(*vv, from, w->localrep, to, &w->localupdate));
  PetscCall(PetscLogObjectParent((PetscObject)*vv, (PetscObject)w->localupdate));
  PetscCall(ISDestroy(&to));
  PetscCall(ISDestroy(&from));

  /* set local to global mapping for ghosted vector */
  PetscCall(PetscMalloc1(n + nghost, &indices));
  PetscCall(VecGetOwnershipRange(*vv, &rstart, NULL));
  for (i = 0; i < n; i++) indices[i] = rstart + i;
  for (i = 0; i < nghost; i++) indices[n + i] = ghosts[i];
  PetscCall(ISLocalToGlobalMappingCreate(comm, 1, n + nghost, indices, PETSC_OWN_POINTER, &ltog));
  PetscCall(VecSetLocalToGlobalMapping(*vv, ltog));
  PetscCall(ISLocalToGlobalMappingDestroy(&ltog));
  PetscFunctionReturn(0);
}

/*@
   VecCreateGhost - Creates a parallel vector with ghost padding on each processor.

   Collective

   Input Parameters:
+  comm - the MPI communicator to use
.  n - local vector length
.  N - global vector length (or PETSC_DECIDE to have calculated if n is given)
.  nghost - number of local ghost points
-  ghosts - global indices of ghost points, these do not need to be in increasing order (sorted)

   Output Parameter:
.  vv - the global vector representation (without ghost points as part of vector)

   Notes:
   Use VecGhostGetLocalForm() to access the local, ghosted representation
   of the vector.

   This also automatically sets the ISLocalToGlobalMapping() for this vector.

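   Example Usage:
   A typical update cycle (a sketch; the local size and ghost indices are illustrative):
.vb
   Vec      x, xlocal;
   PetscInt ghosts[2] = {0, 42}; /* hypothetical global indices owned by other ranks */
   PetscCall(VecCreateGhost(PETSC_COMM_WORLD, 4, PETSC_DECIDE, 2, ghosts, &x));
   /* ... set owned entries with VecSetValues() + VecAssemblyBegin/End() ... */
   PetscCall(VecGhostUpdateBegin(x, INSERT_VALUES, SCATTER_FORWARD));
   PetscCall(VecGhostUpdateEnd(x, INSERT_VALUES, SCATTER_FORWARD));
   PetscCall(VecGhostGetLocalForm(x, &xlocal)); /* sequential vector with 4 + 2 entries */
   /* ... read owned and ghost values from xlocal ... */
   PetscCall(VecGhostRestoreLocalForm(x, &xlocal));
.ve
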
   Level: advanced

.seealso: `VecCreateSeq()`, `VecCreate()`, `VecDuplicate()`, `VecDuplicateVecs()`, `VecCreateMPI()`,
          `VecGhostGetLocalForm()`, `VecGhostRestoreLocalForm()`, `VecGhostUpdateBegin()`,
          `VecCreateGhostWithArray()`, `VecCreateMPIWithArray()`, `VecGhostUpdateEnd()`,
          `VecCreateGhostBlock()`, `VecCreateGhostBlockWithArray()`, `VecMPISetGhost()`

@*/
PetscErrorCode VecCreateGhost(MPI_Comm comm, PetscInt n, PetscInt N, PetscInt nghost, const PetscInt ghosts[], Vec *vv) {
  PetscFunctionBegin;
  PetscCall(VecCreateGhostWithArray(comm, n, N, nghost, ghosts, NULL, vv));
  PetscFunctionReturn(0);
}

/*@
   VecMPISetGhost - Sets the ghost points for an MPI ghost vector

   Collective on Vec

   Input Parameters:
+  vv - the MPI vector
.  nghost - number of local ghost points
-  ghosts - global indices of ghost points, these do not need to be in increasing order (sorted)

   Notes:
   Use VecGhostGetLocalForm() to access the local, ghosted representation
   of the vector.

   This also automatically sets the ISLocalToGlobalMapping() for this vector.

   You must call this AFTER you have set the type of the vector (with VecSetType()) and the size (with VecSetSizes()).

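   Example Usage:
   A sketch of the required call order (the sizes and ghost index are illustrative):
.vb
   Vec      x;
   PetscInt ghosts[1] = {0}; /* hypothetical global index owned by another rank */
   PetscCall(VecCreate(PETSC_COMM_WORLD, &x));
   PetscCall(VecSetSizes(x, 4, PETSC_DECIDE));
   PetscCall(VecSetType(x, VECMPI));
   PetscCall(VecMPISetGhost(x, 1, ghosts));
.ve
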
   Level: advanced

.seealso: `VecCreateSeq()`, `VecCreate()`, `VecDuplicate()`, `VecDuplicateVecs()`, `VecCreateMPI()`,
          `VecGhostGetLocalForm()`, `VecGhostRestoreLocalForm()`, `VecGhostUpdateBegin()`,
          `VecCreateGhostWithArray()`, `VecCreateMPIWithArray()`, `VecGhostUpdateEnd()`,
          `VecCreateGhostBlock()`, `VecCreateGhostBlockWithArray()`

@*/
PetscErrorCode VecMPISetGhost(Vec vv, PetscInt nghost, const PetscInt ghosts[]) {
  PetscBool flg;

  PetscFunctionBegin;
  PetscCall(PetscObjectTypeCompare((PetscObject)vv, VECMPI, &flg));
  /* if this is already a fully instantiated VECMPI then destroy it and rebuild with ghosting */
  if (flg) {
    PetscInt               n, N;
    Vec_MPI               *w;
    PetscScalar           *larray;
    IS                     from, to;
    ISLocalToGlobalMapping ltog;
    PetscInt               rstart, i, *indices;
    MPI_Comm               comm;

    PetscCall(PetscObjectGetComm((PetscObject)vv, &comm));
    n = vv->map->n;
    N = vv->map->N;
    PetscUseTypeMethod(vv, destroy);
    PetscCall(VecSetSizes(vv, n, N));
    PetscCall(VecCreate_MPI_Private(vv, PETSC_TRUE, nghost, NULL));
    w = (Vec_MPI *)(vv)->data;
    /* Create local representation */
    PetscCall(VecGetArray(vv, &larray));
    PetscCall(VecCreateSeqWithArray(PETSC_COMM_SELF, 1, n + nghost, larray, &w->localrep));
    PetscCall(PetscLogObjectParent((PetscObject)vv, (PetscObject)w->localrep));
    PetscCall(VecRestoreArray(vv, &larray));

    /*
     Create scatter context for scattering (updating) ghost values
     */
    PetscCall(ISCreateGeneral(comm, nghost, ghosts, PETSC_COPY_VALUES, &from));
    PetscCall(ISCreateStride(PETSC_COMM_SELF, nghost, n, 1, &to));
    PetscCall(VecScatterCreate(vv, from, w->localrep, to, &w->localupdate));
    PetscCall(PetscLogObjectParent((PetscObject)vv, (PetscObject)w->localupdate));
    PetscCall(ISDestroy(&to));
    PetscCall(ISDestroy(&from));

    /* set local to global mapping for ghosted vector */
    PetscCall(PetscMalloc1(n + nghost, &indices));
    PetscCall(VecGetOwnershipRange(vv, &rstart, NULL));

    for (i = 0; i < n; i++) indices[i] = rstart + i;
    for (i = 0; i < nghost; i++) indices[n + i] = ghosts[i];

    PetscCall(ISLocalToGlobalMappingCreate(comm, 1, n + nghost, indices, PETSC_OWN_POINTER, &ltog));
    PetscCall(VecSetLocalToGlobalMapping(vv, ltog));
    PetscCall(ISLocalToGlobalMappingDestroy(&ltog));
  } else {
    PetscCheck(vv->ops->create != VecCreate_MPI, PetscObjectComm((PetscObject)vv), PETSC_ERR_ARG_WRONGSTATE, "Must set local or global size before setting ghosting");
    PetscCheck(((PetscObject)vv)->type_name, PetscObjectComm((PetscObject)vv), PETSC_ERR_ARG_WRONGSTATE, "Must set type to VECMPI before ghosting");
  }
  PetscFunctionReturn(0);
}

/* ------------------------------------------------------------------------------------------*/
/*@C
   VecCreateGhostBlockWithArray - Creates a parallel vector with ghost padding on each processor;
   the caller allocates the array space. Indices in the ghost region are based on blocks.

   Collective

   Input Parameters:
+  comm - the MPI communicator to use
.  bs - block size
.  n - local vector length
.  N - global vector length (or PETSC_DECIDE to have calculated if n is given)
.  nghost - number of local ghost blocks
.  ghosts - global indices of ghost blocks (or NULL if not needed), counts are by block not by index, these do not need to be in increasing order (sorted)
-  array - the space to store the vector values (as long as n + nghost*bs)

   Output Parameter:
.  vv - the global vector representation (without ghost points as part of vector)

   Notes:
   Use VecGhostGetLocalForm() to access the local, ghosted representation
   of the vector.

   n is the local vector size (total local size not the number of blocks) while nghost
   is the number of blocks in the ghost portion, i.e. the number of elements in the ghost
   portion is bs*nghost

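   Example Usage:
   A sketch with block size 2 (the sizes and the ghost block index are illustrative;
   array must provide room for n + nghost*bs entries):
.vb
   PetscInt    ghosts[1] = {5}; /* hypothetical global BLOCK index owned by another rank */
   PetscScalar array[6];        /* n = 4 owned entries plus 1 ghost block of size 2 */
   Vec         x;
   PetscCall(VecCreateGhostBlockWithArray(PETSC_COMM_WORLD, 2, 4, PETSC_DECIDE, 1, ghosts, array, &x));
.ve
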
   Level: advanced

.seealso: `VecCreate()`, `VecGhostGetLocalForm()`, `VecGhostRestoreLocalForm()`,
          `VecCreateGhost()`, `VecCreateSeqWithArray()`, `VecCreateMPIWithArray()`,
          `VecCreateGhostWithArray()`, `VecCreateGhostBlock()`

@*/
PetscErrorCode VecCreateGhostBlockWithArray(MPI_Comm comm, PetscInt bs, PetscInt n, PetscInt N, PetscInt nghost, const PetscInt ghosts[], const PetscScalar array[], Vec *vv) {
  Vec_MPI               *w;
  PetscScalar           *larray;
  IS                     from, to;
  ISLocalToGlobalMapping ltog;
  PetscInt               rstart, i, nb, *indices;

  PetscFunctionBegin;
  *vv = NULL;

  PetscCheck(n != PETSC_DECIDE, PETSC_COMM_SELF, PETSC_ERR_ARG_OUTOFRANGE, "Must set local size");
  PetscCheck(nghost != PETSC_DECIDE, PETSC_COMM_SELF, PETSC_ERR_ARG_OUTOFRANGE, "Must set local ghost size");
  PetscCheck(nghost >= 0, PETSC_COMM_SELF, PETSC_ERR_ARG_OUTOFRANGE, "Ghost length must be >= 0");
  PetscCheck(n % bs == 0, PETSC_COMM_SELF, PETSC_ERR_ARG_INCOMP, "Local size must be a multiple of block size");
  PetscCall(PetscSplitOwnership(comm, &n, &N));
  /* Create global representation */
  PetscCall(VecCreate(comm, vv));
  PetscCall(VecSetSizes(*vv, n, N));
  PetscCall(VecSetBlockSize(*vv, bs));
  PetscCall(VecCreate_MPI_Private(*vv, PETSC_TRUE, nghost * bs, array));
  w = (Vec_MPI *)(*vv)->data;
  /* Create local representation */
  PetscCall(VecGetArray(*vv, &larray));
  PetscCall(VecCreateSeqWithArray(PETSC_COMM_SELF, bs, n + bs * nghost, larray, &w->localrep));
  PetscCall(PetscLogObjectParent((PetscObject)*vv, (PetscObject)w->localrep));
  PetscCall(VecRestoreArray(*vv, &larray));

  /*
       Create scatter context for scattering (updating) ghost values
  */
  PetscCall(ISCreateBlock(comm, bs, nghost, ghosts, PETSC_COPY_VALUES, &from));
  PetscCall(ISCreateStride(PETSC_COMM_SELF, bs * nghost, n, 1, &to));
  PetscCall(VecScatterCreate(*vv, from, w->localrep, to, &w->localupdate));
  PetscCall(PetscLogObjectParent((PetscObject)*vv, (PetscObject)w->localupdate));
  PetscCall(ISDestroy(&to));
  PetscCall(ISDestroy(&from));

  /* set local to global mapping for ghosted vector */
  nb = n / bs;
  PetscCall(PetscMalloc1(nb + nghost, &indices));
  PetscCall(VecGetOwnershipRange(*vv, &rstart, NULL));
  rstart = rstart / bs;

  for (i = 0; i < nb; i++) indices[i] = rstart + i;
  for (i = 0; i < nghost; i++) indices[nb + i] = ghosts[i];

  PetscCall(ISLocalToGlobalMappingCreate(comm, bs, nb + nghost, indices, PETSC_OWN_POINTER, &ltog));
  PetscCall(VecSetLocalToGlobalMapping(*vv, ltog));
  PetscCall(ISLocalToGlobalMappingDestroy(&ltog));
  PetscFunctionReturn(0);
}

/*@
   VecCreateGhostBlock - Creates a parallel vector with ghost padding on each processor.
        The indexing of the ghost points is done using blocks.

   Collective

   Input Parameters:
+  comm - the MPI communicator to use
.  bs - the block size
.  n - local vector length
.  N - global vector length (or PETSC_DECIDE to have calculated if n is given)
.  nghost - number of local ghost blocks
-  ghosts - global indices of ghost blocks, counts are by block, not by individual index, these do not need to be in increasing order (sorted)

   Output Parameter:
.  vv - the global vector representation (without ghost points as part of vector)

   Notes:
   Use VecGhostGetLocalForm() to access the local, ghosted representation
   of the vector.

   n is the local vector size (total local size not the number of blocks) while nghost
   is the number of blocks in the ghost portion, i.e. the number of elements in the ghost
   portion is bs*nghost

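   Example Usage:
   A sketch with block size 2 (the sizes and the ghost block index are illustrative):
.vb
   Vec      x;
   PetscInt ghosts[1] = {5}; /* hypothetical global BLOCK index owned by another rank */
   PetscCall(VecCreateGhostBlock(PETSC_COMM_WORLD, 2, 4, PETSC_DECIDE, 1, ghosts, &x));
   /* the local form from VecGhostGetLocalForm() has 4 + 2*1 entries */
.ve
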
   Level: advanced

.seealso: `VecCreateSeq()`, `VecCreate()`, `VecDuplicate()`, `VecDuplicateVecs()`, `VecCreateMPI()`,
          `VecGhostGetLocalForm()`, `VecGhostRestoreLocalForm()`,
          `VecCreateGhostWithArray()`, `VecCreateMPIWithArray()`, `VecCreateGhostBlockWithArray()`

@*/
PetscErrorCode VecCreateGhostBlock(MPI_Comm comm, PetscInt bs, PetscInt n, PetscInt N, PetscInt nghost, const PetscInt ghosts[], Vec *vv) {
  PetscFunctionBegin;
  PetscCall(VecCreateGhostBlockWithArray(comm, bs, n, N, nghost, ghosts, NULL, vv));
  PetscFunctionReturn(0);
}
909