xref: /petsc/src/vec/vec/impls/mpi/pbvec.c (revision 9475a72bd2a60c4c284a56351bd125b0567f0945)
1 
2 /*
3    This file contains routines for Parallel vector operations.
4  */
5 #include <petscsys.h>
6 #include <../src/vec/vec/impls/mpi/pvecimpl.h> /*I  "petscvec.h"   I*/
7 
8 PetscErrorCode VecDot_MPI(Vec xin, Vec yin, PetscScalar *z) {
9   PetscScalar sum, work;
10 
11   PetscFunctionBegin;
12   PetscCall(VecDot_Seq(xin, yin, &work));
13   PetscCall(MPIU_Allreduce(&work, &sum, 1, MPIU_SCALAR, MPIU_SUM, PetscObjectComm((PetscObject)xin)));
14   *z = sum;
15   PetscFunctionReturn(0);
16 }
17 
18 PetscErrorCode VecTDot_MPI(Vec xin, Vec yin, PetscScalar *z) {
19   PetscScalar sum, work;
20 
21   PetscFunctionBegin;
22   PetscCall(VecTDot_Seq(xin, yin, &work));
23   PetscCall(MPIU_Allreduce(&work, &sum, 1, MPIU_SCALAR, MPIU_SUM, PetscObjectComm((PetscObject)xin)));
24   *z = sum;
25   PetscFunctionReturn(0);
26 }
27 
28 extern PetscErrorCode VecView_MPI_Draw(Vec, PetscViewer);
29 
30 static PetscErrorCode VecPlaceArray_MPI(Vec vin, const PetscScalar *a) {
31   Vec_MPI *v = (Vec_MPI *)vin->data;
32 
33   PetscFunctionBegin;
34   PetscCheck(!v->unplacedarray, PETSC_COMM_SELF, PETSC_ERR_ARG_WRONGSTATE, "VecPlaceArray() was already called on this vector, without a call to VecResetArray()");
35   v->unplacedarray = v->array; /* save previous array so reset can bring it back */
36   v->array         = (PetscScalar *)a;
37   if (v->localrep) PetscCall(VecPlaceArray(v->localrep, a));
38   PetscFunctionReturn(0);
39 }
40 
41 PetscErrorCode VecDuplicate_MPI(Vec win, Vec *v) {
42   Vec_MPI     *vw, *w = (Vec_MPI *)win->data;
43   PetscScalar *array;
44 
45   PetscFunctionBegin;
46   PetscCall(VecCreate(PetscObjectComm((PetscObject)win), v));
47   PetscCall(PetscLayoutReference(win->map, &(*v)->map));
48 
49   PetscCall(VecCreate_MPI_Private(*v, PETSC_TRUE, w->nghost, NULL));
50   vw = (Vec_MPI *)(*v)->data;
51   PetscCall(PetscMemcpy((*v)->ops, win->ops, sizeof(struct _VecOps)));
52 
53   /* save local representation of the parallel vector (and scatter) if it exists */
54   if (w->localrep) {
55     PetscCall(VecGetArray(*v, &array));
56     PetscCall(VecCreateSeqWithArray(PETSC_COMM_SELF, PetscAbs(win->map->bs), win->map->n + w->nghost, array, &vw->localrep));
57     PetscCall(PetscMemcpy(vw->localrep->ops, w->localrep->ops, sizeof(struct _VecOps)));
58     PetscCall(VecRestoreArray(*v, &array));
59     PetscCall(PetscLogObjectParent((PetscObject)*v, (PetscObject)vw->localrep));
60 
61     vw->localupdate = w->localupdate;
62     if (vw->localupdate) PetscCall(PetscObjectReference((PetscObject)vw->localupdate));
63   }
64 
65   /* New vector should inherit stashing property of parent */
66   (*v)->stash.donotstash   = win->stash.donotstash;
67   (*v)->stash.ignorenegidx = win->stash.ignorenegidx;
68 
69   PetscCall(PetscObjectListDuplicate(((PetscObject)win)->olist, &((PetscObject)(*v))->olist));
70   PetscCall(PetscFunctionListDuplicate(((PetscObject)win)->qlist, &((PetscObject)(*v))->qlist));
71 
72   (*v)->map->bs   = PetscAbs(win->map->bs);
73   (*v)->bstash.bs = win->bstash.bs;
74   PetscFunctionReturn(0);
75 }
76 
77 static PetscErrorCode VecSetOption_MPI(Vec V, VecOption op, PetscBool flag) {
78   Vec_MPI *v = (Vec_MPI *)V->data;
79 
80   PetscFunctionBegin;
81   switch (op) {
82   case VEC_IGNORE_OFF_PROC_ENTRIES: V->stash.donotstash = flag; break;
83   case VEC_IGNORE_NEGATIVE_INDICES: V->stash.ignorenegidx = flag; break;
84   case VEC_SUBSET_OFF_PROC_ENTRIES:
85     v->assembly_subset = flag;              /* See the same logic in MatAssembly wrt MAT_SUBSET_OFF_PROC_ENTRIES */
86     if (!v->assembly_subset) {              /* User indicates "do not reuse the communication pattern" */
87       PetscCall(VecAssemblyReset_MPI(V));   /* Reset existing pattern to free memory */
88       v->first_assembly_done = PETSC_FALSE; /* Mark the first assembly is not done */
89     }
90     break;
91   }
92 
93   PetscFunctionReturn(0);
94 }
95 
96 static PetscErrorCode VecResetArray_MPI(Vec vin) {
97   Vec_MPI *v = (Vec_MPI *)vin->data;
98 
99   PetscFunctionBegin;
100   v->array         = v->unplacedarray;
101   v->unplacedarray = NULL;
102   if (v->localrep) PetscCall(VecResetArray(v->localrep));
103   PetscFunctionReturn(0);
104 }
105 
106 static PetscErrorCode VecAssemblySend_MPI_Private(MPI_Comm comm, const PetscMPIInt tag[], PetscMPIInt rankid, PetscMPIInt rank, void *sdata, MPI_Request req[], void *ctx) {
107   Vec                X   = (Vec)ctx;
108   Vec_MPI           *x   = (Vec_MPI *)X->data;
109   VecAssemblyHeader *hdr = (VecAssemblyHeader *)sdata;
110   PetscInt           bs  = X->map->bs;
111 
112   PetscFunctionBegin;
113   /* x->first_assembly_done indicates we are reusing a communication network. In that case, some
114      messages can be empty, but we have to send them this time if we sent them before because the
115      receiver is expecting them.
116    */
117   if (hdr->count || (x->first_assembly_done && x->sendptrs[rankid].ints)) {
118     PetscCallMPI(MPI_Isend(x->sendptrs[rankid].ints, hdr->count, MPIU_INT, rank, tag[0], comm, &req[0]));
119     PetscCallMPI(MPI_Isend(x->sendptrs[rankid].scalars, hdr->count, MPIU_SCALAR, rank, tag[1], comm, &req[1]));
120   }
121   if (hdr->bcount || (x->first_assembly_done && x->sendptrs[rankid].intb)) {
122     PetscCallMPI(MPI_Isend(x->sendptrs[rankid].intb, hdr->bcount, MPIU_INT, rank, tag[2], comm, &req[2]));
123     PetscCallMPI(MPI_Isend(x->sendptrs[rankid].scalarb, hdr->bcount * bs, MPIU_SCALAR, rank, tag[3], comm, &req[3]));
124   }
125   PetscFunctionReturn(0);
126 }
127 
128 static PetscErrorCode VecAssemblyRecv_MPI_Private(MPI_Comm comm, const PetscMPIInt tag[], PetscMPIInt rank, void *rdata, MPI_Request req[], void *ctx) {
129   Vec                X   = (Vec)ctx;
130   Vec_MPI           *x   = (Vec_MPI *)X->data;
131   VecAssemblyHeader *hdr = (VecAssemblyHeader *)rdata;
132   PetscInt           bs  = X->map->bs;
133   VecAssemblyFrame  *frame;
134 
135   PetscFunctionBegin;
136   PetscCall(PetscSegBufferGet(x->segrecvframe, 1, &frame));
137 
138   if (hdr->count) {
139     PetscCall(PetscSegBufferGet(x->segrecvint, hdr->count, &frame->ints));
140     PetscCallMPI(MPI_Irecv(frame->ints, hdr->count, MPIU_INT, rank, tag[0], comm, &req[0]));
141     PetscCall(PetscSegBufferGet(x->segrecvscalar, hdr->count, &frame->scalars));
142     PetscCallMPI(MPI_Irecv(frame->scalars, hdr->count, MPIU_SCALAR, rank, tag[1], comm, &req[1]));
143     frame->pendings = 2;
144   } else {
145     frame->ints     = NULL;
146     frame->scalars  = NULL;
147     frame->pendings = 0;
148   }
149 
150   if (hdr->bcount) {
151     PetscCall(PetscSegBufferGet(x->segrecvint, hdr->bcount, &frame->intb));
152     PetscCallMPI(MPI_Irecv(frame->intb, hdr->bcount, MPIU_INT, rank, tag[2], comm, &req[2]));
153     PetscCall(PetscSegBufferGet(x->segrecvscalar, hdr->bcount * bs, &frame->scalarb));
154     PetscCallMPI(MPI_Irecv(frame->scalarb, hdr->bcount * bs, MPIU_SCALAR, rank, tag[3], comm, &req[3]));
155     frame->pendingb = 2;
156   } else {
157     frame->intb     = NULL;
158     frame->scalarb  = NULL;
159     frame->pendingb = 0;
160   }
161   PetscFunctionReturn(0);
162 }
163 
164 static PetscErrorCode VecAssemblyBegin_MPI_BTS(Vec X) {
165   Vec_MPI *x = (Vec_MPI *)X->data;
166   MPI_Comm comm;
167   PetscInt i, j, jb, bs;
168 
169   PetscFunctionBegin;
170   if (X->stash.donotstash) PetscFunctionReturn(0);
171 
172   PetscCall(PetscObjectGetComm((PetscObject)X, &comm));
173   PetscCall(VecGetBlockSize(X, &bs));
174   if (PetscDefined(USE_DEBUG)) {
175     InsertMode addv;
176     PetscCall(MPIU_Allreduce((PetscEnum *)&X->stash.insertmode, (PetscEnum *)&addv, 1, MPIU_ENUM, MPI_BOR, comm));
177     PetscCheck(addv != (ADD_VALUES | INSERT_VALUES), comm, PETSC_ERR_ARG_NOTSAMETYPE, "Some processors inserted values while others added");
178   }
179   X->bstash.insertmode = X->stash.insertmode; /* Block stash implicitly tracks InsertMode of scalar stash */
180 
181   PetscCall(VecStashSortCompress_Private(&X->stash));
182   PetscCall(VecStashSortCompress_Private(&X->bstash));
183 
184   if (!x->sendranks) {
185     PetscMPIInt nowners, bnowners, *owners, *bowners;
186     PetscInt    ntmp;
187     PetscCall(VecStashGetOwnerList_Private(&X->stash, X->map, &nowners, &owners));
188     PetscCall(VecStashGetOwnerList_Private(&X->bstash, X->map, &bnowners, &bowners));
189     PetscCall(PetscMergeMPIIntArray(nowners, owners, bnowners, bowners, &ntmp, &x->sendranks));
190     x->nsendranks = ntmp;
191     PetscCall(PetscFree(owners));
192     PetscCall(PetscFree(bowners));
193     PetscCall(PetscMalloc1(x->nsendranks, &x->sendhdr));
194     PetscCall(PetscCalloc1(x->nsendranks, &x->sendptrs));
195   }
196   for (i = 0, j = 0, jb = 0; i < x->nsendranks; i++) {
197     PetscMPIInt rank         = x->sendranks[i];
198     x->sendhdr[i].insertmode = X->stash.insertmode;
199     /* Initialize pointers for non-empty stashes the first time around.  Subsequent assemblies with
200      * VEC_SUBSET_OFF_PROC_ENTRIES will leave the old pointers (dangling because the stash has been collected) when
201      * there is nothing new to send, so that size-zero messages get sent instead. */
202     x->sendhdr[i].count      = 0;
203     if (X->stash.n) {
204       x->sendptrs[i].ints    = &X->stash.idx[j];
205       x->sendptrs[i].scalars = &X->stash.array[j];
206       for (; j < X->stash.n && X->stash.idx[j] < X->map->range[rank + 1]; j++) x->sendhdr[i].count++;
207     }
208     x->sendhdr[i].bcount = 0;
209     if (X->bstash.n) {
210       x->sendptrs[i].intb    = &X->bstash.idx[jb];
211       x->sendptrs[i].scalarb = &X->bstash.array[jb * bs];
212       for (; jb < X->bstash.n && X->bstash.idx[jb] * bs < X->map->range[rank + 1]; jb++) x->sendhdr[i].bcount++;
213     }
214   }
215 
216   if (!x->segrecvint) PetscCall(PetscSegBufferCreate(sizeof(PetscInt), 1000, &x->segrecvint));
217   if (!x->segrecvscalar) PetscCall(PetscSegBufferCreate(sizeof(PetscScalar), 1000, &x->segrecvscalar));
218   if (!x->segrecvframe) PetscCall(PetscSegBufferCreate(sizeof(VecAssemblyFrame), 50, &x->segrecvframe));
219   if (x->first_assembly_done) { /* this is not the first assembly */
220     PetscMPIInt tag[4];
221     for (i = 0; i < 4; i++) PetscCall(PetscCommGetNewTag(comm, &tag[i]));
222     for (i = 0; i < x->nsendranks; i++) PetscCall(VecAssemblySend_MPI_Private(comm, tag, i, x->sendranks[i], x->sendhdr + i, x->sendreqs + 4 * i, X));
223     for (i = 0; i < x->nrecvranks; i++) PetscCall(VecAssemblyRecv_MPI_Private(comm, tag, x->recvranks[i], x->recvhdr + i, x->recvreqs + 4 * i, X));
224     x->use_status = PETSC_TRUE;
225   } else { /* First time assembly */
226     PetscCall(PetscCommBuildTwoSidedFReq(comm, 3, MPIU_INT, x->nsendranks, x->sendranks, (PetscInt *)x->sendhdr, &x->nrecvranks, &x->recvranks, &x->recvhdr, 4, &x->sendreqs, &x->recvreqs, VecAssemblySend_MPI_Private, VecAssemblyRecv_MPI_Private, X));
227     x->use_status = PETSC_FALSE;
228   }
229 
230   /* The first_assembly_done flag is only meaningful when x->assembly_subset is set.
231      This line says when assembly_subset is set, then we mark that the first assembly is done.
232    */
233   x->first_assembly_done = x->assembly_subset;
234 
235   {
236     PetscInt nstash, reallocs;
237     PetscCall(VecStashGetInfo_Private(&X->stash, &nstash, &reallocs));
238     PetscCall(PetscInfo(X, "Stash has %" PetscInt_FMT " entries, uses %" PetscInt_FMT " mallocs.\n", nstash, reallocs));
239     PetscCall(VecStashGetInfo_Private(&X->bstash, &nstash, &reallocs));
240     PetscCall(PetscInfo(X, "Block-Stash has %" PetscInt_FMT " entries, uses %" PetscInt_FMT " mallocs.\n", nstash, reallocs));
241   }
242   PetscFunctionReturn(0);
243 }
244 
245 static PetscErrorCode VecAssemblyEnd_MPI_BTS(Vec X) {
246   Vec_MPI          *x  = (Vec_MPI *)X->data;
247   PetscInt          bs = X->map->bs;
248   PetscMPIInt       npending, *some_indices, r;
249   MPI_Status       *some_statuses;
250   PetscScalar      *xarray;
251   VecAssemblyFrame *frame;
252 
253   PetscFunctionBegin;
254   if (X->stash.donotstash) {
255     X->stash.insertmode  = NOT_SET_VALUES;
256     X->bstash.insertmode = NOT_SET_VALUES;
257     PetscFunctionReturn(0);
258   }
259 
260   PetscCheck(x->segrecvframe, PETSC_COMM_SELF, PETSC_ERR_PLIB, "Missing segrecvframe! Probably you forgot to call VecAssemblyBegin first");
261   PetscCall(VecGetArray(X, &xarray));
262   PetscCall(PetscSegBufferExtractInPlace(x->segrecvframe, &frame));
263   PetscCall(PetscMalloc2(4 * x->nrecvranks, &some_indices, x->use_status ? 4 * x->nrecvranks : 0, &some_statuses));
264   for (r = 0, npending = 0; r < x->nrecvranks; r++) npending += frame[r].pendings + frame[r].pendingb;
265   while (npending > 0) {
266     PetscMPIInt ndone = 0, ii;
267     /* Filling MPI_Status fields requires some resources from the MPI library.  We skip it on the first assembly, or
268      * when VEC_SUBSET_OFF_PROC_ENTRIES has not been set, because we could exchange exact sizes in the initial
269      * rendezvous.  When the rendezvous is elided, however, we use MPI_Status to get actual message lengths, so that
270      * subsequent assembly can set a proper subset of the values. */
271     PetscCallMPI(MPI_Waitsome(4 * x->nrecvranks, x->recvreqs, &ndone, some_indices, x->use_status ? some_statuses : MPI_STATUSES_IGNORE));
272     for (ii = 0; ii < ndone; ii++) {
273       PetscInt     i     = some_indices[ii] / 4, j, k;
274       InsertMode   imode = (InsertMode)x->recvhdr[i].insertmode;
275       PetscInt    *recvint;
276       PetscScalar *recvscalar;
277       PetscBool    intmsg   = (PetscBool)(some_indices[ii] % 2 == 0);
278       PetscBool    blockmsg = (PetscBool)((some_indices[ii] % 4) / 2 == 1);
279       npending--;
280       if (!blockmsg) { /* Scalar stash */
281         PetscMPIInt count;
282         if (--frame[i].pendings > 0) continue;
283         if (x->use_status) {
284           PetscCallMPI(MPI_Get_count(&some_statuses[ii], intmsg ? MPIU_INT : MPIU_SCALAR, &count));
285         } else count = x->recvhdr[i].count;
286         for (j = 0, recvint = frame[i].ints, recvscalar = frame[i].scalars; j < count; j++, recvint++) {
287           PetscInt loc = *recvint - X->map->rstart;
288           PetscCheck(*recvint >= X->map->rstart && X->map->rend > *recvint, PETSC_COMM_SELF, PETSC_ERR_PLIB, "Received vector entry %" PetscInt_FMT " out of local range [%" PetscInt_FMT ",%" PetscInt_FMT ")]", *recvint, X->map->rstart, X->map->rend);
289           switch (imode) {
290           case ADD_VALUES: xarray[loc] += *recvscalar++; break;
291           case INSERT_VALUES: xarray[loc] = *recvscalar++; break;
292           default: SETERRQ(PETSC_COMM_SELF, PETSC_ERR_SUP, "Insert mode not supported 0x%x", imode);
293           }
294         }
295       } else { /* Block stash */
296         PetscMPIInt count;
297         if (--frame[i].pendingb > 0) continue;
298         if (x->use_status) {
299           PetscCallMPI(MPI_Get_count(&some_statuses[ii], intmsg ? MPIU_INT : MPIU_SCALAR, &count));
300           if (!intmsg) count /= bs; /* Convert from number of scalars to number of blocks */
301         } else count = x->recvhdr[i].bcount;
302         for (j = 0, recvint = frame[i].intb, recvscalar = frame[i].scalarb; j < count; j++, recvint++) {
303           PetscInt loc = (*recvint) * bs - X->map->rstart;
304           switch (imode) {
305           case ADD_VALUES:
306             for (k = loc; k < loc + bs; k++) xarray[k] += *recvscalar++;
307             break;
308           case INSERT_VALUES:
309             for (k = loc; k < loc + bs; k++) xarray[k] = *recvscalar++;
310             break;
311           default: SETERRQ(PETSC_COMM_SELF, PETSC_ERR_SUP, "Insert mode not supported 0x%x", imode);
312           }
313         }
314       }
315     }
316   }
317   PetscCall(VecRestoreArray(X, &xarray));
318   PetscCallMPI(MPI_Waitall(4 * x->nsendranks, x->sendreqs, MPI_STATUSES_IGNORE));
319   PetscCall(PetscFree2(some_indices, some_statuses));
320   if (x->assembly_subset) {
321     PetscCall(PetscSegBufferExtractInPlace(x->segrecvint, NULL));
322     PetscCall(PetscSegBufferExtractInPlace(x->segrecvscalar, NULL));
323   } else {
324     PetscCall(VecAssemblyReset_MPI(X));
325   }
326 
327   X->stash.insertmode  = NOT_SET_VALUES;
328   X->bstash.insertmode = NOT_SET_VALUES;
329   PetscCall(VecStashScatterEnd_Private(&X->stash));
330   PetscCall(VecStashScatterEnd_Private(&X->bstash));
331   PetscFunctionReturn(0);
332 }
333 
334 PetscErrorCode VecAssemblyReset_MPI(Vec X) {
335   Vec_MPI *x = (Vec_MPI *)X->data;
336 
337   PetscFunctionBegin;
338   PetscCall(PetscFree(x->sendreqs));
339   PetscCall(PetscFree(x->recvreqs));
340   PetscCall(PetscFree(x->sendranks));
341   PetscCall(PetscFree(x->recvranks));
342   PetscCall(PetscFree(x->sendhdr));
343   PetscCall(PetscFree(x->recvhdr));
344   PetscCall(PetscFree(x->sendptrs));
345   PetscCall(PetscSegBufferDestroy(&x->segrecvint));
346   PetscCall(PetscSegBufferDestroy(&x->segrecvscalar));
347   PetscCall(PetscSegBufferDestroy(&x->segrecvframe));
348   PetscFunctionReturn(0);
349 }
350 
351 static PetscErrorCode VecSetFromOptions_MPI(Vec X, PetscOptionItems *PetscOptionsObject) {
352 #if !defined(PETSC_HAVE_MPIUNI)
353   PetscBool flg = PETSC_FALSE, set;
354 
355   PetscFunctionBegin;
356   PetscOptionsHeadBegin(PetscOptionsObject, "VecMPI Options");
357   PetscCall(PetscOptionsBool("-vec_assembly_legacy", "Use MPI 1 version of assembly", "", flg, &flg, &set));
358   if (set) {
359     X->ops->assemblybegin = flg ? VecAssemblyBegin_MPI : VecAssemblyBegin_MPI_BTS;
360     X->ops->assemblyend   = flg ? VecAssemblyEnd_MPI : VecAssemblyEnd_MPI_BTS;
361   }
362   PetscOptionsHeadEnd();
363 #else
364   PetscFunctionBegin;
365   X->ops->assemblybegin = VecAssemblyBegin_MPI;
366   X->ops->assemblyend   = VecAssemblyEnd_MPI;
367 #endif
368   PetscFunctionReturn(0);
369 }
370 
371 static struct _VecOps DvOps = {PetscDesignatedInitializer(duplicate, VecDuplicate_MPI), /* 1 */
372                                PetscDesignatedInitializer(duplicatevecs, VecDuplicateVecs_Default),
373                                PetscDesignatedInitializer(destroyvecs, VecDestroyVecs_Default),
374                                PetscDesignatedInitializer(dot, VecDot_MPI),
375                                PetscDesignatedInitializer(mdot, VecMDot_MPI),
376                                PetscDesignatedInitializer(norm, VecNorm_MPI),
377                                PetscDesignatedInitializer(tdot, VecTDot_MPI),
378                                PetscDesignatedInitializer(mtdot, VecMTDot_MPI),
379                                PetscDesignatedInitializer(scale, VecScale_Seq),
380                                PetscDesignatedInitializer(copy, VecCopy_Seq), /* 10 */
381                                PetscDesignatedInitializer(set, VecSet_Seq),
382                                PetscDesignatedInitializer(swap, VecSwap_Seq),
383                                PetscDesignatedInitializer(axpy, VecAXPY_Seq),
384                                PetscDesignatedInitializer(axpby, VecAXPBY_Seq),
385                                PetscDesignatedInitializer(maxpy, VecMAXPY_Seq),
386                                PetscDesignatedInitializer(aypx, VecAYPX_Seq),
387                                PetscDesignatedInitializer(waxpy, VecWAXPY_Seq),
388                                PetscDesignatedInitializer(axpbypcz, VecAXPBYPCZ_Seq),
389                                PetscDesignatedInitializer(pointwisemult, VecPointwiseMult_Seq),
390                                PetscDesignatedInitializer(pointwisedivide, VecPointwiseDivide_Seq),
391                                PetscDesignatedInitializer(setvalues, VecSetValues_MPI), /* 20 */
392                                PetscDesignatedInitializer(assemblybegin, VecAssemblyBegin_MPI_BTS),
393                                PetscDesignatedInitializer(assemblyend, VecAssemblyEnd_MPI_BTS),
394                                PetscDesignatedInitializer(getarray, NULL),
395                                PetscDesignatedInitializer(getsize, VecGetSize_MPI),
396                                PetscDesignatedInitializer(getlocalsize, VecGetSize_Seq),
397                                PetscDesignatedInitializer(restorearray, NULL),
398                                PetscDesignatedInitializer(max, VecMax_MPI),
399                                PetscDesignatedInitializer(min, VecMin_MPI),
400                                PetscDesignatedInitializer(setrandom, VecSetRandom_Seq),
401                                PetscDesignatedInitializer(setoption, VecSetOption_MPI),
402                                PetscDesignatedInitializer(setvaluesblocked, VecSetValuesBlocked_MPI),
403                                PetscDesignatedInitializer(destroy, VecDestroy_MPI),
404                                PetscDesignatedInitializer(view, VecView_MPI),
405                                PetscDesignatedInitializer(placearray, VecPlaceArray_MPI),
406                                PetscDesignatedInitializer(replacearray, VecReplaceArray_Seq),
407                                PetscDesignatedInitializer(dot_local, VecDot_Seq),
408                                PetscDesignatedInitializer(tdot_local, VecTDot_Seq),
409                                PetscDesignatedInitializer(norm_local, VecNorm_Seq),
410                                PetscDesignatedInitializer(mdot_local, VecMDot_Seq),
411                                PetscDesignatedInitializer(mtdot_local, VecMTDot_Seq),
412                                PetscDesignatedInitializer(load, VecLoad_Default),
413                                PetscDesignatedInitializer(reciprocal, VecReciprocal_Default),
414                                PetscDesignatedInitializer(conjugate, VecConjugate_Seq),
415                                PetscDesignatedInitializer(setlocaltoglobalmapping, NULL),
416                                PetscDesignatedInitializer(setvalueslocal, NULL),
417                                PetscDesignatedInitializer(resetarray, VecResetArray_MPI),
418                                PetscDesignatedInitializer(setfromoptions, VecSetFromOptions_MPI), /*set from options */
419                                PetscDesignatedInitializer(maxpointwisedivide, VecMaxPointwiseDivide_Seq),
420                                PetscDesignatedInitializer(pointwisemax, VecPointwiseMax_Seq),
421                                PetscDesignatedInitializer(pointwisemaxabs, VecPointwiseMaxAbs_Seq),
422                                PetscDesignatedInitializer(pointwisemin, VecPointwiseMin_Seq),
423                                PetscDesignatedInitializer(getvalues, VecGetValues_MPI),
424                                PetscDesignatedInitializer(sqrt, NULL),
425                                PetscDesignatedInitializer(abs, NULL),
426                                PetscDesignatedInitializer(exp, NULL),
427                                PetscDesignatedInitializer(log, NULL),
428                                PetscDesignatedInitializer(shift, NULL),
429                                PetscDesignatedInitializer(create, NULL), /* really? */
430                                PetscDesignatedInitializer(stridegather, VecStrideGather_Default),
431                                PetscDesignatedInitializer(stridescatter, VecStrideScatter_Default),
432                                PetscDesignatedInitializer(dotnorm2, NULL),
433                                PetscDesignatedInitializer(getsubvector, NULL),
434                                PetscDesignatedInitializer(restoresubvector, NULL),
435                                PetscDesignatedInitializer(getarrayread, NULL),
436                                PetscDesignatedInitializer(restorearrayread, NULL),
437                                PetscDesignatedInitializer(stridesubsetgather, VecStrideSubSetGather_Default),
438                                PetscDesignatedInitializer(stridesubsetscatter, VecStrideSubSetScatter_Default),
439                                PetscDesignatedInitializer(viewnative, VecView_MPI),
440                                PetscDesignatedInitializer(loadnative, NULL),
441                                PetscDesignatedInitializer(getlocalvector, NULL),
442                                PetscDesignatedInitializer(restorelocalvector, NULL),
443                                PetscDesignatedInitializer(getlocalvectorread, NULL),
444                                PetscDesignatedInitializer(restorelocalvectorread, NULL),
445                                PetscDesignatedInitializer(bindtocpu, NULL),
446                                PetscDesignatedInitializer(getarraywrite, NULL),
447                                PetscDesignatedInitializer(restorearraywrite, NULL),
448                                PetscDesignatedInitializer(getarrayandmemtype, NULL),
449                                PetscDesignatedInitializer(restorearrayandmemtype, NULL),
450                                PetscDesignatedInitializer(getarrayreadandmemtype, NULL),
451                                PetscDesignatedInitializer(restorearrayreadandmemtype, NULL),
452                                PetscDesignatedInitializer(getarraywriteandmemtype, NULL),
453                                PetscDesignatedInitializer(restorearraywriteandmemtype, NULL),
454                                PetscDesignatedInitializer(concatenate, NULL),
455                                PetscDesignatedInitializer(sum, NULL),
456                                PetscDesignatedInitializer(setpreallocationcoo, VecSetPreallocationCOO_MPI),
457                                PetscDesignatedInitializer(setvaluescoo, VecSetValuesCOO_MPI)};
458 
459 /*
460     VecCreate_MPI_Private - Basic create routine called by VecCreate_MPI() (i.e. VecCreateMPI()),
461     VecCreateMPIWithArray(), VecCreate_Shared() (i.e. VecCreateShared()), VecCreateGhost(),
462     VecDuplicate_MPI(), VecCreateGhostWithArray(), VecDuplicate_MPI(), and VecDuplicate_Shared()
463 
464     If alloc is true and array is NULL then this routine allocates the space, otherwise
465     no space is allocated.
466 */
467 PetscErrorCode VecCreate_MPI_Private(Vec v, PetscBool alloc, PetscInt nghost, const PetscScalar array[]) {
468   Vec_MPI *s;
469 
470   PetscFunctionBegin;
471   PetscCall(PetscNewLog(v, &s));
472   v->data = (void *)s;
473   PetscCall(PetscMemcpy(v->ops, &DvOps, sizeof(DvOps)));
474   s->nghost      = nghost;
475   v->petscnative = PETSC_TRUE;
476   if (array) v->offloadmask = PETSC_OFFLOAD_CPU;
477 
478   PetscCall(PetscLayoutSetUp(v->map));
479 
480   s->array           = (PetscScalar *)array;
481   s->array_allocated = NULL;
482   if (alloc && !array) {
483     PetscInt n = v->map->n + nghost;
484     PetscCall(PetscCalloc1(n, &s->array));
485     PetscCall(PetscLogObjectMemory((PetscObject)v, n * sizeof(PetscScalar)));
486     s->array_allocated = s->array;
487   }
488 
489   /* By default parallel vectors do not have local representation */
490   s->localrep    = NULL;
491   s->localupdate = NULL;
492 
493   v->stash.insertmode  = NOT_SET_VALUES;
494   v->bstash.insertmode = NOT_SET_VALUES;
495   /* create the stashes. The block-size for bstash is set later when
496      VecSetValuesBlocked is called.
497   */
498   PetscCall(VecStashCreate_Private(PetscObjectComm((PetscObject)v), 1, &v->stash));
499   PetscCall(VecStashCreate_Private(PetscObjectComm((PetscObject)v), PetscAbs(v->map->bs), &v->bstash));
500 
501 #if defined(PETSC_HAVE_MATLAB_ENGINE)
502   PetscCall(PetscObjectComposeFunction((PetscObject)v, "PetscMatlabEnginePut_C", VecMatlabEnginePut_Default));
503   PetscCall(PetscObjectComposeFunction((PetscObject)v, "PetscMatlabEngineGet_C", VecMatlabEngineGet_Default));
504 #endif
505   PetscCall(PetscObjectChangeTypeName((PetscObject)v, VECMPI));
506   PetscFunctionReturn(0);
507 }
508 
509 /*MC
510    VECMPI - VECMPI = "mpi" - The basic parallel vector
511 
512    Options Database Keys:
513 . -vec_type mpi - sets the vector type to VECMPI during a call to VecSetFromOptions()
514 
515   Level: beginner
516 
517 .seealso: `VecCreate()`, `VecSetType()`, `VecSetFromOptions()`, `VecCreateMPIWithArray()`, `VECMPI`, `VecType`, `VecCreateMPI()`, `VecCreateMPI()`
518 M*/
519 
520 PetscErrorCode VecCreate_MPI(Vec vv) {
521   PetscFunctionBegin;
522   PetscCall(VecCreate_MPI_Private(vv, PETSC_TRUE, 0, NULL));
523   PetscFunctionReturn(0);
524 }
525 
526 /*MC
527    VECSTANDARD = "standard" - A VECSEQ on one process and VECMPI on more than one process
528 
529    Options Database Keys:
530 . -vec_type standard - sets a vector type to standard on calls to VecSetFromOptions()
531 
532   Level: beginner
533 
534 .seealso: `VecCreateSeq()`, `VecCreateMPI()`
535 M*/
536 
537 PETSC_EXTERN PetscErrorCode VecCreate_Standard(Vec v) {
538   PetscMPIInt size;
539 
540   PetscFunctionBegin;
541   PetscCallMPI(MPI_Comm_size(PetscObjectComm((PetscObject)v), &size));
542   if (size == 1) {
543     PetscCall(VecSetType(v, VECSEQ));
544   } else {
545     PetscCall(VecSetType(v, VECMPI));
546   }
547   PetscFunctionReturn(0);
548 }
549 
550 /*@C
551    VecCreateMPIWithArray - Creates a parallel, array-style vector,
552    where the user provides the array space to store the vector values.
553 
554    Collective
555 
556    Input Parameters:
557 +  comm  - the MPI communicator to use
558 .  bs    - block size, same meaning as VecSetBlockSize()
559 .  n     - local vector length, cannot be PETSC_DECIDE
560 .  N     - global vector length (or PETSC_DECIDE to have calculated)
561 -  array - the user provided array to store the vector values
562 
563    Output Parameter:
564 .  vv - the vector
565 
566    Notes:
567    Use VecDuplicate() or VecDuplicateVecs() to form additional vectors of the
568    same type as an existing vector.
569 
570    If the user-provided array is NULL, then VecPlaceArray() can be used
571    at a later stage to SET the array for storing the vector values.
572 
573    PETSc does NOT free the array when the vector is destroyed via VecDestroy().
574    The user should not free the array until the vector is destroyed.
575 
576    Level: intermediate
577 
578 .seealso: `VecCreateSeqWithArray()`, `VecCreate()`, `VecDuplicate()`, `VecDuplicateVecs()`, `VecCreateGhost()`,
579           `VecCreateMPI()`, `VecCreateGhostWithArray()`, `VecPlaceArray()`
580 
581 @*/
582 PetscErrorCode VecCreateMPIWithArray(MPI_Comm comm, PetscInt bs, PetscInt n, PetscInt N, const PetscScalar array[], Vec *vv) {
583   PetscFunctionBegin;
584   PetscCheck(n != PETSC_DECIDE, PETSC_COMM_SELF, PETSC_ERR_ARG_OUTOFRANGE, "Must set local size of vector");
585   PetscCall(PetscSplitOwnership(comm, &n, &N));
586   PetscCall(VecCreate(comm, vv));
587   PetscCall(VecSetSizes(*vv, n, N));
588   PetscCall(VecSetBlockSize(*vv, bs));
589   PetscCall(VecCreate_MPI_Private(*vv, PETSC_FALSE, 0, array));
590   PetscFunctionReturn(0);
591 }
592 
593 /*@C
594    VecCreateGhostWithArray - Creates a parallel vector with ghost padding on each processor;
595    the caller allocates the array space.
596 
597    Collective
598 
599    Input Parameters:
600 +  comm - the MPI communicator to use
601 .  n - local vector length
602 .  N - global vector length (or PETSC_DECIDE to have calculated if n is given)
603 .  nghost - number of local ghost points
604 .  ghosts - global indices of ghost points (or NULL if not needed), these do not need to be in increasing order (sorted)
605 -  array - the space to store the vector values (as long as n + nghost)
606 
607    Output Parameter:
608 .  vv - the global vector representation (without ghost points as part of vector)
609 
610    Notes:
611    Use VecGhostGetLocalForm() to access the local, ghosted representation
612    of the vector.
613 
614    This also automatically sets the ISLocalToGlobalMapping() for this vector.
615 
616    Level: advanced
617 
618 .seealso: `VecCreate()`, `VecGhostGetLocalForm()`, `VecGhostRestoreLocalForm()`,
619           `VecCreateGhost()`, `VecCreateSeqWithArray()`, `VecCreateMPIWithArray()`,
620           `VecCreateGhostBlock()`, `VecCreateGhostBlockWithArray()`, `VecMPISetGhost()`
621 
622 @*/
623 PetscErrorCode VecCreateGhostWithArray(MPI_Comm comm, PetscInt n, PetscInt N, PetscInt nghost, const PetscInt ghosts[], const PetscScalar array[], Vec *vv) {
624   Vec_MPI               *w;
625   PetscScalar           *larray;
626   IS                     from, to;
627   ISLocalToGlobalMapping ltog;
628   PetscInt               rstart, i, *indices;
629 
630   PetscFunctionBegin;
631   *vv = NULL;
632 
633   PetscCheck(n != PETSC_DECIDE, PETSC_COMM_SELF, PETSC_ERR_ARG_OUTOFRANGE, "Must set local size");
634   PetscCheck(nghost != PETSC_DECIDE, PETSC_COMM_SELF, PETSC_ERR_ARG_OUTOFRANGE, "Must set local ghost size");
635   PetscCheck(nghost >= 0, PETSC_COMM_SELF, PETSC_ERR_ARG_OUTOFRANGE, "Ghost length must be >= 0");
636   PetscCall(PetscSplitOwnership(comm, &n, &N));
637   /* Create global representation */
638   PetscCall(VecCreate(comm, vv));
639   PetscCall(VecSetSizes(*vv, n, N));
640   PetscCall(VecCreate_MPI_Private(*vv, PETSC_TRUE, nghost, array));
641   w = (Vec_MPI *)(*vv)->data;
642   /* Create local representation */
643   PetscCall(VecGetArray(*vv, &larray));
644   PetscCall(VecCreateSeqWithArray(PETSC_COMM_SELF, 1, n + nghost, larray, &w->localrep));
645   PetscCall(PetscLogObjectParent((PetscObject)*vv, (PetscObject)w->localrep));
646   PetscCall(VecRestoreArray(*vv, &larray));
647 
648   /*
649        Create scatter context for scattering (updating) ghost values
650   */
651   PetscCall(ISCreateGeneral(comm, nghost, ghosts, PETSC_COPY_VALUES, &from));
652   PetscCall(ISCreateStride(PETSC_COMM_SELF, nghost, n, 1, &to));
653   PetscCall(VecScatterCreate(*vv, from, w->localrep, to, &w->localupdate));
654   PetscCall(PetscLogObjectParent((PetscObject)*vv, (PetscObject)w->localupdate));
655   PetscCall(ISDestroy(&to));
656   PetscCall(ISDestroy(&from));
657 
658   /* set local to global mapping for ghosted vector */
659   PetscCall(PetscMalloc1(n + nghost, &indices));
660   PetscCall(VecGetOwnershipRange(*vv, &rstart, NULL));
661   for (i = 0; i < n; i++) indices[i] = rstart + i;
662   for (i = 0; i < nghost; i++) indices[n + i] = ghosts[i];
663   PetscCall(ISLocalToGlobalMappingCreate(comm, 1, n + nghost, indices, PETSC_OWN_POINTER, &ltog));
664   PetscCall(VecSetLocalToGlobalMapping(*vv, ltog));
665   PetscCall(ISLocalToGlobalMappingDestroy(&ltog));
666   PetscFunctionReturn(0);
667 }
668 
669 /*@
670    VecCreateGhost - Creates a parallel vector with ghost padding on each processor.
671 
672    Collective
673 
674    Input Parameters:
675 +  comm - the MPI communicator to use
676 .  n - local vector length
677 .  N - global vector length (or PETSC_DECIDE to have calculated if n is given)
678 .  nghost - number of local ghost points
679 -  ghosts - global indices of ghost points, these do not need to be in increasing order (sorted)
680 
681    Output Parameter:
682 .  vv - the global vector representation (without ghost points as part of vector)
683 
684    Notes:
685    Use VecGhostGetLocalForm() to access the local, ghosted representation
686    of the vector.
687 
688    This also automatically sets the ISLocalToGlobalMapping() for this vector.
689 
690    Level: advanced
691 
692 .seealso: `VecCreateSeq()`, `VecCreate()`, `VecDuplicate()`, `VecDuplicateVecs()`, `VecCreateMPI()`,
693           `VecGhostGetLocalForm()`, `VecGhostRestoreLocalForm()`, `VecGhostUpdateBegin()`,
694           `VecCreateGhostWithArray()`, `VecCreateMPIWithArray()`, `VecGhostUpdateEnd()`,
695           `VecCreateGhostBlock()`, `VecCreateGhostBlockWithArray()`, `VecMPISetGhost()`
696 
697 @*/
698 PetscErrorCode VecCreateGhost(MPI_Comm comm, PetscInt n, PetscInt N, PetscInt nghost, const PetscInt ghosts[], Vec *vv) {
699   PetscFunctionBegin;
700   PetscCall(VecCreateGhostWithArray(comm, n, N, nghost, ghosts, NULL, vv));
701   PetscFunctionReturn(0);
702 }
703 
704 /*@
705    VecMPISetGhost - Sets the ghost points for an MPI ghost vector
706 
707    Collective on Vec
708 
709    Input Parameters:
710 +  vv - the MPI vector
711 .  nghost - number of local ghost points
712 -  ghosts - global indices of ghost points, these do not need to be in increasing order (sorted)
713 
714    Notes:
715    Use VecGhostGetLocalForm() to access the local, ghosted representation
716    of the vector.
717 
718    This also automatically sets the ISLocalToGlobalMapping() for this vector.
719 
720    You must call this AFTER you have set the type of the vector (with VecSetType()) and the size (with VecSetSizes()).
721 
722    Level: advanced
723 
724 .seealso: `VecCreateSeq()`, `VecCreate()`, `VecDuplicate()`, `VecDuplicateVecs()`, `VecCreateMPI()`,
725           `VecGhostGetLocalForm()`, `VecGhostRestoreLocalForm()`, `VecGhostUpdateBegin()`,
726           `VecCreateGhostWithArray()`, `VecCreateMPIWithArray()`, `VecGhostUpdateEnd()`,
727           `VecCreateGhostBlock()`, `VecCreateGhostBlockWithArray()`
728 
729 @*/
730 PetscErrorCode VecMPISetGhost(Vec vv, PetscInt nghost, const PetscInt ghosts[]) {
731   PetscBool flg;
732 
733   PetscFunctionBegin;
734   PetscCall(PetscObjectTypeCompare((PetscObject)vv, VECMPI, &flg));
735   /* if already fully existant VECMPI then basically destroy it and rebuild with ghosting */
736   if (flg) {
737     PetscInt               n, N;
738     Vec_MPI               *w;
739     PetscScalar           *larray;
740     IS                     from, to;
741     ISLocalToGlobalMapping ltog;
742     PetscInt               rstart, i, *indices;
743     MPI_Comm               comm;
744 
745     PetscCall(PetscObjectGetComm((PetscObject)vv, &comm));
746     n = vv->map->n;
747     N = vv->map->N;
748     PetscUseTypeMethod(vv, destroy);
749     PetscCall(VecSetSizes(vv, n, N));
750     PetscCall(VecCreate_MPI_Private(vv, PETSC_TRUE, nghost, NULL));
751     w = (Vec_MPI *)(vv)->data;
752     /* Create local representation */
753     PetscCall(VecGetArray(vv, &larray));
754     PetscCall(VecCreateSeqWithArray(PETSC_COMM_SELF, 1, n + nghost, larray, &w->localrep));
755     PetscCall(PetscLogObjectParent((PetscObject)vv, (PetscObject)w->localrep));
756     PetscCall(VecRestoreArray(vv, &larray));
757 
758     /*
759      Create scatter context for scattering (updating) ghost values
760      */
761     PetscCall(ISCreateGeneral(comm, nghost, ghosts, PETSC_COPY_VALUES, &from));
762     PetscCall(ISCreateStride(PETSC_COMM_SELF, nghost, n, 1, &to));
763     PetscCall(VecScatterCreate(vv, from, w->localrep, to, &w->localupdate));
764     PetscCall(PetscLogObjectParent((PetscObject)vv, (PetscObject)w->localupdate));
765     PetscCall(ISDestroy(&to));
766     PetscCall(ISDestroy(&from));
767 
768     /* set local to global mapping for ghosted vector */
769     PetscCall(PetscMalloc1(n + nghost, &indices));
770     PetscCall(VecGetOwnershipRange(vv, &rstart, NULL));
771 
772     for (i = 0; i < n; i++) indices[i] = rstart + i;
773     for (i = 0; i < nghost; i++) indices[n + i] = ghosts[i];
774 
775     PetscCall(ISLocalToGlobalMappingCreate(comm, 1, n + nghost, indices, PETSC_OWN_POINTER, &ltog));
776     PetscCall(VecSetLocalToGlobalMapping(vv, ltog));
777     PetscCall(ISLocalToGlobalMappingDestroy(&ltog));
778   } else {
779     PetscCheck(vv->ops->create != VecCreate_MPI, PetscObjectComm((PetscObject)vv), PETSC_ERR_ARG_WRONGSTATE, "Must set local or global size before setting ghosting");
780     PetscCheck(((PetscObject)vv)->type_name, PetscObjectComm((PetscObject)vv), PETSC_ERR_ARG_WRONGSTATE, "Must set type to VECMPI before ghosting");
781   }
782   PetscFunctionReturn(0);
783 }
784 
785 /* ------------------------------------------------------------------------------------------*/
786 /*@C
787    VecCreateGhostBlockWithArray - Creates a parallel vector with ghost padding on each processor;
788    the caller allocates the array space. Indices in the ghost region are based on blocks.
789 
790    Collective
791 
792    Input Parameters:
793 +  comm - the MPI communicator to use
794 .  bs - block size
795 .  n - local vector length
796 .  N - global vector length (or PETSC_DECIDE to have calculated if n is given)
797 .  nghost - number of local ghost blocks
798 .  ghosts - global indices of ghost blocks (or NULL if not needed), counts are by block not by index, these do not need to be in increasing order (sorted)
799 -  array - the space to store the vector values (as long as n + nghost*bs)
800 
801    Output Parameter:
802 .  vv - the global vector representation (without ghost points as part of vector)
803 
804    Notes:
805    Use VecGhostGetLocalForm() to access the local, ghosted representation
806    of the vector.
807 
808    n is the local vector size (total local size not the number of blocks) while nghost
809    is the number of blocks in the ghost portion, i.e. the number of elements in the ghost
810    portion is bs*nghost
811 
812    Level: advanced
813 
814 .seealso: `VecCreate()`, `VecGhostGetLocalForm()`, `VecGhostRestoreLocalForm()`,
815           `VecCreateGhost()`, `VecCreateSeqWithArray()`, `VecCreateMPIWithArray()`,
816           `VecCreateGhostWithArray()`, `VecCreateGhostBlock()`
817 
818 @*/
819 PetscErrorCode VecCreateGhostBlockWithArray(MPI_Comm comm, PetscInt bs, PetscInt n, PetscInt N, PetscInt nghost, const PetscInt ghosts[], const PetscScalar array[], Vec *vv) {
820   Vec_MPI               *w;
821   PetscScalar           *larray;
822   IS                     from, to;
823   ISLocalToGlobalMapping ltog;
824   PetscInt               rstart, i, nb, *indices;
825 
826   PetscFunctionBegin;
827   *vv = NULL;
828 
829   PetscCheck(n != PETSC_DECIDE, PETSC_COMM_SELF, PETSC_ERR_ARG_OUTOFRANGE, "Must set local size");
830   PetscCheck(nghost != PETSC_DECIDE, PETSC_COMM_SELF, PETSC_ERR_ARG_OUTOFRANGE, "Must set local ghost size");
831   PetscCheck(nghost >= 0, PETSC_COMM_SELF, PETSC_ERR_ARG_OUTOFRANGE, "Ghost length must be >= 0");
832   PetscCheck(n % bs == 0, PETSC_COMM_SELF, PETSC_ERR_ARG_INCOMP, "Local size must be a multiple of block size");
833   PetscCall(PetscSplitOwnership(comm, &n, &N));
834   /* Create global representation */
835   PetscCall(VecCreate(comm, vv));
836   PetscCall(VecSetSizes(*vv, n, N));
837   PetscCall(VecSetBlockSize(*vv, bs));
838   PetscCall(VecCreate_MPI_Private(*vv, PETSC_TRUE, nghost * bs, array));
839   w = (Vec_MPI *)(*vv)->data;
840   /* Create local representation */
841   PetscCall(VecGetArray(*vv, &larray));
842   PetscCall(VecCreateSeqWithArray(PETSC_COMM_SELF, bs, n + bs * nghost, larray, &w->localrep));
843   PetscCall(PetscLogObjectParent((PetscObject)*vv, (PetscObject)w->localrep));
844   PetscCall(VecRestoreArray(*vv, &larray));
845 
846   /*
847        Create scatter context for scattering (updating) ghost values
848   */
849   PetscCall(ISCreateBlock(comm, bs, nghost, ghosts, PETSC_COPY_VALUES, &from));
850   PetscCall(ISCreateStride(PETSC_COMM_SELF, bs * nghost, n, 1, &to));
851   PetscCall(VecScatterCreate(*vv, from, w->localrep, to, &w->localupdate));
852   PetscCall(PetscLogObjectParent((PetscObject)*vv, (PetscObject)w->localupdate));
853   PetscCall(ISDestroy(&to));
854   PetscCall(ISDestroy(&from));
855 
856   /* set local to global mapping for ghosted vector */
857   nb = n / bs;
858   PetscCall(PetscMalloc1(nb + nghost, &indices));
859   PetscCall(VecGetOwnershipRange(*vv, &rstart, NULL));
860   rstart = rstart / bs;
861 
862   for (i = 0; i < nb; i++) indices[i] = rstart + i;
863   for (i = 0; i < nghost; i++) indices[nb + i] = ghosts[i];
864 
865   PetscCall(ISLocalToGlobalMappingCreate(comm, bs, nb + nghost, indices, PETSC_OWN_POINTER, &ltog));
866   PetscCall(VecSetLocalToGlobalMapping(*vv, ltog));
867   PetscCall(ISLocalToGlobalMappingDestroy(&ltog));
868   PetscFunctionReturn(0);
869 }
870 
871 /*@
872    VecCreateGhostBlock - Creates a parallel vector with ghost padding on each processor.
873         The indicing of the ghost points is done with blocks.
874 
875    Collective
876 
877    Input Parameters:
878 +  comm - the MPI communicator to use
879 .  bs - the block size
880 .  n - local vector length
881 .  N - global vector length (or PETSC_DECIDE to have calculated if n is given)
882 .  nghost - number of local ghost blocks
883 -  ghosts - global indices of ghost blocks, counts are by block, not by individual index, these do not need to be in increasing order (sorted)
884 
885    Output Parameter:
886 .  vv - the global vector representation (without ghost points as part of vector)
887 
888    Notes:
889    Use VecGhostGetLocalForm() to access the local, ghosted representation
890    of the vector.
891 
892    n is the local vector size (total local size not the number of blocks) while nghost
893    is the number of blocks in the ghost portion, i.e. the number of elements in the ghost
894    portion is bs*nghost
895 
896    Level: advanced
897 
898 .seealso: `VecCreateSeq()`, `VecCreate()`, `VecDuplicate()`, `VecDuplicateVecs()`, `VecCreateMPI()`,
899           `VecGhostGetLocalForm()`, `VecGhostRestoreLocalForm()`,
900           `VecCreateGhostWithArray()`, `VecCreateMPIWithArray()`, `VecCreateGhostBlockWithArray()`
901 
902 @*/
903 PetscErrorCode VecCreateGhostBlock(MPI_Comm comm, PetscInt bs, PetscInt n, PetscInt N, PetscInt nghost, const PetscInt ghosts[], Vec *vv) {
904   PetscFunctionBegin;
905   PetscCall(VecCreateGhostBlockWithArray(comm, bs, n, N, nghost, ghosts, NULL, vv));
906   PetscFunctionReturn(0);
907 }
908