xref: /petsc/src/vec/vec/impls/mpi/pbvec.c (revision b98e75f9f81e78f593efd2c19514f44db7497d0a)
1 /*
2    This file contains routines for Parallel vector operations.
3  */
4 #include <petscsys.h>
5 #include <../src/vec/vec/impls/mpi/pvecimpl.h> /*I  "petscvec.h"   I*/
6 
7 PETSC_INTERN PetscErrorCode VecView_MPI_Draw(Vec, PetscViewer);
8 
9 PetscErrorCode VecPlaceArray_MPI(Vec vin, const PetscScalar *a)
10 {
11   Vec_MPI *v = (Vec_MPI *)vin->data;
12 
13   PetscFunctionBegin;
14   PetscCheck(!v->unplacedarray, PETSC_COMM_SELF, PETSC_ERR_ARG_WRONGSTATE, "VecPlaceArray() was already called on this vector, without a call to VecResetArray()");
15   v->unplacedarray = v->array; /* save previous array so reset can bring it back */
16   v->array         = (PetscScalar *)a;
17   if (v->localrep) PetscCall(VecPlaceArray(v->localrep, a));
18   PetscFunctionReturn(PETSC_SUCCESS);
19 }
20 
21 PetscErrorCode VecDuplicate_MPI(Vec win, Vec *v)
22 {
23   Vec_MPI     *vw, *w = (Vec_MPI *)win->data;
24   PetscScalar *array;
25 
26   PetscFunctionBegin;
27   PetscCall(VecCreateWithLayout_Private(win->map, v));
28 
29   PetscCall(VecCreate_MPI_Private(*v, PETSC_TRUE, w->nghost, NULL));
30   vw           = (Vec_MPI *)(*v)->data;
31   (*v)->ops[0] = win->ops[0];
32 
33   /* save local representation of the parallel vector (and scatter) if it exists */
34   if (w->localrep) {
35     PetscCall(VecGetArray(*v, &array));
36     PetscCall(VecCreateSeqWithArray(PETSC_COMM_SELF, PetscAbs(win->map->bs), win->map->n + w->nghost, array, &vw->localrep));
37     vw->localrep->ops[0] = w->localrep->ops[0];
38     PetscCall(VecRestoreArray(*v, &array));
39 
40     vw->localupdate = w->localupdate;
41     if (vw->localupdate) PetscCall(PetscObjectReference((PetscObject)vw->localupdate));
42 
43     vw->ghost = w->ghost;
44     if (vw->ghost) PetscCall(PetscObjectReference((PetscObject)vw->ghost));
45   }
46 
47   /* New vector should inherit stashing property of parent */
48   (*v)->stash.donotstash   = win->stash.donotstash;
49   (*v)->stash.ignorenegidx = win->stash.ignorenegidx;
50 
51   PetscCall(PetscObjectListDuplicate(((PetscObject)win)->olist, &((PetscObject)(*v))->olist));
52   PetscCall(PetscFunctionListDuplicate(((PetscObject)win)->qlist, &((PetscObject)(*v))->qlist));
53 
54   (*v)->bstash.bs = win->bstash.bs;
55   PetscFunctionReturn(PETSC_SUCCESS);
56 }
57 
58 static PetscErrorCode VecSetOption_MPI(Vec V, VecOption op, PetscBool flag)
59 {
60   Vec_MPI *v = (Vec_MPI *)V->data;
61 
62   PetscFunctionBegin;
63   switch (op) {
64   case VEC_IGNORE_OFF_PROC_ENTRIES:
65     V->stash.donotstash = flag;
66     break;
67   case VEC_IGNORE_NEGATIVE_INDICES:
68     V->stash.ignorenegidx = flag;
69     break;
70   case VEC_SUBSET_OFF_PROC_ENTRIES:
71     v->assembly_subset = flag;              /* See the same logic in MatAssembly wrt MAT_SUBSET_OFF_PROC_ENTRIES */
72     if (!v->assembly_subset) {              /* User indicates "do not reuse the communication pattern" */
73       PetscCall(VecAssemblyReset_MPI(V));   /* Reset existing pattern to free memory */
74       v->first_assembly_done = PETSC_FALSE; /* Mark that the first assembly has not been done */
75     }
76     break;
77   }
78 
79   PetscFunctionReturn(PETSC_SUCCESS);
80 }
81 
82 PetscErrorCode VecResetArray_MPI(Vec vin)
83 {
84   Vec_MPI *v = (Vec_MPI *)vin->data;
85 
86   PetscFunctionBegin;
87   v->array         = v->unplacedarray;
88   v->unplacedarray = NULL;
89   if (v->localrep) PetscCall(VecResetArray(v->localrep));
90   PetscFunctionReturn(PETSC_SUCCESS);
91 }
92 
93 static PetscErrorCode VecAssemblySend_MPI_Private(MPI_Comm comm, const PetscMPIInt tag[], PetscMPIInt rankid, PetscMPIInt rank, void *sdata, MPI_Request req[], void *ctx)
94 {
95   Vec                X   = (Vec)ctx;
96   Vec_MPI           *x   = (Vec_MPI *)X->data;
97   VecAssemblyHeader *hdr = (VecAssemblyHeader *)sdata;
98   PetscInt           bs  = X->map->bs;
99 
100   PetscFunctionBegin;
101   /* x->first_assembly_done indicates we are reusing a communication network. In that case, some
102      messages can be empty, but we have to send them this time if we sent them before because the
103      receiver is expecting them.
104    */
105   if (hdr->count || (x->first_assembly_done && x->sendptrs[rankid].ints)) {
106     PetscCallMPI(MPI_Isend(x->sendptrs[rankid].ints, hdr->count, MPIU_INT, rank, tag[0], comm, &req[0]));
107     PetscCallMPI(MPI_Isend(x->sendptrs[rankid].scalars, hdr->count, MPIU_SCALAR, rank, tag[1], comm, &req[1]));
108   }
109   if (hdr->bcount || (x->first_assembly_done && x->sendptrs[rankid].intb)) {
110     PetscCallMPI(MPI_Isend(x->sendptrs[rankid].intb, hdr->bcount, MPIU_INT, rank, tag[2], comm, &req[2]));
111     PetscCallMPI(MPI_Isend(x->sendptrs[rankid].scalarb, hdr->bcount * bs, MPIU_SCALAR, rank, tag[3], comm, &req[3]));
112   }
113   PetscFunctionReturn(PETSC_SUCCESS);
114 }
115 
116 static PetscErrorCode VecAssemblyRecv_MPI_Private(MPI_Comm comm, const PetscMPIInt tag[], PetscMPIInt rank, void *rdata, MPI_Request req[], void *ctx)
117 {
118   Vec                X   = (Vec)ctx;
119   Vec_MPI           *x   = (Vec_MPI *)X->data;
120   VecAssemblyHeader *hdr = (VecAssemblyHeader *)rdata;
121   PetscInt           bs  = X->map->bs;
122   VecAssemblyFrame  *frame;
123 
124   PetscFunctionBegin;
125   PetscCall(PetscSegBufferGet(x->segrecvframe, 1, &frame));
126 
127   if (hdr->count) {
128     PetscCall(PetscSegBufferGet(x->segrecvint, hdr->count, &frame->ints));
129     PetscCallMPI(MPI_Irecv(frame->ints, hdr->count, MPIU_INT, rank, tag[0], comm, &req[0]));
130     PetscCall(PetscSegBufferGet(x->segrecvscalar, hdr->count, &frame->scalars));
131     PetscCallMPI(MPI_Irecv(frame->scalars, hdr->count, MPIU_SCALAR, rank, tag[1], comm, &req[1]));
132     frame->pendings = 2;
133   } else {
134     frame->ints     = NULL;
135     frame->scalars  = NULL;
136     frame->pendings = 0;
137   }
138 
139   if (hdr->bcount) {
140     PetscCall(PetscSegBufferGet(x->segrecvint, hdr->bcount, &frame->intb));
141     PetscCallMPI(MPI_Irecv(frame->intb, hdr->bcount, MPIU_INT, rank, tag[2], comm, &req[2]));
142     PetscCall(PetscSegBufferGet(x->segrecvscalar, hdr->bcount * bs, &frame->scalarb));
143     PetscCallMPI(MPI_Irecv(frame->scalarb, hdr->bcount * bs, MPIU_SCALAR, rank, tag[3], comm, &req[3]));
144     frame->pendingb = 2;
145   } else {
146     frame->intb     = NULL;
147     frame->scalarb  = NULL;
148     frame->pendingb = 0;
149   }
150   PetscFunctionReturn(PETSC_SUCCESS);
151 }
152 
153 static PetscErrorCode VecAssemblyBegin_MPI_BTS(Vec X)
154 {
155   Vec_MPI *x = (Vec_MPI *)X->data;
156   MPI_Comm comm;
157   PetscInt i, j, jb, bs;
158 
159   PetscFunctionBegin;
160   if (X->stash.donotstash) PetscFunctionReturn(PETSC_SUCCESS);
161 
162   PetscCall(PetscObjectGetComm((PetscObject)X, &comm));
163   PetscCall(VecGetBlockSize(X, &bs));
164   if (PetscDefined(USE_DEBUG)) {
165     InsertMode addv;
166     PetscCall(MPIU_Allreduce((PetscEnum *)&X->stash.insertmode, (PetscEnum *)&addv, 1, MPIU_ENUM, MPI_BOR, comm));
167     PetscCheck(addv != (ADD_VALUES | INSERT_VALUES), comm, PETSC_ERR_ARG_NOTSAMETYPE, "Some processors inserted values while others added");
168   }
169   X->bstash.insertmode = X->stash.insertmode; /* Block stash implicitly tracks InsertMode of scalar stash */
170 
171   PetscCall(VecStashSortCompress_Private(&X->stash));
172   PetscCall(VecStashSortCompress_Private(&X->bstash));
173 
174   if (!x->sendranks) {
175     PetscMPIInt nowners, bnowners, *owners, *bowners;
176     PetscInt    ntmp;
177     PetscCall(VecStashGetOwnerList_Private(&X->stash, X->map, &nowners, &owners));
178     PetscCall(VecStashGetOwnerList_Private(&X->bstash, X->map, &bnowners, &bowners));
179     PetscCall(PetscMergeMPIIntArray(nowners, owners, bnowners, bowners, &ntmp, &x->sendranks));
180     x->nsendranks = ntmp;
181     PetscCall(PetscFree(owners));
182     PetscCall(PetscFree(bowners));
183     PetscCall(PetscMalloc1(x->nsendranks, &x->sendhdr));
184     PetscCall(PetscCalloc1(x->nsendranks, &x->sendptrs));
185   }
186   for (i = 0, j = 0, jb = 0; i < x->nsendranks; i++) {
187     PetscMPIInt rank         = x->sendranks[i];
188     x->sendhdr[i].insertmode = X->stash.insertmode;
189     /* Initialize pointers for non-empty stashes the first time around.  Subsequent assemblies with
190      * VEC_SUBSET_OFF_PROC_ENTRIES will leave the old pointers (dangling because the stash has been collected) when
191      * there is nothing new to send, so that size-zero messages get sent instead. */
192     x->sendhdr[i].count = 0;
193     if (X->stash.n) {
194       x->sendptrs[i].ints    = &X->stash.idx[j];
195       x->sendptrs[i].scalars = &X->stash.array[j];
196       for (; j < X->stash.n && X->stash.idx[j] < X->map->range[rank + 1]; j++) x->sendhdr[i].count++;
197     }
198     x->sendhdr[i].bcount = 0;
199     if (X->bstash.n) {
200       x->sendptrs[i].intb    = &X->bstash.idx[jb];
201       x->sendptrs[i].scalarb = &X->bstash.array[jb * bs];
202       for (; jb < X->bstash.n && X->bstash.idx[jb] * bs < X->map->range[rank + 1]; jb++) x->sendhdr[i].bcount++;
203     }
204   }
205 
206   if (!x->segrecvint) PetscCall(PetscSegBufferCreate(sizeof(PetscInt), 1000, &x->segrecvint));
207   if (!x->segrecvscalar) PetscCall(PetscSegBufferCreate(sizeof(PetscScalar), 1000, &x->segrecvscalar));
208   if (!x->segrecvframe) PetscCall(PetscSegBufferCreate(sizeof(VecAssemblyFrame), 50, &x->segrecvframe));
209   if (x->first_assembly_done) { /* this is not the first assembly */
210     PetscMPIInt tag[4];
211     for (i = 0; i < 4; i++) PetscCall(PetscCommGetNewTag(comm, &tag[i]));
212     for (i = 0; i < x->nsendranks; i++) PetscCall(VecAssemblySend_MPI_Private(comm, tag, i, x->sendranks[i], x->sendhdr + i, x->sendreqs + 4 * i, X));
213     for (i = 0; i < x->nrecvranks; i++) PetscCall(VecAssemblyRecv_MPI_Private(comm, tag, x->recvranks[i], x->recvhdr + i, x->recvreqs + 4 * i, X));
214     x->use_status = PETSC_TRUE;
215   } else { /* First time assembly */
216     PetscCall(PetscCommBuildTwoSidedFReq(comm, 3, MPIU_INT, x->nsendranks, x->sendranks, (PetscInt *)x->sendhdr, &x->nrecvranks, &x->recvranks, &x->recvhdr, 4, &x->sendreqs, &x->recvreqs, VecAssemblySend_MPI_Private, VecAssemblyRecv_MPI_Private, X));
217     x->use_status = PETSC_FALSE;
218   }
219 
220   /* The first_assembly_done flag is only meaningful when x->assembly_subset is set.
221      That is, when assembly_subset is set, we record that the first assembly has been done.
222    */
223   x->first_assembly_done = x->assembly_subset;
224 
225   {
226     PetscInt nstash, reallocs;
227     PetscCall(VecStashGetInfo_Private(&X->stash, &nstash, &reallocs));
228     PetscCall(PetscInfo(X, "Stash has %" PetscInt_FMT " entries, uses %" PetscInt_FMT " mallocs.\n", nstash, reallocs));
229     PetscCall(VecStashGetInfo_Private(&X->bstash, &nstash, &reallocs));
230     PetscCall(PetscInfo(X, "Block-Stash has %" PetscInt_FMT " entries, uses %" PetscInt_FMT " mallocs.\n", nstash, reallocs));
231   }
232   PetscFunctionReturn(PETSC_SUCCESS);
233 }
234 
235 static PetscErrorCode VecAssemblyEnd_MPI_BTS(Vec X)
236 {
237   Vec_MPI          *x  = (Vec_MPI *)X->data;
238   PetscInt          bs = X->map->bs;
239   PetscMPIInt       npending, *some_indices, r;
240   MPI_Status       *some_statuses;
241   PetscScalar      *xarray;
242   VecAssemblyFrame *frame;
243 
244   PetscFunctionBegin;
245   if (X->stash.donotstash) {
246     X->stash.insertmode  = NOT_SET_VALUES;
247     X->bstash.insertmode = NOT_SET_VALUES;
248     PetscFunctionReturn(PETSC_SUCCESS);
249   }
250 
251   PetscCheck(x->segrecvframe, PETSC_COMM_SELF, PETSC_ERR_PLIB, "Missing segrecvframe! Probably you forgot to call VecAssemblyBegin first");
252   PetscCall(VecGetArray(X, &xarray));
253   PetscCall(PetscSegBufferExtractInPlace(x->segrecvframe, &frame));
254   PetscCall(PetscMalloc2(4 * x->nrecvranks, &some_indices, x->use_status ? 4 * x->nrecvranks : 0, &some_statuses));
255   for (r = 0, npending = 0; r < x->nrecvranks; r++) npending += frame[r].pendings + frame[r].pendingb;
256   while (npending > 0) {
257     PetscMPIInt ndone = 0, ii;
258     /* Filling MPI_Status fields requires some resources from the MPI library.  We skip it on the first assembly, or
259      * when VEC_SUBSET_OFF_PROC_ENTRIES has not been set, because we could exchange exact sizes in the initial
260      * rendezvous.  When the rendezvous is elided, however, we use MPI_Status to get actual message lengths, so that
261      * subsequent assembly can set a proper subset of the values. */
262     PetscCallMPI(MPI_Waitsome(4 * x->nrecvranks, x->recvreqs, &ndone, some_indices, x->use_status ? some_statuses : MPI_STATUSES_IGNORE));
263     for (ii = 0; ii < ndone; ii++) {
264       PetscInt     i     = some_indices[ii] / 4, j, k;
265       InsertMode   imode = (InsertMode)x->recvhdr[i].insertmode;
266       PetscInt    *recvint;
267       PetscScalar *recvscalar;
268       PetscBool    intmsg   = (PetscBool)(some_indices[ii] % 2 == 0);
269       PetscBool    blockmsg = (PetscBool)((some_indices[ii] % 4) / 2 == 1);
270       npending--;
271       if (!blockmsg) { /* Scalar stash */
272         PetscMPIInt count;
273         if (--frame[i].pendings > 0) continue;
274         if (x->use_status) {
275           PetscCallMPI(MPI_Get_count(&some_statuses[ii], intmsg ? MPIU_INT : MPIU_SCALAR, &count));
276         } else count = x->recvhdr[i].count;
277         for (j = 0, recvint = frame[i].ints, recvscalar = frame[i].scalars; j < count; j++, recvint++) {
278           PetscInt loc = *recvint - X->map->rstart;
279           PetscCheck(*recvint >= X->map->rstart && X->map->rend > *recvint, PETSC_COMM_SELF, PETSC_ERR_PLIB, "Received vector entry %" PetscInt_FMT " out of local range [%" PetscInt_FMT ",%" PetscInt_FMT ")", *recvint, X->map->rstart, X->map->rend);
280           switch (imode) {
281           case ADD_VALUES:
282             xarray[loc] += *recvscalar++;
283             break;
284           case INSERT_VALUES:
285             xarray[loc] = *recvscalar++;
286             break;
287           default:
288             SETERRQ(PETSC_COMM_SELF, PETSC_ERR_SUP, "Insert mode not supported 0x%x", imode);
289           }
290         }
291       } else { /* Block stash */
292         PetscMPIInt count;
293         if (--frame[i].pendingb > 0) continue;
294         if (x->use_status) {
295           PetscCallMPI(MPI_Get_count(&some_statuses[ii], intmsg ? MPIU_INT : MPIU_SCALAR, &count));
296           if (!intmsg) count /= bs; /* Convert from number of scalars to number of blocks */
297         } else count = x->recvhdr[i].bcount;
298         for (j = 0, recvint = frame[i].intb, recvscalar = frame[i].scalarb; j < count; j++, recvint++) {
299           PetscInt loc = (*recvint) * bs - X->map->rstart;
300           switch (imode) {
301           case ADD_VALUES:
302             for (k = loc; k < loc + bs; k++) xarray[k] += *recvscalar++;
303             break;
304           case INSERT_VALUES:
305             for (k = loc; k < loc + bs; k++) xarray[k] = *recvscalar++;
306             break;
307           default:
308             SETERRQ(PETSC_COMM_SELF, PETSC_ERR_SUP, "Insert mode not supported 0x%x", imode);
309           }
310         }
311       }
312     }
313   }
314   PetscCall(VecRestoreArray(X, &xarray));
315   PetscCallMPI(MPI_Waitall(4 * x->nsendranks, x->sendreqs, MPI_STATUSES_IGNORE));
316   PetscCall(PetscFree2(some_indices, some_statuses));
317   if (x->assembly_subset) {
318     PetscCall(PetscSegBufferExtractInPlace(x->segrecvint, NULL));
319     PetscCall(PetscSegBufferExtractInPlace(x->segrecvscalar, NULL));
320   } else {
321     PetscCall(VecAssemblyReset_MPI(X));
322   }
323 
324   X->stash.insertmode  = NOT_SET_VALUES;
325   X->bstash.insertmode = NOT_SET_VALUES;
326   PetscCall(VecStashScatterEnd_Private(&X->stash));
327   PetscCall(VecStashScatterEnd_Private(&X->bstash));
328   PetscFunctionReturn(PETSC_SUCCESS);
329 }
330 
331 PetscErrorCode VecAssemblyReset_MPI(Vec X)
332 {
333   Vec_MPI *x = (Vec_MPI *)X->data;
334 
335   PetscFunctionBegin;
336   PetscCall(PetscFree(x->sendreqs));
337   PetscCall(PetscFree(x->recvreqs));
338   PetscCall(PetscFree(x->sendranks));
339   PetscCall(PetscFree(x->recvranks));
340   PetscCall(PetscFree(x->sendhdr));
341   PetscCall(PetscFree(x->recvhdr));
342   PetscCall(PetscFree(x->sendptrs));
343   PetscCall(PetscSegBufferDestroy(&x->segrecvint));
344   PetscCall(PetscSegBufferDestroy(&x->segrecvscalar));
345   PetscCall(PetscSegBufferDestroy(&x->segrecvframe));
346   PetscFunctionReturn(PETSC_SUCCESS);
347 }
348 
349 static PetscErrorCode VecSetFromOptions_MPI(Vec X, PetscOptionItems *PetscOptionsObject)
350 {
351 #if !defined(PETSC_HAVE_MPIUNI)
352   PetscBool flg = PETSC_FALSE, set;
353 
354   PetscFunctionBegin;
355   PetscOptionsHeadBegin(PetscOptionsObject, "VecMPI Options");
356   PetscCall(PetscOptionsBool("-vec_assembly_legacy", "Use MPI 1 version of assembly", "", flg, &flg, &set));
357   if (set) {
358     X->ops->assemblybegin = flg ? VecAssemblyBegin_MPI : VecAssemblyBegin_MPI_BTS;
359     X->ops->assemblyend   = flg ? VecAssemblyEnd_MPI : VecAssemblyEnd_MPI_BTS;
360   }
361   PetscOptionsHeadEnd();
362 #else
363   PetscFunctionBegin;
364   X->ops->assemblybegin = VecAssemblyBegin_MPI;
365   X->ops->assemblyend   = VecAssemblyEnd_MPI;
366 #endif
367   PetscFunctionReturn(PETSC_SUCCESS);
368 }
369 
370 static PetscErrorCode VecGetLocalToGlobalMapping_MPI_VecGhost(Vec X, ISLocalToGlobalMapping *ltg)
371 {
372   PetscInt       *indices, n, nghost, rstart, i;
373   IS              ghostis;
374   const PetscInt *ghostidx;
375   MPI_Comm        comm;
376 
377   PetscFunctionBegin;
378   if (X->map->mapping) {
379     *ltg = X->map->mapping;
380     PetscFunctionReturn(PETSC_SUCCESS);
381   }
382   PetscCall(VecGhostGetGhostIS(X, &ghostis));
383   PetscCall(ISGetLocalSize(ghostis, &nghost));
384   PetscCall(VecGetLocalSize(X, &n));
385   PetscCall(ISGetIndices(ghostis, &ghostidx));
386   /* set local to global mapping for ghosted vector */
387   PetscCall(PetscMalloc1(n + nghost, &indices));
388   PetscCall(VecGetOwnershipRange(X, &rstart, NULL));
389   for (i = 0; i < n; i++) { indices[i] = rstart + i; }
390   for (i = 0; i < nghost; i++) { indices[n + i] = ghostidx[i]; }
391   PetscCall(ISRestoreIndices(ghostis, &ghostidx));
392   PetscCall(PetscObjectGetComm((PetscObject)X, &comm));
393   PetscCall(ISLocalToGlobalMappingCreate(comm, 1, n + nghost, indices, PETSC_OWN_POINTER, &X->map->mapping));
394   *ltg = X->map->mapping;
395   PetscFunctionReturn(PETSC_SUCCESS);
396 }
397 
398 static struct _VecOps DvOps = {PetscDesignatedInitializer(duplicate, VecDuplicate_MPI), /* 1 */
399                                PetscDesignatedInitializer(duplicatevecs, VecDuplicateVecs_Default),
400                                PetscDesignatedInitializer(destroyvecs, VecDestroyVecs_Default),
401                                PetscDesignatedInitializer(dot, VecDot_MPI),
402                                PetscDesignatedInitializer(mdot, VecMDot_MPI),
403                                PetscDesignatedInitializer(norm, VecNorm_MPI),
404                                PetscDesignatedInitializer(tdot, VecTDot_MPI),
405                                PetscDesignatedInitializer(mtdot, VecMTDot_MPI),
406                                PetscDesignatedInitializer(scale, VecScale_Seq),
407                                PetscDesignatedInitializer(copy, VecCopy_Seq), /* 10 */
408                                PetscDesignatedInitializer(set, VecSet_Seq),
409                                PetscDesignatedInitializer(swap, VecSwap_Seq),
410                                PetscDesignatedInitializer(axpy, VecAXPY_Seq),
411                                PetscDesignatedInitializer(axpby, VecAXPBY_Seq),
412                                PetscDesignatedInitializer(maxpy, VecMAXPY_Seq),
413                                PetscDesignatedInitializer(aypx, VecAYPX_Seq),
414                                PetscDesignatedInitializer(waxpy, VecWAXPY_Seq),
415                                PetscDesignatedInitializer(axpbypcz, VecAXPBYPCZ_Seq),
416                                PetscDesignatedInitializer(pointwisemult, VecPointwiseMult_Seq),
417                                PetscDesignatedInitializer(pointwisedivide, VecPointwiseDivide_Seq),
418                                PetscDesignatedInitializer(setvalues, VecSetValues_MPI), /* 20 */
419                                PetscDesignatedInitializer(assemblybegin, VecAssemblyBegin_MPI_BTS),
420                                PetscDesignatedInitializer(assemblyend, VecAssemblyEnd_MPI_BTS),
421                                PetscDesignatedInitializer(getarray, NULL),
422                                PetscDesignatedInitializer(getsize, VecGetSize_MPI),
423                                PetscDesignatedInitializer(getlocalsize, VecGetSize_Seq),
424                                PetscDesignatedInitializer(restorearray, NULL),
425                                PetscDesignatedInitializer(max, VecMax_MPI),
426                                PetscDesignatedInitializer(min, VecMin_MPI),
427                                PetscDesignatedInitializer(setrandom, VecSetRandom_Seq),
428                                PetscDesignatedInitializer(setoption, VecSetOption_MPI),
429                                PetscDesignatedInitializer(setvaluesblocked, VecSetValuesBlocked_MPI),
430                                PetscDesignatedInitializer(destroy, VecDestroy_MPI),
431                                PetscDesignatedInitializer(view, VecView_MPI),
432                                PetscDesignatedInitializer(placearray, VecPlaceArray_MPI),
433                                PetscDesignatedInitializer(replacearray, VecReplaceArray_Seq),
434                                PetscDesignatedInitializer(dot_local, VecDot_Seq),
435                                PetscDesignatedInitializer(tdot_local, VecTDot_Seq),
436                                PetscDesignatedInitializer(norm_local, VecNorm_Seq),
437                                PetscDesignatedInitializer(mdot_local, VecMDot_Seq),
438                                PetscDesignatedInitializer(mtdot_local, VecMTDot_Seq),
439                                PetscDesignatedInitializer(load, VecLoad_Default),
440                                PetscDesignatedInitializer(reciprocal, VecReciprocal_Default),
441                                PetscDesignatedInitializer(conjugate, VecConjugate_Seq),
442                                PetscDesignatedInitializer(setlocaltoglobalmapping, NULL),
443                                PetscDesignatedInitializer(getlocaltoglobalmapping, VecGetLocalToGlobalMapping_MPI_VecGhost),
444                                PetscDesignatedInitializer(setvalueslocal, NULL),
445                                PetscDesignatedInitializer(resetarray, VecResetArray_MPI),
446                                PetscDesignatedInitializer(setfromoptions, VecSetFromOptions_MPI), /* set from options */
447                                PetscDesignatedInitializer(maxpointwisedivide, VecMaxPointwiseDivide_Seq),
448                                PetscDesignatedInitializer(pointwisemax, VecPointwiseMax_Seq),
449                                PetscDesignatedInitializer(pointwisemaxabs, VecPointwiseMaxAbs_Seq),
450                                PetscDesignatedInitializer(pointwisemin, VecPointwiseMin_Seq),
451                                PetscDesignatedInitializer(getvalues, VecGetValues_MPI),
452                                PetscDesignatedInitializer(sqrt, NULL),
453                                PetscDesignatedInitializer(abs, NULL),
454                                PetscDesignatedInitializer(exp, NULL),
455                                PetscDesignatedInitializer(log, NULL),
456                                PetscDesignatedInitializer(shift, NULL),
457                                PetscDesignatedInitializer(create, NULL), /* really? */
458                                PetscDesignatedInitializer(stridegather, VecStrideGather_Default),
459                                PetscDesignatedInitializer(stridescatter, VecStrideScatter_Default),
460                                PetscDesignatedInitializer(dotnorm2, NULL),
461                                PetscDesignatedInitializer(getsubvector, NULL),
462                                PetscDesignatedInitializer(restoresubvector, NULL),
463                                PetscDesignatedInitializer(getarrayread, NULL),
464                                PetscDesignatedInitializer(restorearrayread, NULL),
465                                PetscDesignatedInitializer(stridesubsetgather, VecStrideSubSetGather_Default),
466                                PetscDesignatedInitializer(stridesubsetscatter, VecStrideSubSetScatter_Default),
467                                PetscDesignatedInitializer(viewnative, VecView_MPI),
468                                PetscDesignatedInitializer(loadnative, NULL),
469                                PetscDesignatedInitializer(createlocalvector, NULL),
470                                PetscDesignatedInitializer(getlocalvector, NULL),
471                                PetscDesignatedInitializer(restorelocalvector, NULL),
472                                PetscDesignatedInitializer(getlocalvectorread, NULL),
473                                PetscDesignatedInitializer(restorelocalvectorread, NULL),
474                                PetscDesignatedInitializer(bindtocpu, NULL),
475                                PetscDesignatedInitializer(getarraywrite, NULL),
476                                PetscDesignatedInitializer(restorearraywrite, NULL),
477                                PetscDesignatedInitializer(getarrayandmemtype, NULL),
478                                PetscDesignatedInitializer(restorearrayandmemtype, NULL),
479                                PetscDesignatedInitializer(getarrayreadandmemtype, NULL),
480                                PetscDesignatedInitializer(restorearrayreadandmemtype, NULL),
481                                PetscDesignatedInitializer(getarraywriteandmemtype, NULL),
482                                PetscDesignatedInitializer(restorearraywriteandmemtype, NULL),
483                                PetscDesignatedInitializer(concatenate, NULL),
484                                PetscDesignatedInitializer(sum, NULL),
485                                PetscDesignatedInitializer(setpreallocationcoo, VecSetPreallocationCOO_MPI),
486                                PetscDesignatedInitializer(setvaluescoo, VecSetValuesCOO_MPI),
487                                PetscDesignatedInitializer(errorwnorm, NULL)};
488 
489 /*
490     VecCreate_MPI_Private - Basic create routine called by VecCreate_MPI() (i.e. VecCreateMPI()),
491     VecCreateMPIWithArray(), VecCreate_Shared() (i.e. VecCreateShared()), VecCreateGhost(),
492     VecDuplicate_MPI(), VecCreateGhostWithArray(), VecDuplicate_MPI(), and VecDuplicate_Shared()
493     VecDuplicate_MPI(), VecCreateGhostWithArray(), and VecDuplicate_Shared()
494     If alloc is true and array is NULL then this routine allocates the space, otherwise
495     no space is allocated.
496 */
497 PetscErrorCode VecCreate_MPI_Private(Vec v, PetscBool alloc, PetscInt nghost, const PetscScalar array[])
498 {
499   Vec_MPI *s;
500 
501   PetscFunctionBegin;
502   PetscCall(PetscNew(&s));
503   v->data        = (void *)s;
504   v->ops[0]      = DvOps;
505   s->nghost      = nghost;
506   v->petscnative = PETSC_TRUE;
507   if (array) v->offloadmask = PETSC_OFFLOAD_CPU;
508 
509   PetscCall(PetscLayoutSetUp(v->map));
510 
511   s->array           = (PetscScalar *)array;
512   s->array_allocated = NULL;
513   if (alloc && !array) {
514     PetscInt n = v->map->n + nghost;
515     PetscCall(PetscCalloc1(n, &s->array));
516     s->array_allocated = s->array;
517     PetscCall(PetscObjectComposedDataSetReal((PetscObject)v, NormIds[NORM_2], 0));
518     PetscCall(PetscObjectComposedDataSetReal((PetscObject)v, NormIds[NORM_1], 0));
519     PetscCall(PetscObjectComposedDataSetReal((PetscObject)v, NormIds[NORM_INFINITY], 0));
520   }
521 
522   /* By default parallel vectors do not have local representation */
523   s->localrep    = NULL;
524   s->localupdate = NULL;
525   s->ghost       = NULL;
526 
527   v->stash.insertmode  = NOT_SET_VALUES;
528   v->bstash.insertmode = NOT_SET_VALUES;
529   /* create the stashes. The block-size for bstash is set later when
530      VecSetValuesBlocked is called.
531   */
532   PetscCall(VecStashCreate_Private(PetscObjectComm((PetscObject)v), 1, &v->stash));
533   PetscCall(VecStashCreate_Private(PetscObjectComm((PetscObject)v), PetscAbs(v->map->bs), &v->bstash));
534 
535 #if defined(PETSC_HAVE_MATLAB)
536   PetscCall(PetscObjectComposeFunction((PetscObject)v, "PetscMatlabEnginePut_C", VecMatlabEnginePut_Default));
537   PetscCall(PetscObjectComposeFunction((PetscObject)v, "PetscMatlabEngineGet_C", VecMatlabEngineGet_Default));
538 #endif
539   PetscCall(PetscObjectChangeTypeName((PetscObject)v, VECMPI));
540   PetscFunctionReturn(PETSC_SUCCESS);
541 }
542 
543 /*MC
544    VECMPI - VECMPI = "mpi" - The basic parallel vector
545 
546    Options Database Key:
547 . -vec_type mpi - sets the vector type to `VECMPI` during a call to `VecSetFromOptions()`
548 
549   Level: beginner
550 
551 .seealso: [](ch_vectors), `Vec`, `VecType`, `VecCreate()`, `VecSetType()`, `VecSetFromOptions()`, `VecCreateMPIWithArray()`, `VecCreateMPI()`
552 M*/
553 
554 PetscErrorCode VecCreate_MPI(Vec vv)
555 {
556   PetscFunctionBegin;
557   PetscCall(VecCreate_MPI_Private(vv, PETSC_TRUE, 0, NULL));
558   PetscFunctionReturn(PETSC_SUCCESS);
559 }
560 
561 /*MC
562    VECSTANDARD = "standard" - A `VECSEQ` on one process and `VECMPI` on more than one process
563 
564    Options Database Key:
565 . -vec_type standard - sets the vector type to `VECSTANDARD` during a call to `VecSetFromOptions()`
566 
567   Level: beginner
568 
569 .seealso: [](ch_vectors), `Vec`, `VecType`, `VecCreateSeq()`, `VecCreateMPI()`
570 M*/
571 
572 PETSC_EXTERN PetscErrorCode VecCreate_Standard(Vec v)
573 {
574   PetscMPIInt size;
575 
576   PetscFunctionBegin;
577   PetscCallMPI(MPI_Comm_size(PetscObjectComm((PetscObject)v), &size));
578   if (size == 1) {
579     PetscCall(VecSetType(v, VECSEQ));
580   } else {
581     PetscCall(VecSetType(v, VECMPI));
582   }
583   PetscFunctionReturn(PETSC_SUCCESS);
584 }
585 
586 /*@C
587   VecCreateMPIWithArray - Creates a parallel, array-style vector,
588   where the user provides the array space to store the vector values.
589 
590   Collective
591 
592   Input Parameters:
593 + comm  - the MPI communicator to use
594 . bs    - block size, same meaning as `VecSetBlockSize()`
595 . n     - local vector length, cannot be `PETSC_DECIDE`
596 . N     - global vector length (or `PETSC_DETERMINE` to have it calculated)
597 - array - the user provided array to store the vector values
598 
599   Output Parameter:
600 . vv - the vector
601 
602   Level: intermediate
603 
604   Notes:
605   Use `VecDuplicate()` or `VecDuplicateVecs()` to form additional vectors of the
606   same type as an existing vector.
607 
608   If the user-provided array is `NULL`, then `VecPlaceArray()` can be used
609   at a later stage to SET the array for storing the vector values.
610 
611   PETSc does NOT free `array` when the vector is destroyed via `VecDestroy()`.
612 
613   The user should not free `array` until the vector is destroyed.
614 
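  Example Usage (a minimal sketch; the block size, local length, and array length below are illustrative):
.vb
  PetscScalar array[10]; /* user-owned storage, at least n entries long */
  Vec         v;

  PetscCall(VecCreateMPIWithArray(PETSC_COMM_WORLD, 1, 10, PETSC_DETERMINE, array, &v));
  /* ... use v; the values live in array ... */
  PetscCall(VecDestroy(&v)); /* array is not freed by PETSc */
.ve
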
615 .seealso: [](ch_vectors), `Vec`, `VecType`, `VecCreateSeqWithArray()`, `VecCreate()`, `VecDuplicate()`, `VecDuplicateVecs()`, `VecCreateGhost()`,
616           `VecCreateMPI()`, `VecCreateGhostWithArray()`, `VecPlaceArray()`
617 @*/
618 PetscErrorCode VecCreateMPIWithArray(MPI_Comm comm, PetscInt bs, PetscInt n, PetscInt N, const PetscScalar array[], Vec *vv)
619 {
620   PetscFunctionBegin;
621   PetscCheck(n != PETSC_DECIDE, PETSC_COMM_SELF, PETSC_ERR_ARG_OUTOFRANGE, "Must set local size of vector");
622   PetscCall(PetscSplitOwnership(comm, &n, &N));
623   PetscCall(VecCreate(comm, vv));
624   PetscCall(VecSetSizes(*vv, n, N));
625   PetscCall(VecSetBlockSize(*vv, bs));
626   PetscCall(VecCreate_MPI_Private(*vv, PETSC_FALSE, 0, array));
627   PetscFunctionReturn(PETSC_SUCCESS);
628 }
629 
630 /*@C
631   VecCreateGhostWithArray - Creates a parallel vector with ghost padding on each processor;
632   the caller allocates the array space.
633 
634   Collective
635 
636   Input Parameters:
637 + comm   - the MPI communicator to use
638 . n      - local vector length
639 . N      - global vector length (or `PETSC_DETERMINE` to have it calculated if `n` is given)
640 . nghost - number of local ghost points
641 . ghosts - global indices of ghost points (or `NULL` if not needed); these do not need to be in increasing order (sorted)
642 - array  - the space to store the vector values (of length at least `n + nghost`)
643 
644   Output Parameter:
645 . vv - the global vector representation (without ghost points as part of vector)
646 
647   Level: advanced
648 
649   Notes:
650   Use `VecGhostGetLocalForm()` to access the local, ghosted representation
651   of the vector.
652 
653   This also automatically sets the `ISLocalToGlobalMapping` for this vector.
654 
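  Example Usage (a minimal sketch; the sizes and ghost indices are placeholders to be filled per rank):
.vb
  PetscInt    nlocal = 4, nghost = 2;
  PetscInt    ghosts[2];  /* global indices of entries owned by other ranks */
  PetscScalar array[6];   /* user-owned storage of length nlocal + nghost */
  Vec         v;

  /* ... fill ghosts[] with off-process global indices ... */
  PetscCall(VecCreateGhostWithArray(PETSC_COMM_WORLD, nlocal, PETSC_DETERMINE, nghost, ghosts, array, &v));
  PetscCall(VecGhostUpdateBegin(v, INSERT_VALUES, SCATTER_FORWARD));
  PetscCall(VecGhostUpdateEnd(v, INSERT_VALUES, SCATTER_FORWARD));
  PetscCall(VecDestroy(&v));
.ve
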
655 .seealso: [](ch_vectors), `Vec`, `VecType`, `VecCreate()`, `VecGhostGetLocalForm()`, `VecGhostRestoreLocalForm()`,
656           `VecCreateGhost()`, `VecCreateSeqWithArray()`, `VecCreateMPIWithArray()`,
657           `VecCreateGhostBlock()`, `VecCreateGhostBlockWithArray()`, `VecMPISetGhost()`
658 @*/
659 PetscErrorCode VecCreateGhostWithArray(MPI_Comm comm, PetscInt n, PetscInt N, PetscInt nghost, const PetscInt ghosts[], const PetscScalar array[], Vec *vv)
660 {
661   Vec_MPI     *w;
662   PetscScalar *larray;
663   IS           from, to;
664 
665   PetscFunctionBegin;
666   *vv = NULL;
667 
668   PetscCheck(n != PETSC_DECIDE, PETSC_COMM_SELF, PETSC_ERR_ARG_OUTOFRANGE, "Must set local size");
669   PetscCheck(nghost != PETSC_DECIDE, PETSC_COMM_SELF, PETSC_ERR_ARG_OUTOFRANGE, "Must set local ghost size");
670   PetscCheck(nghost >= 0, PETSC_COMM_SELF, PETSC_ERR_ARG_OUTOFRANGE, "Ghost length must be >= 0");
671   PetscCall(PetscSplitOwnership(comm, &n, &N));
672   /* Create global representation */
673   PetscCall(VecCreate(comm, vv));
674   PetscCall(VecSetSizes(*vv, n, N));
675   PetscCall(VecCreate_MPI_Private(*vv, PETSC_TRUE, nghost, array));
676   w = (Vec_MPI *)(*vv)->data;
677   /* Create local representation */
678   PetscCall(VecGetArray(*vv, &larray));
679   PetscCall(VecCreateSeqWithArray(PETSC_COMM_SELF, 1, n + nghost, larray, &w->localrep));
680   PetscCall(VecRestoreArray(*vv, &larray));
681 
682   /*
683        Create scatter context for scattering (updating) ghost values
684   */
685   PetscCall(ISCreateGeneral(comm, nghost, ghosts, PETSC_COPY_VALUES, &from));
686   PetscCall(ISCreateStride(PETSC_COMM_SELF, nghost, n, 1, &to));
687   PetscCall(VecScatterCreate(*vv, from, w->localrep, to, &w->localupdate));
688   PetscCall(ISDestroy(&to));
689 
690   w->ghost                            = from;
691   (*vv)->ops->getlocaltoglobalmapping = VecGetLocalToGlobalMapping_MPI_VecGhost;
692   PetscFunctionReturn(PETSC_SUCCESS);
693 }
694 
695 /*@
696   VecGhostGetGhostIS - Return ghosting indices of a ghost vector
697 
698   Input Parameter:
699 . X - ghost vector
700 
701   Output Parameter:
702 . ghost - ghosting indices
703 
704   Level: beginner
705 
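  Example Usage (a minimal sketch, assuming `X` was created with `VecCreateGhost()` or a related routine):
.vb
  IS       ghost;
  PetscInt nghost;

  PetscCall(VecGhostGetGhostIS(X, &ghost));
  PetscCall(ISGetLocalSize(ghost, &nghost)); /* number of ghost points of X */
.ve
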
706 .seealso: `VecCreateGhostWithArray()`, `VecCreateMPIWithArray()`
707 @*/
708 PetscErrorCode VecGhostGetGhostIS(Vec X, IS *ghost)
709 {
710   Vec_MPI  *w;
711   PetscBool flg;
712 
713   PetscFunctionBegin;
714   PetscValidType(X, 1);
715   PetscAssertPointer(ghost, 2);
716   PetscCall(PetscObjectTypeCompare((PetscObject)X, VECMPI, &flg));
717   PetscCheck(flg, PetscObjectComm((PetscObject)X), PETSC_ERR_ARG_WRONGSTATE, "VecGhostGetGhostIS() is not supported for vector type %s", ((PetscObject)X)->type_name);
718   w      = (Vec_MPI *)(X)->data;
719   *ghost = w->ghost;
720   PetscFunctionReturn(PETSC_SUCCESS);
721 }
722 
723 /*@
724   VecCreateGhost - Creates a parallel vector with ghost padding on each processor.
725 
726   Collective
727 
728   Input Parameters:
729 + comm   - the MPI communicator to use
730 . n      - local vector length
731 . N      - global vector length (or `PETSC_DETERMINE` to have it calculated if `n` is given)
732 . nghost - number of local ghost points
733 - ghosts - global indices of ghost points; these do not need to be in increasing order (sorted)
734 
735   Output Parameter:
736 . vv - the global vector representation (without ghost points as part of vector)
737 
738   Level: advanced
739 
740   Notes:
741   Use `VecGhostGetLocalForm()` to access the local, ghosted representation
742   of the vector.
743 
744   This also automatically sets the `ISLocalToGlobalMapping` for this vector.
745 
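  Example Usage (a minimal sketch; the local size and ghost indices are illustrative and rank dependent):
.vb
  Vec      v, lv;
  PetscInt ghosts[2]; /* global indices of off-process entries needed locally */

  /* ... fill ghosts[] ... */
  PetscCall(VecCreateGhost(PETSC_COMM_WORLD, 4, PETSC_DETERMINE, 2, ghosts, &v));
  /* ... set values in v, then assemble ... */
  PetscCall(VecGhostUpdateBegin(v, INSERT_VALUES, SCATTER_FORWARD));
  PetscCall(VecGhostUpdateEnd(v, INSERT_VALUES, SCATTER_FORWARD));
  PetscCall(VecGhostGetLocalForm(v, &lv));
  /* ... read owned and ghost entries through lv ... */
  PetscCall(VecGhostRestoreLocalForm(v, &lv));
  PetscCall(VecDestroy(&v));
.ve
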
746 .seealso: [](ch_vectors), `Vec`, `VecType`, `VecCreateSeq()`, `VecCreate()`, `VecDuplicate()`, `VecDuplicateVecs()`, `VecCreateMPI()`,
747           `VecGhostGetLocalForm()`, `VecGhostRestoreLocalForm()`, `VecGhostUpdateBegin()`,
748           `VecCreateGhostWithArray()`, `VecCreateMPIWithArray()`, `VecGhostUpdateEnd()`,
749           `VecCreateGhostBlock()`, `VecCreateGhostBlockWithArray()`, `VecMPISetGhost()`
750 
751 @*/
752 PetscErrorCode VecCreateGhost(MPI_Comm comm, PetscInt n, PetscInt N, PetscInt nghost, const PetscInt ghosts[], Vec *vv)
753 {
754   PetscFunctionBegin;
755   PetscCall(VecCreateGhostWithArray(comm, n, N, nghost, ghosts, NULL, vv));
756   PetscFunctionReturn(PETSC_SUCCESS);
757 }
758 
759 /*@
760   VecMPISetGhost - Sets the ghost points for an MPI ghost vector
761 
762   Collective
763 
764   Input Parameters:
765 + vv     - the MPI vector
766 . nghost - number of local ghost points
767 - ghosts - global indices of ghost points; these do not need to be in increasing order (sorted)
768 
769   Level: advanced
770 
771   Notes:
772   Use `VecGhostGetLocalForm()` to access the local, ghosted representation
773   of the vector.
774 
775   This also automatically sets the `ISLocalToGlobalMapping` for this vector.
776 
777   You must call this AFTER you have set the type of the vector (with` VecSetType()`) and the size (with `VecSetSizes()`).
778 
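  Example Usage (a minimal sketch; the sizes and ghost indices are illustrative):
.vb
  Vec      v;
  PetscInt ghosts[2]; /* global indices of off-process entries needed locally */

  PetscCall(VecCreate(PETSC_COMM_WORLD, &v));
  PetscCall(VecSetSizes(v, 4, PETSC_DETERMINE));
  PetscCall(VecSetType(v, VECMPI));
  /* ... fill ghosts[] ... */
  PetscCall(VecMPISetGhost(v, 2, ghosts));
.ve
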
779 .seealso: [](ch_vectors), `Vec`, `VecType`, `VecCreateSeq()`, `VecCreate()`, `VecDuplicate()`, `VecDuplicateVecs()`, `VecCreateMPI()`,
780           `VecGhostGetLocalForm()`, `VecGhostRestoreLocalForm()`, `VecGhostUpdateBegin()`,
781           `VecCreateGhostWithArray()`, `VecCreateMPIWithArray()`, `VecGhostUpdateEnd()`,
782           `VecCreateGhostBlock()`, `VecCreateGhostBlockWithArray()`
783 @*/
784 PetscErrorCode VecMPISetGhost(Vec vv, PetscInt nghost, const PetscInt ghosts[])
785 {
786   PetscBool flg;
787 
788   PetscFunctionBegin;
789   PetscCall(PetscObjectTypeCompare((PetscObject)vv, VECMPI, &flg));
790   /* if this is already a fully instantiated VECMPI, destroy its internals and rebuild it with ghosting */
791   if (flg) {
792     PetscInt     n, N;
793     Vec_MPI     *w;
794     PetscScalar *larray;
795     IS           from, to;
796     MPI_Comm     comm;
797 
798     PetscCall(PetscObjectGetComm((PetscObject)vv, &comm));
799     n = vv->map->n;
800     N = vv->map->N;
801     PetscUseTypeMethod(vv, destroy);
802     PetscCall(VecSetSizes(vv, n, N));
803     PetscCall(VecCreate_MPI_Private(vv, PETSC_TRUE, nghost, NULL));
804     w = (Vec_MPI *)(vv)->data;
805     /* Create local representation */
806     PetscCall(VecGetArray(vv, &larray));
807     PetscCall(VecCreateSeqWithArray(PETSC_COMM_SELF, 1, n + nghost, larray, &w->localrep));
808     PetscCall(VecRestoreArray(vv, &larray));
809 
810     /*
811      Create scatter context for scattering (updating) ghost values
812      */
813     PetscCall(ISCreateGeneral(comm, nghost, ghosts, PETSC_COPY_VALUES, &from));
814     PetscCall(ISCreateStride(PETSC_COMM_SELF, nghost, n, 1, &to));
815     PetscCall(VecScatterCreate(vv, from, w->localrep, to, &w->localupdate));
816     PetscCall(ISDestroy(&to));
817 
818     w->ghost                         = from;
819     vv->ops->getlocaltoglobalmapping = VecGetLocalToGlobalMapping_MPI_VecGhost;
820   } else {
821     PetscCheck(vv->ops->create != VecCreate_MPI, PetscObjectComm((PetscObject)vv), PETSC_ERR_ARG_WRONGSTATE, "Must set local or global size before setting ghosting");
822     PetscCheck(((PetscObject)vv)->type_name, PetscObjectComm((PetscObject)vv), PETSC_ERR_ARG_WRONGSTATE, "Must set type to VECMPI before ghosting");
823   }
824   PetscFunctionReturn(PETSC_SUCCESS);
825 }
826 
827 /* ------------------------------------------------------------------------------------------*/
828 /*@C
829   VecCreateGhostBlockWithArray - Creates a parallel vector with ghost padding on each processor;
830   the caller allocates the array space. Indices in the ghost region are based on blocks.
831 
832   Collective
833 
834   Input Parameters:
835 + comm   - the MPI communicator to use
836 . bs     - block size
837 . n      - local vector length
838 . N      - global vector length (or `PETSC_DETERMINE` to have it calculated if `n` is given)
839 . nghost - number of local ghost blocks
840 . ghosts - global indices of ghost blocks (or `NULL` if not needed); counts are by block, not by index, and these do not need to be in increasing order (sorted)
841 - array  - the space to store the vector values (of length at least `n + nghost*bs`)
842 
843   Output Parameter:
844 . vv - the global vector representation (without ghost points as part of vector)
845 
846   Level: advanced
847 
848   Notes:
849   Use `VecGhostGetLocalForm()` to access the local, ghosted representation
850   of the vector.
851 
852   `n` is the local vector size (the total local size, not the number of blocks), while `nghost`
853   is the number of blocks in the ghost portion, i.e. the number of elements in the ghost
854   portion is `bs*nghost`
855 
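  Example Usage (a minimal sketch; the block size, sizes, and ghost block indices are illustrative):
.vb
  PetscInt    bs = 2, nlocal = 6, nghost = 2; /* nlocal counts scalars and must be divisible by bs */
  PetscInt    ghosts[2];                      /* global block indices owned by other ranks */
  PetscScalar array[10];                      /* user-owned storage of length nlocal + nghost*bs */
  Vec         v;

  /* ... fill ghosts[] with off-process block indices ... */
  PetscCall(VecCreateGhostBlockWithArray(PETSC_COMM_WORLD, bs, nlocal, PETSC_DETERMINE, nghost, ghosts, array, &v));
.ve
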
856 .seealso: [](ch_vectors), `Vec`, `VecType`, `VecCreate()`, `VecGhostGetLocalForm()`, `VecGhostRestoreLocalForm()`,
857           `VecCreateGhost()`, `VecCreateSeqWithArray()`, `VecCreateMPIWithArray()`,
858           `VecCreateGhostWithArray()`, `VecCreateGhostBlock()`
859 @*/
860 PetscErrorCode VecCreateGhostBlockWithArray(MPI_Comm comm, PetscInt bs, PetscInt n, PetscInt N, PetscInt nghost, const PetscInt ghosts[], const PetscScalar array[], Vec *vv)
861 {
862   Vec_MPI               *w;
863   PetscScalar           *larray;
864   IS                     from, to;
865   ISLocalToGlobalMapping ltog;
866   PetscInt               rstart, i, nb, *indices;
867 
868   PetscFunctionBegin;
869   *vv = NULL;
870 
871   PetscCheck(n != PETSC_DECIDE, PETSC_COMM_SELF, PETSC_ERR_ARG_OUTOFRANGE, "Must set local size");
872   PetscCheck(nghost != PETSC_DECIDE, PETSC_COMM_SELF, PETSC_ERR_ARG_OUTOFRANGE, "Must set local ghost size");
873   PetscCheck(nghost >= 0, PETSC_COMM_SELF, PETSC_ERR_ARG_OUTOFRANGE, "Ghost length must be >= 0");
874   PetscCheck(n % bs == 0, PETSC_COMM_SELF, PETSC_ERR_ARG_INCOMP, "Local size must be a multiple of block size");
875   PetscCall(PetscSplitOwnership(comm, &n, &N));
876   /* Create global representation */
877   PetscCall(VecCreate(comm, vv));
878   PetscCall(VecSetSizes(*vv, n, N));
879   PetscCall(VecSetBlockSize(*vv, bs));
880   PetscCall(VecCreate_MPI_Private(*vv, PETSC_TRUE, nghost * bs, array));
881   w = (Vec_MPI *)(*vv)->data;
882   /* Create local representation */
883   PetscCall(VecGetArray(*vv, &larray));
884   PetscCall(VecCreateSeqWithArray(PETSC_COMM_SELF, bs, n + bs * nghost, larray, &w->localrep));
885   PetscCall(VecRestoreArray(*vv, &larray));
886 
887   /*
888        Create scatter context for scattering (updating) ghost values
889   */
890   PetscCall(ISCreateBlock(comm, bs, nghost, ghosts, PETSC_COPY_VALUES, &from));
891   PetscCall(ISCreateStride(PETSC_COMM_SELF, bs * nghost, n, 1, &to));
892   PetscCall(VecScatterCreate(*vv, from, w->localrep, to, &w->localupdate));
893   PetscCall(ISDestroy(&to));
894   PetscCall(ISDestroy(&from));
895 
896   /* set local to global mapping for ghosted vector */
897   nb = n / bs;
898   PetscCall(PetscMalloc1(nb + nghost, &indices));
899   PetscCall(VecGetOwnershipRange(*vv, &rstart, NULL));
900   rstart = rstart / bs;
901 
902   for (i = 0; i < nb; i++) indices[i] = rstart + i;
903   for (i = 0; i < nghost; i++) indices[nb + i] = ghosts[i];
904 
905   PetscCall(ISLocalToGlobalMappingCreate(comm, bs, nb + nghost, indices, PETSC_OWN_POINTER, &ltog));
906   PetscCall(VecSetLocalToGlobalMapping(*vv, ltog));
907   PetscCall(ISLocalToGlobalMappingDestroy(&ltog));
908   PetscFunctionReturn(PETSC_SUCCESS);
909 }
910 
911 /*@
912   VecCreateGhostBlock - Creates a parallel vector with ghost padding on each processor.
913   The indexing of the ghost points is done with blocks.
914 
915   Collective
916 
917   Input Parameters:
918 + comm   - the MPI communicator to use
919 . bs     - the block size
920 . n      - local vector length
921 . N      - global vector length (or `PETSC_DETERMINE` to have it calculated if `n` is given)
922 . nghost - number of local ghost blocks
923 - ghosts - global indices of ghost blocks; counts are by block, not by individual index, and these do not need to be in increasing order (sorted)
924 
925   Output Parameter:
926 . vv - the global vector representation (without ghost points as part of vector)
927 
928   Level: advanced
929 
930   Notes:
931   Use `VecGhostGetLocalForm()` to access the local, ghosted representation
932   of the vector.
933 
934   `n` is the local vector size (total local size not the number of blocks) while `nghost`
935   is the number of blocks in the ghost portion, i.e. the number of elements in the ghost
936   portion is `bs*nghost`
937 
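  Example Usage (a minimal sketch; the block size, sizes, and ghost block indices are illustrative):
.vb
  Vec      v;
  PetscInt ghosts[2]; /* global block indices of the ghost blocks */

  /* ... fill ghosts[] ... */
  PetscCall(VecCreateGhostBlock(PETSC_COMM_WORLD, 2, 6, PETSC_DETERMINE, 2, ghosts, &v));
  PetscCall(VecGhostUpdateBegin(v, INSERT_VALUES, SCATTER_FORWARD));
  PetscCall(VecGhostUpdateEnd(v, INSERT_VALUES, SCATTER_FORWARD));
  PetscCall(VecDestroy(&v));
.ve
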
938 .seealso: [](ch_vectors), `Vec`, `VecType`, `VecCreateSeq()`, `VecCreate()`, `VecDuplicate()`, `VecDuplicateVecs()`, `VecCreateMPI()`,
939           `VecGhostGetLocalForm()`, `VecGhostRestoreLocalForm()`,
940           `VecCreateGhostWithArray()`, `VecCreateMPIWithArray()`, `VecCreateGhostBlockWithArray()`
941 @*/
942 PetscErrorCode VecCreateGhostBlock(MPI_Comm comm, PetscInt bs, PetscInt n, PetscInt N, PetscInt nghost, const PetscInt ghosts[], Vec *vv)
943 {
944   PetscFunctionBegin;
945   PetscCall(VecCreateGhostBlockWithArray(comm, bs, n, N, nghost, ghosts, NULL, vv));
946   PetscFunctionReturn(PETSC_SUCCESS);
947 }
948