
/*
   This file contains routines for parallel vector operations.
 */
#include <petscsys.h>
#include <../src/vec/vec/impls/mpi/pvecimpl.h> /*I  "petscvec.h"   I*/

PetscErrorCode VecDot_MPI(Vec xin, Vec yin, PetscScalar *z) {
  PetscScalar sum, work;

  PetscFunctionBegin;
  PetscCall(VecDot_Seq(xin, yin, &work));
  PetscCall(MPIU_Allreduce(&work, &sum, 1, MPIU_SCALAR, MPIU_SUM, PetscObjectComm((PetscObject)xin)));
  *z = sum;
  PetscFunctionReturn(0);
}

PetscErrorCode VecTDot_MPI(Vec xin, Vec yin, PetscScalar *z) {
  PetscScalar sum, work;

  PetscFunctionBegin;
  PetscCall(VecTDot_Seq(xin, yin, &work));
  PetscCall(MPIU_Allreduce(&work, &sum, 1, MPIU_SCALAR, MPIU_SUM, PetscObjectComm((PetscObject)xin)));
  *z = sum;
  PetscFunctionReturn(0);
}
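
/*
   The two reductions above follow the standard pattern for parallel reductions in this file:
   compute the local contribution with the sequential kernel, then combine across ranks with
   MPIU_Allreduce so every process receives the same global result.

   A minimal usage sketch from application code (sizes and values are placeholders):

     Vec         x, y;
     PetscScalar dot;
     PetscCall(VecCreateMPI(PETSC_COMM_WORLD, 10, PETSC_DETERMINE, &x));
     PetscCall(VecDuplicate(x, &y));
     PetscCall(VecSet(x, 1.0));
     PetscCall(VecSet(y, 2.0));
     PetscCall(VecDot(x, y, &dot));   // collective; dispatches to VecDot_MPI for VECMPI vectors
     PetscCall(VecDestroy(&x));
     PetscCall(VecDestroy(&y));
*/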

extern PetscErrorCode VecView_MPI_Draw(Vec, PetscViewer);

static PetscErrorCode VecPlaceArray_MPI(Vec vin, const PetscScalar *a) {
  Vec_MPI *v = (Vec_MPI *)vin->data;

  PetscFunctionBegin;
  PetscCheck(!v->unplacedarray, PETSC_COMM_SELF, PETSC_ERR_ARG_WRONGSTATE, "VecPlaceArray() was already called on this vector, without a call to VecResetArray()");
  v->unplacedarray = v->array; /* save previous array so reset can bring it back */
  v->array         = (PetscScalar *)a;
  if (v->localrep) PetscCall(VecPlaceArray(v->localrep, a));
  PetscFunctionReturn(0);
}
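
/*
   VecPlaceArray_MPI() swaps in a user-provided array and stashes the current one so that
   VecResetArray() can restore it; only one placement can be outstanding at a time.

   A minimal sketch of the calling pattern (the array and its length are hypothetical):

     Vec          x;
     PetscScalar *mine;
     PetscCall(VecCreateMPI(PETSC_COMM_WORLD, 10, PETSC_DETERMINE, &x));
     PetscCall(PetscMalloc1(10, &mine));
     PetscCall(VecPlaceArray(x, mine));  // x now reads/writes the user's storage
     // ... use x ...
     PetscCall(VecResetArray(x));        // restore the vector's original storage
     PetscCall(PetscFree(mine));
     PetscCall(VecDestroy(&x));
*/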

PetscErrorCode VecDuplicate_MPI(Vec win, Vec *v) {
  Vec_MPI     *vw, *w = (Vec_MPI *)win->data;
  PetscScalar *array;

  PetscFunctionBegin;
  PetscCall(VecCreate(PetscObjectComm((PetscObject)win), v));
  PetscCall(PetscLayoutReference(win->map, &(*v)->map));

  PetscCall(VecCreate_MPI_Private(*v, PETSC_TRUE, w->nghost, NULL));
  vw = (Vec_MPI *)(*v)->data;
  PetscCall(PetscMemcpy((*v)->ops, win->ops, sizeof(struct _VecOps)));

  /* save local representation of the parallel vector (and scatter) if it exists */
  if (w->localrep) {
    PetscCall(VecGetArray(*v, &array));
    PetscCall(VecCreateSeqWithArray(PETSC_COMM_SELF, PetscAbs(win->map->bs), win->map->n + w->nghost, array, &vw->localrep));
    PetscCall(PetscMemcpy(vw->localrep->ops, w->localrep->ops, sizeof(struct _VecOps)));
    PetscCall(VecRestoreArray(*v, &array));

    vw->localupdate = w->localupdate;
    if (vw->localupdate) PetscCall(PetscObjectReference((PetscObject)vw->localupdate));
  }

  /* New vector should inherit stashing property of parent */
  (*v)->stash.donotstash   = win->stash.donotstash;
  (*v)->stash.ignorenegidx = win->stash.ignorenegidx;

  PetscCall(PetscObjectListDuplicate(((PetscObject)win)->olist, &((PetscObject)(*v))->olist));
  PetscCall(PetscFunctionListDuplicate(((PetscObject)win)->qlist, &((PetscObject)(*v))->qlist));

  (*v)->map->bs   = PetscAbs(win->map->bs);
  (*v)->bstash.bs = win->bstash.bs;
  PetscFunctionReturn(0);
}

static PetscErrorCode VecSetOption_MPI(Vec V, VecOption op, PetscBool flag) {
  Vec_MPI *v = (Vec_MPI *)V->data;

  PetscFunctionBegin;
  switch (op) {
  case VEC_IGNORE_OFF_PROC_ENTRIES: V->stash.donotstash = flag; break;
  case VEC_IGNORE_NEGATIVE_INDICES: V->stash.ignorenegidx = flag; break;
  case VEC_SUBSET_OFF_PROC_ENTRIES:
    v->assembly_subset = flag;              /* See the same logic in MatAssembly wrt MAT_SUBSET_OFF_PROC_ENTRIES */
    if (!v->assembly_subset) {              /* User indicates "do not reuse the communication pattern" */
      PetscCall(VecAssemblyReset_MPI(V));   /* Reset existing pattern to free memory */
      v->first_assembly_done = PETSC_FALSE; /* Mark that the first assembly has not been done */
    }
    break;
  }

  PetscFunctionReturn(0);
}
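
/*
   For repeated assemblies that communicate with the same (or a subset of the same) set of
   off-process entries, VEC_SUBSET_OFF_PROC_ENTRIES lets the vector reuse the communication
   pattern built during the first assembly instead of rebuilding it every time.

   A hedged sketch of the intended calling pattern (nsteps, row, and val are placeholders):

     Vec x;   // a VECMPI vector that has already been created and sized
     PetscCall(VecSetOption(x, VEC_SUBSET_OFF_PROC_ENTRIES, PETSC_TRUE));
     for (PetscInt step = 0; step < nsteps; step++) {
       // set values, possibly on other ranks; later assemblies may touch a subset of them
       PetscCall(VecSetValue(x, row, val, ADD_VALUES));
       PetscCall(VecAssemblyBegin(x));
       PetscCall(VecAssemblyEnd(x));
     }
*/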

static PetscErrorCode VecResetArray_MPI(Vec vin) {
  Vec_MPI *v = (Vec_MPI *)vin->data;

  PetscFunctionBegin;
  v->array         = v->unplacedarray;
  v->unplacedarray = NULL;
  if (v->localrep) PetscCall(VecResetArray(v->localrep));
  PetscFunctionReturn(0);
}

static PetscErrorCode VecAssemblySend_MPI_Private(MPI_Comm comm, const PetscMPIInt tag[], PetscMPIInt rankid, PetscMPIInt rank, void *sdata, MPI_Request req[], void *ctx) {
  Vec                X   = (Vec)ctx;
  Vec_MPI           *x   = (Vec_MPI *)X->data;
  VecAssemblyHeader *hdr = (VecAssemblyHeader *)sdata;
  PetscInt           bs  = X->map->bs;

  PetscFunctionBegin;
  /* x->first_assembly_done indicates we are reusing a communication network. In that case, some
     messages can be empty, but we have to send them this time if we sent them before because the
     receiver is expecting them.
   */
  if (hdr->count || (x->first_assembly_done && x->sendptrs[rankid].ints)) {
    PetscCallMPI(MPI_Isend(x->sendptrs[rankid].ints, hdr->count, MPIU_INT, rank, tag[0], comm, &req[0]));
    PetscCallMPI(MPI_Isend(x->sendptrs[rankid].scalars, hdr->count, MPIU_SCALAR, rank, tag[1], comm, &req[1]));
  }
  if (hdr->bcount || (x->first_assembly_done && x->sendptrs[rankid].intb)) {
    PetscCallMPI(MPI_Isend(x->sendptrs[rankid].intb, hdr->bcount, MPIU_INT, rank, tag[2], comm, &req[2]));
    PetscCallMPI(MPI_Isend(x->sendptrs[rankid].scalarb, hdr->bcount * bs, MPIU_SCALAR, rank, tag[3], comm, &req[3]));
  }
  PetscFunctionReturn(0);
}

static PetscErrorCode VecAssemblyRecv_MPI_Private(MPI_Comm comm, const PetscMPIInt tag[], PetscMPIInt rank, void *rdata, MPI_Request req[], void *ctx) {
  Vec                X   = (Vec)ctx;
  Vec_MPI           *x   = (Vec_MPI *)X->data;
  VecAssemblyHeader *hdr = (VecAssemblyHeader *)rdata;
  PetscInt           bs  = X->map->bs;
  VecAssemblyFrame  *frame;

  PetscFunctionBegin;
  PetscCall(PetscSegBufferGet(x->segrecvframe, 1, &frame));

  if (hdr->count) {
    PetscCall(PetscSegBufferGet(x->segrecvint, hdr->count, &frame->ints));
    PetscCallMPI(MPI_Irecv(frame->ints, hdr->count, MPIU_INT, rank, tag[0], comm, &req[0]));
    PetscCall(PetscSegBufferGet(x->segrecvscalar, hdr->count, &frame->scalars));
    PetscCallMPI(MPI_Irecv(frame->scalars, hdr->count, MPIU_SCALAR, rank, tag[1], comm, &req[1]));
    frame->pendings = 2;
  } else {
    frame->ints     = NULL;
    frame->scalars  = NULL;
    frame->pendings = 0;
  }

  if (hdr->bcount) {
    PetscCall(PetscSegBufferGet(x->segrecvint, hdr->bcount, &frame->intb));
    PetscCallMPI(MPI_Irecv(frame->intb, hdr->bcount, MPIU_INT, rank, tag[2], comm, &req[2]));
    PetscCall(PetscSegBufferGet(x->segrecvscalar, hdr->bcount * bs, &frame->scalarb));
    PetscCallMPI(MPI_Irecv(frame->scalarb, hdr->bcount * bs, MPIU_SCALAR, rank, tag[3], comm, &req[3]));
    frame->pendingb = 2;
  } else {
    frame->intb     = NULL;
    frame->scalarb  = NULL;
    frame->pendingb = 0;
  }
  PetscFunctionReturn(0);
}

static PetscErrorCode VecAssemblyBegin_MPI_BTS(Vec X) {
  Vec_MPI *x = (Vec_MPI *)X->data;
  MPI_Comm comm;
  PetscInt i, j, jb, bs;

  PetscFunctionBegin;
  if (X->stash.donotstash) PetscFunctionReturn(0);

  PetscCall(PetscObjectGetComm((PetscObject)X, &comm));
  PetscCall(VecGetBlockSize(X, &bs));
  if (PetscDefined(USE_DEBUG)) {
    InsertMode addv;
    PetscCall(MPIU_Allreduce((PetscEnum *)&X->stash.insertmode, (PetscEnum *)&addv, 1, MPIU_ENUM, MPI_BOR, comm));
    PetscCheck(addv != (ADD_VALUES | INSERT_VALUES), comm, PETSC_ERR_ARG_NOTSAMETYPE, "Some processors inserted values while others added");
  }
  X->bstash.insertmode = X->stash.insertmode; /* Block stash implicitly tracks InsertMode of scalar stash */

  PetscCall(VecStashSortCompress_Private(&X->stash));
  PetscCall(VecStashSortCompress_Private(&X->bstash));

  if (!x->sendranks) {
    PetscMPIInt nowners, bnowners, *owners, *bowners;
    PetscInt    ntmp;
    PetscCall(VecStashGetOwnerList_Private(&X->stash, X->map, &nowners, &owners));
    PetscCall(VecStashGetOwnerList_Private(&X->bstash, X->map, &bnowners, &bowners));
    PetscCall(PetscMergeMPIIntArray(nowners, owners, bnowners, bowners, &ntmp, &x->sendranks));
    x->nsendranks = ntmp;
    PetscCall(PetscFree(owners));
    PetscCall(PetscFree(bowners));
    PetscCall(PetscMalloc1(x->nsendranks, &x->sendhdr));
    PetscCall(PetscCalloc1(x->nsendranks, &x->sendptrs));
  }
  for (i = 0, j = 0, jb = 0; i < x->nsendranks; i++) {
    PetscMPIInt rank         = x->sendranks[i];
    x->sendhdr[i].insertmode = X->stash.insertmode;
    /* Initialize pointers for non-empty stashes the first time around.  Subsequent assemblies with
     * VEC_SUBSET_OFF_PROC_ENTRIES will leave the old pointers (dangling because the stash has been collected) when
     * there is nothing new to send, so that size-zero messages get sent instead. */
    x->sendhdr[i].count      = 0;
    if (X->stash.n) {
      x->sendptrs[i].ints    = &X->stash.idx[j];
      x->sendptrs[i].scalars = &X->stash.array[j];
      for (; j < X->stash.n && X->stash.idx[j] < X->map->range[rank + 1]; j++) x->sendhdr[i].count++;
    }
    x->sendhdr[i].bcount = 0;
    if (X->bstash.n) {
      x->sendptrs[i].intb    = &X->bstash.idx[jb];
      x->sendptrs[i].scalarb = &X->bstash.array[jb * bs];
      for (; jb < X->bstash.n && X->bstash.idx[jb] * bs < X->map->range[rank + 1]; jb++) x->sendhdr[i].bcount++;
    }
  }

  if (!x->segrecvint) PetscCall(PetscSegBufferCreate(sizeof(PetscInt), 1000, &x->segrecvint));
  if (!x->segrecvscalar) PetscCall(PetscSegBufferCreate(sizeof(PetscScalar), 1000, &x->segrecvscalar));
  if (!x->segrecvframe) PetscCall(PetscSegBufferCreate(sizeof(VecAssemblyFrame), 50, &x->segrecvframe));
  if (x->first_assembly_done) { /* this is not the first assembly */
    PetscMPIInt tag[4];
    for (i = 0; i < 4; i++) PetscCall(PetscCommGetNewTag(comm, &tag[i]));
    for (i = 0; i < x->nsendranks; i++) PetscCall(VecAssemblySend_MPI_Private(comm, tag, i, x->sendranks[i], x->sendhdr + i, x->sendreqs + 4 * i, X));
    for (i = 0; i < x->nrecvranks; i++) PetscCall(VecAssemblyRecv_MPI_Private(comm, tag, x->recvranks[i], x->recvhdr + i, x->recvreqs + 4 * i, X));
    x->use_status = PETSC_TRUE;
  } else { /* First time assembly */
    PetscCall(PetscCommBuildTwoSidedFReq(comm, 3, MPIU_INT, x->nsendranks, x->sendranks, (PetscInt *)x->sendhdr, &x->nrecvranks, &x->recvranks, &x->recvhdr, 4, &x->sendreqs, &x->recvreqs, VecAssemblySend_MPI_Private, VecAssemblyRecv_MPI_Private, X));
    x->use_status = PETSC_FALSE;
  }

  /* The first_assembly_done flag is only meaningful when x->assembly_subset is set,
     so mark the first assembly as done only in that case.
   */
  x->first_assembly_done = x->assembly_subset;

  {
    PetscInt nstash, reallocs;
    PetscCall(VecStashGetInfo_Private(&X->stash, &nstash, &reallocs));
    PetscCall(PetscInfo(X, "Stash has %" PetscInt_FMT " entries, uses %" PetscInt_FMT " mallocs.\n", nstash, reallocs));
    PetscCall(VecStashGetInfo_Private(&X->bstash, &nstash, &reallocs));
    PetscCall(PetscInfo(X, "Block-Stash has %" PetscInt_FMT " entries, uses %" PetscInt_FMT " mallocs.\n", nstash, reallocs));
  }
  PetscFunctionReturn(0);
}

static PetscErrorCode VecAssemblyEnd_MPI_BTS(Vec X) {
  Vec_MPI          *x  = (Vec_MPI *)X->data;
  PetscInt          bs = X->map->bs;
  PetscMPIInt       npending, *some_indices, r;
  MPI_Status       *some_statuses;
  PetscScalar      *xarray;
  VecAssemblyFrame *frame;

  PetscFunctionBegin;
  if (X->stash.donotstash) {
    X->stash.insertmode  = NOT_SET_VALUES;
    X->bstash.insertmode = NOT_SET_VALUES;
    PetscFunctionReturn(0);
  }

  PetscCheck(x->segrecvframe, PETSC_COMM_SELF, PETSC_ERR_PLIB, "Missing segrecvframe! Probably you forgot to call VecAssemblyBegin first");
  PetscCall(VecGetArray(X, &xarray));
  PetscCall(PetscSegBufferExtractInPlace(x->segrecvframe, &frame));
  PetscCall(PetscMalloc2(4 * x->nrecvranks, &some_indices, x->use_status ? 4 * x->nrecvranks : 0, &some_statuses));
  for (r = 0, npending = 0; r < x->nrecvranks; r++) npending += frame[r].pendings + frame[r].pendingb;
  while (npending > 0) {
    PetscMPIInt ndone = 0, ii;
    /* Filling MPI_Status fields requires some resources from the MPI library.  We skip it on the first assembly, or
     * when VEC_SUBSET_OFF_PROC_ENTRIES has not been set, because we could exchange exact sizes in the initial
     * rendezvous.  When the rendezvous is elided, however, we use MPI_Status to get actual message lengths, so that
     * subsequent assembly can set a proper subset of the values. */
    PetscCallMPI(MPI_Waitsome(4 * x->nrecvranks, x->recvreqs, &ndone, some_indices, x->use_status ? some_statuses : MPI_STATUSES_IGNORE));
    for (ii = 0; ii < ndone; ii++) {
      PetscInt     i     = some_indices[ii] / 4, j, k;
      InsertMode   imode = (InsertMode)x->recvhdr[i].insertmode;
      PetscInt    *recvint;
      PetscScalar *recvscalar;
      PetscBool    intmsg   = (PetscBool)(some_indices[ii] % 2 == 0);
      PetscBool    blockmsg = (PetscBool)((some_indices[ii] % 4) / 2 == 1);
      npending--;
      if (!blockmsg) { /* Scalar stash */
        PetscMPIInt count;
        if (--frame[i].pendings > 0) continue;
        if (x->use_status) {
          PetscCallMPI(MPI_Get_count(&some_statuses[ii], intmsg ? MPIU_INT : MPIU_SCALAR, &count));
        } else count = x->recvhdr[i].count;
        for (j = 0, recvint = frame[i].ints, recvscalar = frame[i].scalars; j < count; j++, recvint++) {
          PetscInt loc = *recvint - X->map->rstart;
          PetscCheck(*recvint >= X->map->rstart && X->map->rend > *recvint, PETSC_COMM_SELF, PETSC_ERR_PLIB, "Received vector entry %" PetscInt_FMT " out of local range [%" PetscInt_FMT ",%" PetscInt_FMT ")", *recvint, X->map->rstart, X->map->rend);
          switch (imode) {
          case ADD_VALUES: xarray[loc] += *recvscalar++; break;
          case INSERT_VALUES: xarray[loc] = *recvscalar++; break;
          default: SETERRQ(PETSC_COMM_SELF, PETSC_ERR_SUP, "Insert mode not supported 0x%x", imode);
          }
        }
      } else { /* Block stash */
        PetscMPIInt count;
        if (--frame[i].pendingb > 0) continue;
        if (x->use_status) {
          PetscCallMPI(MPI_Get_count(&some_statuses[ii], intmsg ? MPIU_INT : MPIU_SCALAR, &count));
          if (!intmsg) count /= bs; /* Convert from number of scalars to number of blocks */
        } else count = x->recvhdr[i].bcount;
        for (j = 0, recvint = frame[i].intb, recvscalar = frame[i].scalarb; j < count; j++, recvint++) {
          PetscInt loc = (*recvint) * bs - X->map->rstart;
          switch (imode) {
          case ADD_VALUES:
            for (k = loc; k < loc + bs; k++) xarray[k] += *recvscalar++;
            break;
          case INSERT_VALUES:
            for (k = loc; k < loc + bs; k++) xarray[k] = *recvscalar++;
            break;
          default: SETERRQ(PETSC_COMM_SELF, PETSC_ERR_SUP, "Insert mode not supported 0x%x", imode);
          }
        }
      }
    }
  }
  PetscCall(VecRestoreArray(X, &xarray));
  PetscCallMPI(MPI_Waitall(4 * x->nsendranks, x->sendreqs, MPI_STATUSES_IGNORE));
  PetscCall(PetscFree2(some_indices, some_statuses));
  if (x->assembly_subset) {
    PetscCall(PetscSegBufferExtractInPlace(x->segrecvint, NULL));
    PetscCall(PetscSegBufferExtractInPlace(x->segrecvscalar, NULL));
  } else {
    PetscCall(VecAssemblyReset_MPI(X));
  }

  X->stash.insertmode  = NOT_SET_VALUES;
  X->bstash.insertmode = NOT_SET_VALUES;
  PetscCall(VecStashScatterEnd_Private(&X->stash));
  PetscCall(VecStashScatterEnd_Private(&X->bstash));
  PetscFunctionReturn(0);
}
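
/*
   VecAssemblyBegin_MPI_BTS()/VecAssemblyEnd_MPI_BTS() implement the default assembly path:
   stashed off-process entries are exchanged using a build-two-sided rendezvous on the first
   assembly, and the discovered communication pattern can be reused on later assemblies when
   VEC_SUBSET_OFF_PROC_ENTRIES is set.

   A minimal sketch of the user-level calling sequence (index and value are placeholders):

     Vec x;   // a parallel vector created elsewhere
     PetscCall(VecSetValue(x, global_row, 1.0, ADD_VALUES));  // may target another rank
     PetscCall(VecAssemblyBegin(x));   // dispatches to VecAssemblyBegin_MPI_BTS for VECMPI
     PetscCall(VecAssemblyEnd(x));     // dispatches to VecAssemblyEnd_MPI_BTS
*/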

PetscErrorCode VecAssemblyReset_MPI(Vec X) {
  Vec_MPI *x = (Vec_MPI *)X->data;

  PetscFunctionBegin;
  PetscCall(PetscFree(x->sendreqs));
  PetscCall(PetscFree(x->recvreqs));
  PetscCall(PetscFree(x->sendranks));
  PetscCall(PetscFree(x->recvranks));
  PetscCall(PetscFree(x->sendhdr));
  PetscCall(PetscFree(x->recvhdr));
  PetscCall(PetscFree(x->sendptrs));
  PetscCall(PetscSegBufferDestroy(&x->segrecvint));
  PetscCall(PetscSegBufferDestroy(&x->segrecvscalar));
  PetscCall(PetscSegBufferDestroy(&x->segrecvframe));
  PetscFunctionReturn(0);
}

static PetscErrorCode VecSetFromOptions_MPI(Vec X, PetscOptionItems *PetscOptionsObject) {
#if !defined(PETSC_HAVE_MPIUNI)
  PetscBool flg = PETSC_FALSE, set;

  PetscFunctionBegin;
  PetscOptionsHeadBegin(PetscOptionsObject, "VecMPI Options");
  PetscCall(PetscOptionsBool("-vec_assembly_legacy", "Use MPI 1 version of assembly", "", flg, &flg, &set));
  if (set) {
    X->ops->assemblybegin = flg ? VecAssemblyBegin_MPI : VecAssemblyBegin_MPI_BTS;
    X->ops->assemblyend   = flg ? VecAssemblyEnd_MPI : VecAssemblyEnd_MPI_BTS;
  }
  PetscOptionsHeadEnd();
#else
  PetscFunctionBegin;
  X->ops->assemblybegin = VecAssemblyBegin_MPI;
  X->ops->assemblyend   = VecAssemblyEnd_MPI;
#endif
  PetscFunctionReturn(0);
}
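
/*
   The legacy (MPI-1 style) assembly can be selected at runtime. A hedged sketch of how the
   option reaches this routine (program name and option placement are illustrative only):

     // on the command line:
     //   ./app -vec_assembly_legacy
     // and in the code, after VecSetSizes()/VecSetType():
     PetscCall(VecSetFromOptions(x));   // invokes VecSetFromOptions_MPI for VECMPI vectors
*/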

static struct _VecOps DvOps = {PetscDesignatedInitializer(duplicate, VecDuplicate_MPI), /* 1 */
                               PetscDesignatedInitializer(duplicatevecs, VecDuplicateVecs_Default),
                               PetscDesignatedInitializer(destroyvecs, VecDestroyVecs_Default),
                               PetscDesignatedInitializer(dot, VecDot_MPI),
                               PetscDesignatedInitializer(mdot, VecMDot_MPI),
                               PetscDesignatedInitializer(norm, VecNorm_MPI),
                               PetscDesignatedInitializer(tdot, VecTDot_MPI),
                               PetscDesignatedInitializer(mtdot, VecMTDot_MPI),
                               PetscDesignatedInitializer(scale, VecScale_Seq),
                               PetscDesignatedInitializer(copy, VecCopy_Seq), /* 10 */
                               PetscDesignatedInitializer(set, VecSet_Seq),
                               PetscDesignatedInitializer(swap, VecSwap_Seq),
                               PetscDesignatedInitializer(axpy, VecAXPY_Seq),
                               PetscDesignatedInitializer(axpby, VecAXPBY_Seq),
                               PetscDesignatedInitializer(maxpy, VecMAXPY_Seq),
                               PetscDesignatedInitializer(aypx, VecAYPX_Seq),
                               PetscDesignatedInitializer(waxpy, VecWAXPY_Seq),
                               PetscDesignatedInitializer(axpbypcz, VecAXPBYPCZ_Seq),
                               PetscDesignatedInitializer(pointwisemult, VecPointwiseMult_Seq),
                               PetscDesignatedInitializer(pointwisedivide, VecPointwiseDivide_Seq),
                               PetscDesignatedInitializer(setvalues, VecSetValues_MPI), /* 20 */
                               PetscDesignatedInitializer(assemblybegin, VecAssemblyBegin_MPI_BTS),
                               PetscDesignatedInitializer(assemblyend, VecAssemblyEnd_MPI_BTS),
                               PetscDesignatedInitializer(getarray, NULL),
                               PetscDesignatedInitializer(getsize, VecGetSize_MPI),
                               PetscDesignatedInitializer(getlocalsize, VecGetSize_Seq),
                               PetscDesignatedInitializer(restorearray, NULL),
                               PetscDesignatedInitializer(max, VecMax_MPI),
                               PetscDesignatedInitializer(min, VecMin_MPI),
                               PetscDesignatedInitializer(setrandom, VecSetRandom_Seq),
                               PetscDesignatedInitializer(setoption, VecSetOption_MPI),
                               PetscDesignatedInitializer(setvaluesblocked, VecSetValuesBlocked_MPI),
                               PetscDesignatedInitializer(destroy, VecDestroy_MPI),
                               PetscDesignatedInitializer(view, VecView_MPI),
                               PetscDesignatedInitializer(placearray, VecPlaceArray_MPI),
                               PetscDesignatedInitializer(replacearray, VecReplaceArray_Seq),
                               PetscDesignatedInitializer(dot_local, VecDot_Seq),
                               PetscDesignatedInitializer(tdot_local, VecTDot_Seq),
                               PetscDesignatedInitializer(norm_local, VecNorm_Seq),
                               PetscDesignatedInitializer(mdot_local, VecMDot_Seq),
                               PetscDesignatedInitializer(mtdot_local, VecMTDot_Seq),
                               PetscDesignatedInitializer(load, VecLoad_Default),
                               PetscDesignatedInitializer(reciprocal, VecReciprocal_Default),
                               PetscDesignatedInitializer(conjugate, VecConjugate_Seq),
                               PetscDesignatedInitializer(setlocaltoglobalmapping, NULL),
                               PetscDesignatedInitializer(setvalueslocal, NULL),
                               PetscDesignatedInitializer(resetarray, VecResetArray_MPI),
                               PetscDesignatedInitializer(setfromoptions, VecSetFromOptions_MPI), /*set from options */
                               PetscDesignatedInitializer(maxpointwisedivide, VecMaxPointwiseDivide_Seq),
                               PetscDesignatedInitializer(pointwisemax, VecPointwiseMax_Seq),
                               PetscDesignatedInitializer(pointwisemaxabs, VecPointwiseMaxAbs_Seq),
                               PetscDesignatedInitializer(pointwisemin, VecPointwiseMin_Seq),
                               PetscDesignatedInitializer(getvalues, VecGetValues_MPI),
                               PetscDesignatedInitializer(sqrt, NULL),
                               PetscDesignatedInitializer(abs, NULL),
                               PetscDesignatedInitializer(exp, NULL),
                               PetscDesignatedInitializer(log, NULL),
                               PetscDesignatedInitializer(shift, NULL),
                               PetscDesignatedInitializer(create, NULL), /* really? */
                               PetscDesignatedInitializer(stridegather, VecStrideGather_Default),
                               PetscDesignatedInitializer(stridescatter, VecStrideScatter_Default),
                               PetscDesignatedInitializer(dotnorm2, NULL),
                               PetscDesignatedInitializer(getsubvector, NULL),
                               PetscDesignatedInitializer(restoresubvector, NULL),
                               PetscDesignatedInitializer(getarrayread, NULL),
                               PetscDesignatedInitializer(restorearrayread, NULL),
                               PetscDesignatedInitializer(stridesubsetgather, VecStrideSubSetGather_Default),
                               PetscDesignatedInitializer(stridesubsetscatter, VecStrideSubSetScatter_Default),
                               PetscDesignatedInitializer(viewnative, VecView_MPI),
                               PetscDesignatedInitializer(loadnative, NULL),
                               PetscDesignatedInitializer(createlocalvector, NULL),
                               PetscDesignatedInitializer(getlocalvector, NULL),
                               PetscDesignatedInitializer(restorelocalvector, NULL),
                               PetscDesignatedInitializer(getlocalvectorread, NULL),
                               PetscDesignatedInitializer(restorelocalvectorread, NULL),
                               PetscDesignatedInitializer(bindtocpu, NULL),
                               PetscDesignatedInitializer(getarraywrite, NULL),
                               PetscDesignatedInitializer(restorearraywrite, NULL),
                               PetscDesignatedInitializer(getarrayandmemtype, NULL),
                               PetscDesignatedInitializer(restorearrayandmemtype, NULL),
                               PetscDesignatedInitializer(getarrayreadandmemtype, NULL),
                               PetscDesignatedInitializer(restorearrayreadandmemtype, NULL),
                               PetscDesignatedInitializer(getarraywriteandmemtype, NULL),
                               PetscDesignatedInitializer(restorearraywriteandmemtype, NULL),
                               PetscDesignatedInitializer(concatenate, NULL),
                               PetscDesignatedInitializer(sum, NULL),
                               PetscDesignatedInitializer(setpreallocationcoo, VecSetPreallocationCOO_MPI),
                               PetscDesignatedInitializer(setvaluescoo, VecSetValuesCOO_MPI)};

/*
    VecCreate_MPI_Private - Basic create routine called by VecCreate_MPI() (i.e. VecCreateMPI()),
    VecCreateMPIWithArray(), VecCreate_Shared() (i.e. VecCreateShared()), VecCreateGhost(),
    VecCreateGhostWithArray(), VecDuplicate_MPI(), and VecDuplicate_Shared()

    If alloc is true and array is NULL then this routine allocates the space, otherwise
    no space is allocated.
*/
PetscErrorCode VecCreate_MPI_Private(Vec v, PetscBool alloc, PetscInt nghost, const PetscScalar array[]) {
  Vec_MPI *s;

  PetscFunctionBegin;
  PetscCall(PetscNew(&s));
  v->data = (void *)s;
  PetscCall(PetscMemcpy(v->ops, &DvOps, sizeof(DvOps)));
  s->nghost      = nghost;
  v->petscnative = PETSC_TRUE;
  if (array) v->offloadmask = PETSC_OFFLOAD_CPU;

  PetscCall(PetscLayoutSetUp(v->map));

  s->array           = (PetscScalar *)array;
  s->array_allocated = NULL;
  if (alloc && !array) {
    PetscInt n = v->map->n + nghost;
    PetscCall(PetscCalloc1(n, &s->array));
    s->array_allocated = s->array;
  }

  /* By default parallel vectors do not have local representation */
  s->localrep    = NULL;
  s->localupdate = NULL;

  v->stash.insertmode  = NOT_SET_VALUES;
  v->bstash.insertmode = NOT_SET_VALUES;
  /* create the stashes. The block-size for bstash is set later when
     VecSetValuesBlocked is called.
  */
  PetscCall(VecStashCreate_Private(PetscObjectComm((PetscObject)v), 1, &v->stash));
  PetscCall(VecStashCreate_Private(PetscObjectComm((PetscObject)v), PetscAbs(v->map->bs), &v->bstash));

#if defined(PETSC_HAVE_MATLAB)
  PetscCall(PetscObjectComposeFunction((PetscObject)v, "PetscMatlabEnginePut_C", VecMatlabEnginePut_Default));
  PetscCall(PetscObjectComposeFunction((PetscObject)v, "PetscMatlabEngineGet_C", VecMatlabEngineGet_Default));
#endif
  PetscCall(PetscObjectChangeTypeName((PetscObject)v, VECMPI));
  PetscFunctionReturn(0);
}

/*MC
   VECMPI - VECMPI = "mpi" - The basic parallel vector

   Options Database Keys:
. -vec_type mpi - sets the vector type to VECMPI during a call to VecSetFromOptions()

  Level: beginner

.seealso: `VecCreate()`, `VecSetType()`, `VecSetFromOptions()`, `VecCreateMPIWithArray()`, `VecType`, `VecCreateMPI()`
M*/
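
/*
   A minimal sketch of creating a VECMPI explicitly (sizes are placeholders; alternatively use
   VecCreateMPI() or let VecSetFromOptions() pick the type):

     Vec x;
     PetscCall(VecCreate(PETSC_COMM_WORLD, &x));
     PetscCall(VecSetSizes(x, PETSC_DECIDE, 100));
     PetscCall(VecSetType(x, VECMPI));
     PetscCall(VecSet(x, 0.0));
     PetscCall(VecDestroy(&x));
*/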

PetscErrorCode VecCreate_MPI(Vec vv) {
  PetscFunctionBegin;
  PetscCall(VecCreate_MPI_Private(vv, PETSC_TRUE, 0, NULL));
  PetscFunctionReturn(0);
}

/*MC
   VECSTANDARD = "standard" - A VECSEQ on one process and VECMPI on more than one process

   Options Database Keys:
. -vec_type standard - sets the vector type to VECSTANDARD during a call to VecSetFromOptions()

  Level: beginner

.seealso: `VecCreateSeq()`, `VecCreateMPI()`
M*/

PETSC_EXTERN PetscErrorCode VecCreate_Standard(Vec v) {
  PetscMPIInt size;

  PetscFunctionBegin;
  PetscCallMPI(MPI_Comm_size(PetscObjectComm((PetscObject)v), &size));
  if (size == 1) {
    PetscCall(VecSetType(v, VECSEQ));
  } else {
    PetscCall(VecSetType(v, VECMPI));
  }
  PetscFunctionReturn(0);
}

/*@C
   VecCreateMPIWithArray - Creates a parallel, array-style vector,
   where the user provides the array space to store the vector values.

   Collective

   Input Parameters:
+  comm  - the MPI communicator to use
.  bs    - block size, same meaning as VecSetBlockSize()
.  n     - local vector length, cannot be PETSC_DECIDE
.  N     - global vector length (or PETSC_DECIDE to have calculated)
-  array - the user provided array to store the vector values

   Output Parameter:
.  vv - the vector

   Notes:
   Use VecDuplicate() or VecDuplicateVecs() to form additional vectors of the
   same type as an existing vector.

   If the user-provided array is NULL, then VecPlaceArray() can be used
   at a later stage to SET the array for storing the vector values.

   PETSc does NOT free the array when the vector is destroyed via VecDestroy().
   The user should not free the array until the vector is destroyed.

   Level: intermediate

.seealso: `VecCreateSeqWithArray()`, `VecCreate()`, `VecDuplicate()`, `VecDuplicateVecs()`, `VecCreateGhost()`,
          `VecCreateMPI()`, `VecCreateGhostWithArray()`, `VecPlaceArray()`

@*/
PetscErrorCode VecCreateMPIWithArray(MPI_Comm comm, PetscInt bs, PetscInt n, PetscInt N, const PetscScalar array[], Vec *vv) {
  PetscFunctionBegin;
  PetscCheck(n != PETSC_DECIDE, PETSC_COMM_SELF, PETSC_ERR_ARG_OUTOFRANGE, "Must set local size of vector");
  PetscCall(PetscSplitOwnership(comm, &n, &N));
  PetscCall(VecCreate(comm, vv));
  PetscCall(VecSetSizes(*vv, n, N));
  PetscCall(VecSetBlockSize(*vv, bs));
  PetscCall(VecCreate_MPI_Private(*vv, PETSC_FALSE, 0, array));
  PetscFunctionReturn(0);
}
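
/*
   A minimal usage sketch (lengths are placeholders; the caller owns the array and frees it only
   after the vector is destroyed):

     PetscScalar *a;
     Vec          x;
     PetscInt     nlocal = 10;
     PetscCall(PetscMalloc1(nlocal, &a));
     PetscCall(VecCreateMPIWithArray(PETSC_COMM_WORLD, 1, nlocal, PETSC_DECIDE, a, &x));
     // ... use x ...
     PetscCall(VecDestroy(&x));
     PetscCall(PetscFree(a));
*/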

/*@C
   VecCreateGhostWithArray - Creates a parallel vector with ghost padding on each processor;
   the caller allocates the array space.

   Collective

   Input Parameters:
+  comm - the MPI communicator to use
.  n - local vector length
.  N - global vector length (or PETSC_DECIDE to have calculated if n is given)
.  nghost - number of local ghost points
.  ghosts - global indices of ghost points (or NULL if not needed), these do not need to be in increasing order (sorted)
-  array - the space to store the vector values (as long as n + nghost)

   Output Parameter:
.  vv - the global vector representation (without ghost points as part of vector)

   Notes:
   Use VecGhostGetLocalForm() to access the local, ghosted representation
   of the vector.

   This also automatically sets the ISLocalToGlobalMapping() for this vector.

   Level: advanced

.seealso: `VecCreate()`, `VecGhostGetLocalForm()`, `VecGhostRestoreLocalForm()`,
          `VecCreateGhost()`, `VecCreateSeqWithArray()`, `VecCreateMPIWithArray()`,
          `VecCreateGhostBlock()`, `VecCreateGhostBlockWithArray()`, `VecMPISetGhost()`

@*/
PetscErrorCode VecCreateGhostWithArray(MPI_Comm comm, PetscInt n, PetscInt N, PetscInt nghost, const PetscInt ghosts[], const PetscScalar array[], Vec *vv) {
  Vec_MPI               *w;
  PetscScalar           *larray;
  IS                     from, to;
  ISLocalToGlobalMapping ltog;
  PetscInt               rstart, i, *indices;

  PetscFunctionBegin;
  *vv = NULL;

  PetscCheck(n != PETSC_DECIDE, PETSC_COMM_SELF, PETSC_ERR_ARG_OUTOFRANGE, "Must set local size");
  PetscCheck(nghost != PETSC_DECIDE, PETSC_COMM_SELF, PETSC_ERR_ARG_OUTOFRANGE, "Must set local ghost size");
  PetscCheck(nghost >= 0, PETSC_COMM_SELF, PETSC_ERR_ARG_OUTOFRANGE, "Ghost length must be >= 0");
  PetscCall(PetscSplitOwnership(comm, &n, &N));
  /* Create global representation */
  PetscCall(VecCreate(comm, vv));
  PetscCall(VecSetSizes(*vv, n, N));
  PetscCall(VecCreate_MPI_Private(*vv, PETSC_TRUE, nghost, array));
  w = (Vec_MPI *)(*vv)->data;
  /* Create local representation */
  PetscCall(VecGetArray(*vv, &larray));
  PetscCall(VecCreateSeqWithArray(PETSC_COMM_SELF, 1, n + nghost, larray, &w->localrep));
  PetscCall(VecRestoreArray(*vv, &larray));

  /*
       Create scatter context for scattering (updating) ghost values
  */
  PetscCall(ISCreateGeneral(comm, nghost, ghosts, PETSC_COPY_VALUES, &from));
  PetscCall(ISCreateStride(PETSC_COMM_SELF, nghost, n, 1, &to));
  PetscCall(VecScatterCreate(*vv, from, w->localrep, to, &w->localupdate));
  PetscCall(ISDestroy(&to));
  PetscCall(ISDestroy(&from));

  /* set local to global mapping for ghosted vector */
  PetscCall(PetscMalloc1(n + nghost, &indices));
  PetscCall(VecGetOwnershipRange(*vv, &rstart, NULL));
  for (i = 0; i < n; i++) indices[i] = rstart + i;
  for (i = 0; i < nghost; i++) indices[n + i] = ghosts[i];
  PetscCall(ISLocalToGlobalMappingCreate(comm, 1, n + nghost, indices, PETSC_OWN_POINTER, &ltog));
  PetscCall(VecSetLocalToGlobalMapping(*vv, ltog));
  PetscCall(ISLocalToGlobalMappingDestroy(&ltog));
  PetscFunctionReturn(0);
}
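
/*
   A minimal sketch (indices and sizes are placeholders; the caller supplies storage of length
   n + nghost and frees it only after the vector is destroyed):

     PetscInt     nlocal = 4, nghost = 2, ghosts[2] = {0, 1};   // global indices of ghost points
     PetscScalar *a;
     Vec          x;
     PetscCall(PetscMalloc1(nlocal + nghost, &a));
     PetscCall(VecCreateGhostWithArray(PETSC_COMM_WORLD, nlocal, PETSC_DECIDE, nghost, ghosts, a, &x));
     // ... set values, assemble, update ghosts ...
     PetscCall(VecDestroy(&x));
     PetscCall(PetscFree(a));
*/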

/*@
   VecCreateGhost - Creates a parallel vector with ghost padding on each processor.

   Collective

   Input Parameters:
+  comm - the MPI communicator to use
.  n - local vector length
.  N - global vector length (or PETSC_DECIDE to have calculated if n is given)
.  nghost - number of local ghost points
-  ghosts - global indices of ghost points, these do not need to be in increasing order (sorted)

   Output Parameter:
.  vv - the global vector representation (without ghost points as part of vector)

   Notes:
   Use VecGhostGetLocalForm() to access the local, ghosted representation
   of the vector.

   This also automatically sets the ISLocalToGlobalMapping() for this vector.

   Level: advanced

.seealso: `VecCreateSeq()`, `VecCreate()`, `VecDuplicate()`, `VecDuplicateVecs()`, `VecCreateMPI()`,
          `VecGhostGetLocalForm()`, `VecGhostRestoreLocalForm()`, `VecGhostUpdateBegin()`,
          `VecCreateGhostWithArray()`, `VecCreateMPIWithArray()`, `VecGhostUpdateEnd()`,
          `VecCreateGhostBlock()`, `VecCreateGhostBlockWithArray()`, `VecMPISetGhost()`

@*/
PetscErrorCode VecCreateGhost(MPI_Comm comm, PetscInt n, PetscInt N, PetscInt nghost, const PetscInt ghosts[], Vec *vv) {
  PetscFunctionBegin;
  PetscCall(VecCreateGhostWithArray(comm, n, N, nghost, ghosts, NULL, vv));
  PetscFunctionReturn(0);
}
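
/*
   A minimal sketch of the typical ghost-update workflow (indices and sizes are placeholders):

     Vec      x, xlocal;
     PetscInt nlocal = 4, nghost = 2, ghosts[2] = {0, 1};
     PetscCall(VecCreateGhost(PETSC_COMM_WORLD, nlocal, PETSC_DECIDE, nghost, ghosts, &x));
     // ... VecSetValues()/VecAssemblyBegin()/VecAssemblyEnd() on the global form ...
     PetscCall(VecGhostUpdateBegin(x, INSERT_VALUES, SCATTER_FORWARD));  // fill ghost slots
     PetscCall(VecGhostUpdateEnd(x, INSERT_VALUES, SCATTER_FORWARD));
     PetscCall(VecGhostGetLocalForm(x, &xlocal));      // sequential view of owned + ghost entries
     // ... read the ghosted local form ...
     PetscCall(VecGhostRestoreLocalForm(x, &xlocal));
     PetscCall(VecDestroy(&x));
*/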

/*@
   VecMPISetGhost - Sets the ghost points for an MPI ghost vector

   Collective on Vec

   Input Parameters:
+  vv - the MPI vector
.  nghost - number of local ghost points
-  ghosts - global indices of ghost points, these do not need to be in increasing order (sorted)

   Notes:
   Use VecGhostGetLocalForm() to access the local, ghosted representation
   of the vector.

   This also automatically sets the ISLocalToGlobalMapping() for this vector.

   You must call this AFTER you have set the type of the vector (with VecSetType()) and the size (with VecSetSizes()).

   Level: advanced

.seealso: `VecCreateSeq()`, `VecCreate()`, `VecDuplicate()`, `VecDuplicateVecs()`, `VecCreateMPI()`,
          `VecGhostGetLocalForm()`, `VecGhostRestoreLocalForm()`, `VecGhostUpdateBegin()`,
          `VecCreateGhostWithArray()`, `VecCreateMPIWithArray()`, `VecGhostUpdateEnd()`,
          `VecCreateGhostBlock()`, `VecCreateGhostBlockWithArray()`

@*/
PetscErrorCode VecMPISetGhost(Vec vv, PetscInt nghost, const PetscInt ghosts[]) {
  PetscBool flg;

  PetscFunctionBegin;
  PetscCall(PetscObjectTypeCompare((PetscObject)vv, VECMPI, &flg));
  /* if this is already a fully created VECMPI then destroy it internally and rebuild with ghosting */
  if (flg) {
    PetscInt               n, N;
    Vec_MPI               *w;
    PetscScalar           *larray;
    IS                     from, to;
    ISLocalToGlobalMapping ltog;
    PetscInt               rstart, i, *indices;
    MPI_Comm               comm;

    PetscCall(PetscObjectGetComm((PetscObject)vv, &comm));
    n = vv->map->n;
    N = vv->map->N;
    PetscUseTypeMethod(vv, destroy);
    PetscCall(VecSetSizes(vv, n, N));
    PetscCall(VecCreate_MPI_Private(vv, PETSC_TRUE, nghost, NULL));
    w = (Vec_MPI *)(vv)->data;
    /* Create local representation */
    PetscCall(VecGetArray(vv, &larray));
    PetscCall(VecCreateSeqWithArray(PETSC_COMM_SELF, 1, n + nghost, larray, &w->localrep));
    PetscCall(VecRestoreArray(vv, &larray));

    /*
     Create scatter context for scattering (updating) ghost values
     */
    PetscCall(ISCreateGeneral(comm, nghost, ghosts, PETSC_COPY_VALUES, &from));
    PetscCall(ISCreateStride(PETSC_COMM_SELF, nghost, n, 1, &to));
    PetscCall(VecScatterCreate(vv, from, w->localrep, to, &w->localupdate));
    PetscCall(ISDestroy(&to));
    PetscCall(ISDestroy(&from));

    /* set local to global mapping for ghosted vector */
    PetscCall(PetscMalloc1(n + nghost, &indices));
    PetscCall(VecGetOwnershipRange(vv, &rstart, NULL));

    for (i = 0; i < n; i++) indices[i] = rstart + i;
    for (i = 0; i < nghost; i++) indices[n + i] = ghosts[i];

    PetscCall(ISLocalToGlobalMappingCreate(comm, 1, n + nghost, indices, PETSC_OWN_POINTER, &ltog));
    PetscCall(VecSetLocalToGlobalMapping(vv, ltog));
    PetscCall(ISLocalToGlobalMappingDestroy(&ltog));
  } else {
    PetscCheck(vv->ops->create != VecCreate_MPI, PetscObjectComm((PetscObject)vv), PETSC_ERR_ARG_WRONGSTATE, "Must set local or global size before setting ghosting");
    PetscCheck(((PetscObject)vv)->type_name, PetscObjectComm((PetscObject)vv), PETSC_ERR_ARG_WRONGSTATE, "Must set type to VECMPI before ghosting");
  }
  PetscFunctionReturn(0);
}
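
/*
   A minimal sketch of adding ghosting to an existing VECMPI (sizes and indices are placeholders;
   the type and sizes must already be set, as the man page above requires):

     Vec      x;
     PetscInt nghost = 2, ghosts[2] = {0, 1};
     PetscCall(VecCreate(PETSC_COMM_WORLD, &x));
     PetscCall(VecSetSizes(x, 4, PETSC_DECIDE));
     PetscCall(VecSetType(x, VECMPI));
     PetscCall(VecMPISetGhost(x, nghost, ghosts));
     PetscCall(VecDestroy(&x));
*/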

/* ------------------------------------------------------------------------------------------*/
/*@C
   VecCreateGhostBlockWithArray - Creates a parallel vector with ghost padding on each processor;
   the caller allocates the array space. Indices in the ghost region are based on blocks.

   Collective

   Input Parameters:
+  comm - the MPI communicator to use
.  bs - block size
.  n - local vector length
.  N - global vector length (or PETSC_DECIDE to have calculated if n is given)
.  nghost - number of local ghost blocks
.  ghosts - global indices of ghost blocks (or NULL if not needed), counts are by block not by index, these do not need to be in increasing order (sorted)
-  array - the space to store the vector values (as long as n + nghost*bs)

   Output Parameter:
.  vv - the global vector representation (without ghost points as part of vector)

   Notes:
   Use VecGhostGetLocalForm() to access the local, ghosted representation
   of the vector.

   n is the local vector size (total local size not the number of blocks) while nghost
   is the number of blocks in the ghost portion, i.e. the number of elements in the ghost
   portion is bs*nghost

   Level: advanced

.seealso: `VecCreate()`, `VecGhostGetLocalForm()`, `VecGhostRestoreLocalForm()`,
          `VecCreateGhost()`, `VecCreateSeqWithArray()`, `VecCreateMPIWithArray()`,
          `VecCreateGhostWithArray()`, `VecCreateGhostBlock()`

@*/
PetscErrorCode VecCreateGhostBlockWithArray(MPI_Comm comm, PetscInt bs, PetscInt n, PetscInt N, PetscInt nghost, const PetscInt ghosts[], const PetscScalar array[], Vec *vv) {
  Vec_MPI               *w;
  PetscScalar           *larray;
  IS                     from, to;
  ISLocalToGlobalMapping ltog;
  PetscInt               rstart, i, nb, *indices;

  PetscFunctionBegin;
  *vv = NULL;

  PetscCheck(n != PETSC_DECIDE, PETSC_COMM_SELF, PETSC_ERR_ARG_OUTOFRANGE, "Must set local size");
  PetscCheck(nghost != PETSC_DECIDE, PETSC_COMM_SELF, PETSC_ERR_ARG_OUTOFRANGE, "Must set local ghost size");
  PetscCheck(nghost >= 0, PETSC_COMM_SELF, PETSC_ERR_ARG_OUTOFRANGE, "Ghost length must be >= 0");
  PetscCheck(n % bs == 0, PETSC_COMM_SELF, PETSC_ERR_ARG_INCOMP, "Local size must be a multiple of block size");
  PetscCall(PetscSplitOwnership(comm, &n, &N));
  /* Create global representation */
  PetscCall(VecCreate(comm, vv));
  PetscCall(VecSetSizes(*vv, n, N));
  PetscCall(VecSetBlockSize(*vv, bs));
  PetscCall(VecCreate_MPI_Private(*vv, PETSC_TRUE, nghost * bs, array));
  w = (Vec_MPI *)(*vv)->data;
  /* Create local representation */
  PetscCall(VecGetArray(*vv, &larray));
  PetscCall(VecCreateSeqWithArray(PETSC_COMM_SELF, bs, n + bs * nghost, larray, &w->localrep));
  PetscCall(VecRestoreArray(*vv, &larray));

  /*
       Create scatter context for scattering (updating) ghost values
  */
  PetscCall(ISCreateBlock(comm, bs, nghost, ghosts, PETSC_COPY_VALUES, &from));
  PetscCall(ISCreateStride(PETSC_COMM_SELF, bs * nghost, n, 1, &to));
  PetscCall(VecScatterCreate(*vv, from, w->localrep, to, &w->localupdate));
  PetscCall(ISDestroy(&to));
  PetscCall(ISDestroy(&from));

  /* set local to global mapping for ghosted vector */
  nb = n / bs;
  PetscCall(PetscMalloc1(nb + nghost, &indices));
  PetscCall(VecGetOwnershipRange(*vv, &rstart, NULL));
  rstart = rstart / bs;

  for (i = 0; i < nb; i++) indices[i] = rstart + i;
  for (i = 0; i < nghost; i++) indices[nb + i] = ghosts[i];

  PetscCall(ISLocalToGlobalMappingCreate(comm, bs, nb + nghost, indices, PETSC_OWN_POINTER, &ltog));
  PetscCall(VecSetLocalToGlobalMapping(*vv, ltog));
  PetscCall(ISLocalToGlobalMappingDestroy(&ltog));
  PetscFunctionReturn(0);
}

/*@
   VecCreateGhostBlock - Creates a parallel vector with ghost padding on each processor.
        The indexing of the ghost points is done with blocks.

   Collective

   Input Parameters:
+  comm - the MPI communicator to use
.  bs - the block size
.  n - local vector length
.  N - global vector length (or PETSC_DECIDE to have calculated if n is given)
.  nghost - number of local ghost blocks
-  ghosts - global indices of ghost blocks, counts are by block, not by individual index, these do not need to be in increasing order (sorted)

   Output Parameter:
.  vv - the global vector representation (without ghost points as part of vector)

   Notes:
   Use VecGhostGetLocalForm() to access the local, ghosted representation
   of the vector.

   n is the local vector size (total local size not the number of blocks) while nghost
   is the number of blocks in the ghost portion, i.e. the number of elements in the ghost
   portion is bs*nghost

   Level: advanced

.seealso: `VecCreateSeq()`, `VecCreate()`, `VecDuplicate()`, `VecDuplicateVecs()`, `VecCreateMPI()`,
          `VecGhostGetLocalForm()`, `VecGhostRestoreLocalForm()`,
          `VecCreateGhostWithArray()`, `VecCreateMPIWithArray()`, `VecCreateGhostBlockWithArray()`

@*/
PetscErrorCode VecCreateGhostBlock(MPI_Comm comm, PetscInt bs, PetscInt n, PetscInt N, PetscInt nghost, const PetscInt ghosts[], Vec *vv) {
  PetscFunctionBegin;
  PetscCall(VecCreateGhostBlockWithArray(comm, bs, n, N, nghost, ghosts, NULL, vv));
  PetscFunctionReturn(0);
}
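
/*
   A minimal sketch for the blocked variant (bs, sizes, and indices are placeholders; note that
   n counts scalar entries while nghost counts ghost blocks):

     Vec      x;
     PetscInt bs = 2, nlocal = 6, nghost = 1, ghosts[1] = {0};   // global index of a ghost block
     PetscCall(VecCreateGhostBlock(PETSC_COMM_WORLD, bs, nlocal, PETSC_DECIDE, nghost, ghosts, &x));
     PetscCall(VecGhostUpdateBegin(x, INSERT_VALUES, SCATTER_FORWARD));
     PetscCall(VecGhostUpdateEnd(x, INSERT_VALUES, SCATTER_FORWARD));
     PetscCall(VecDestroy(&x));
*/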
901