xref: /petsc/src/vec/vec/impls/mpi/pbvec.c (revision 66af8762ec03dbef0e079729eb2a1734a35ed7ff)
/*
   This file contains routines for Parallel vector operations.
 */
#include <petscsys.h>
#include <../src/vec/vec/impls/mpi/pvecimpl.h> /*I  "petscvec.h"   I*/

PETSC_INTERN PetscErrorCode VecView_MPI_Draw(Vec, PetscViewer);

PetscErrorCode VecPlaceArray_MPI(Vec vin, const PetscScalar *a)
{
  Vec_MPI *v = (Vec_MPI *)vin->data;

  PetscFunctionBegin;
  PetscCheck(!v->unplacedarray, PETSC_COMM_SELF, PETSC_ERR_ARG_WRONGSTATE, "VecPlaceArray() was already called on this vector, without a call to VecResetArray()");
  v->unplacedarray = v->array; /* save previous array so reset can bring it back */
  v->array         = (PetscScalar *)a;
  if (v->localrep) PetscCall(VecPlaceArray(v->localrep, a));
  PetscFunctionReturn(PETSC_SUCCESS);
}
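
/*
   A minimal usage sketch (not part of the original source) of the place/reset pairing implemented
   above; the buffer "work" and its size are illustrative and error handling is abbreviated.

     PetscScalar *work;
     PetscInt     nlocal;

     PetscCall(VecGetLocalSize(x, &nlocal));
     PetscCall(PetscMalloc1(nlocal, &work));
     PetscCall(VecPlaceArray(x, work));   // x now uses "work" as its storage
     // ... operate on x ...
     PetscCall(VecResetArray(x));         // restore the original storage before reusing x
     PetscCall(PetscFree(work));
*/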

PetscErrorCode VecDuplicate_MPI(Vec win, Vec *v)
{
  Vec_MPI     *vw, *w = (Vec_MPI *)win->data;
  PetscScalar *array;

  PetscFunctionBegin;
  PetscCall(VecCreateWithLayout_Private(win->map, v));

  PetscCall(VecCreate_MPI_Private(*v, PETSC_TRUE, w->nghost, NULL));
  vw           = (Vec_MPI *)(*v)->data;
  (*v)->ops[0] = win->ops[0];

  /* save local representation of the parallel vector (and scatter) if it exists */
  if (w->localrep) {
    PetscCall(VecGetArray(*v, &array));
    PetscCall(VecCreateSeqWithArray(PETSC_COMM_SELF, PetscAbs(win->map->bs), win->map->n + w->nghost, array, &vw->localrep));
    vw->localrep->ops[0] = w->localrep->ops[0];
    PetscCall(VecRestoreArray(*v, &array));

    vw->localupdate = w->localupdate;
    if (vw->localupdate) PetscCall(PetscObjectReference((PetscObject)vw->localupdate));
  }

  /* New vector should inherit stashing property of parent */
  (*v)->stash.donotstash   = win->stash.donotstash;
  (*v)->stash.ignorenegidx = win->stash.ignorenegidx;

  PetscCall(PetscObjectListDuplicate(((PetscObject)win)->olist, &((PetscObject)(*v))->olist));
  PetscCall(PetscFunctionListDuplicate(((PetscObject)win)->qlist, &((PetscObject)(*v))->qlist));

  (*v)->bstash.bs = win->bstash.bs;
  PetscFunctionReturn(PETSC_SUCCESS);
}

static PetscErrorCode VecSetOption_MPI(Vec V, VecOption op, PetscBool flag)
{
  Vec_MPI *v = (Vec_MPI *)V->data;

  PetscFunctionBegin;
  switch (op) {
  case VEC_IGNORE_OFF_PROC_ENTRIES:
    V->stash.donotstash = flag;
    break;
  case VEC_IGNORE_NEGATIVE_INDICES:
    V->stash.ignorenegidx = flag;
    break;
  case VEC_SUBSET_OFF_PROC_ENTRIES:
    v->assembly_subset = flag;              /* See the same logic in MatAssembly wrt MAT_SUBSET_OFF_PROC_ENTRIES */
    if (!v->assembly_subset) {              /* User indicates "do not reuse the communication pattern" */
      PetscCall(VecAssemblyReset_MPI(V));   /* Reset existing pattern to free memory */
      v->first_assembly_done = PETSC_FALSE; /* Mark the first assembly is not done */
    }
    break;
  }

  PetscFunctionReturn(PETSC_SUCCESS);
}
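
/*
   A hedged usage sketch (not in the original source) of the option handled above: the caller
   promises that later assemblies communicate with a subset of the ranks touched by the first
   assembly, so the communication pattern can be reused.  nsteps, m, idx, and vals are illustrative.

     PetscCall(VecSetOption(x, VEC_SUBSET_OFF_PROC_ENTRIES, PETSC_TRUE));
     for (PetscInt it = 0; it < nsteps; it++) {
       PetscCall(VecSetValues(x, m, idx, vals, ADD_VALUES));
       PetscCall(VecAssemblyBegin(x));
       PetscCall(VecAssemblyEnd(x));
     }
*/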

PetscErrorCode VecResetArray_MPI(Vec vin)
{
  Vec_MPI *v = (Vec_MPI *)vin->data;

  PetscFunctionBegin;
  v->array         = v->unplacedarray;
  v->unplacedarray = NULL;
  if (v->localrep) PetscCall(VecResetArray(v->localrep));
  PetscFunctionReturn(PETSC_SUCCESS);
}

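/* Each sender/receiver pair in the build-two-sided (BTS) assembly below uses up to four
   nonblocking messages per rank, matching the four tags and the req[] slots:
     req[0] scalar-stash indices (MPIU_INT,    hdr->count entries)
     req[1] scalar-stash values  (MPIU_SCALAR, hdr->count entries)
     req[2] block-stash indices  (MPIU_INT,    hdr->bcount entries)
     req[3] block-stash values   (MPIU_SCALAR, hdr->bcount*bs entries) */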
static PetscErrorCode VecAssemblySend_MPI_Private(MPI_Comm comm, const PetscMPIInt tag[], PetscMPIInt rankid, PetscMPIInt rank, void *sdata, MPI_Request req[], void *ctx)
{
  Vec                X   = (Vec)ctx;
  Vec_MPI           *x   = (Vec_MPI *)X->data;
  VecAssemblyHeader *hdr = (VecAssemblyHeader *)sdata;
  PetscInt           bs  = X->map->bs;

  PetscFunctionBegin;
  /* x->first_assembly_done indicates we are reusing a communication network. In that case, some
     messages can be empty, but we have to send them this time if we sent them before because the
     receiver is expecting them.
   */
  if (hdr->count || (x->first_assembly_done && x->sendptrs[rankid].ints)) {
    PetscCallMPI(MPI_Isend(x->sendptrs[rankid].ints, hdr->count, MPIU_INT, rank, tag[0], comm, &req[0]));
    PetscCallMPI(MPI_Isend(x->sendptrs[rankid].scalars, hdr->count, MPIU_SCALAR, rank, tag[1], comm, &req[1]));
  }
  if (hdr->bcount || (x->first_assembly_done && x->sendptrs[rankid].intb)) {
    PetscCallMPI(MPI_Isend(x->sendptrs[rankid].intb, hdr->bcount, MPIU_INT, rank, tag[2], comm, &req[2]));
    PetscCallMPI(MPI_Isend(x->sendptrs[rankid].scalarb, hdr->bcount * bs, MPIU_SCALAR, rank, tag[3], comm, &req[3]));
  }
  PetscFunctionReturn(PETSC_SUCCESS);
}

static PetscErrorCode VecAssemblyRecv_MPI_Private(MPI_Comm comm, const PetscMPIInt tag[], PetscMPIInt rank, void *rdata, MPI_Request req[], void *ctx)
{
  Vec                X   = (Vec)ctx;
  Vec_MPI           *x   = (Vec_MPI *)X->data;
  VecAssemblyHeader *hdr = (VecAssemblyHeader *)rdata;
  PetscInt           bs  = X->map->bs;
  VecAssemblyFrame  *frame;

  PetscFunctionBegin;
  PetscCall(PetscSegBufferGet(x->segrecvframe, 1, &frame));

  if (hdr->count) {
    PetscCall(PetscSegBufferGet(x->segrecvint, hdr->count, &frame->ints));
    PetscCallMPI(MPI_Irecv(frame->ints, hdr->count, MPIU_INT, rank, tag[0], comm, &req[0]));
    PetscCall(PetscSegBufferGet(x->segrecvscalar, hdr->count, &frame->scalars));
    PetscCallMPI(MPI_Irecv(frame->scalars, hdr->count, MPIU_SCALAR, rank, tag[1], comm, &req[1]));
    frame->pendings = 2;
  } else {
    frame->ints     = NULL;
    frame->scalars  = NULL;
    frame->pendings = 0;
  }

  if (hdr->bcount) {
    PetscCall(PetscSegBufferGet(x->segrecvint, hdr->bcount, &frame->intb));
    PetscCallMPI(MPI_Irecv(frame->intb, hdr->bcount, MPIU_INT, rank, tag[2], comm, &req[2]));
    PetscCall(PetscSegBufferGet(x->segrecvscalar, hdr->bcount * bs, &frame->scalarb));
    PetscCallMPI(MPI_Irecv(frame->scalarb, hdr->bcount * bs, MPIU_SCALAR, rank, tag[3], comm, &req[3]));
    frame->pendingb = 2;
  } else {
    frame->intb     = NULL;
    frame->scalarb  = NULL;
    frame->pendingb = 0;
  }
  PetscFunctionReturn(PETSC_SUCCESS);
}

static PetscErrorCode VecAssemblyBegin_MPI_BTS(Vec X)
{
  Vec_MPI *x = (Vec_MPI *)X->data;
  MPI_Comm comm;
  PetscInt i, j, jb, bs;

  PetscFunctionBegin;
  if (X->stash.donotstash) PetscFunctionReturn(PETSC_SUCCESS);

  PetscCall(PetscObjectGetComm((PetscObject)X, &comm));
  PetscCall(VecGetBlockSize(X, &bs));
  if (PetscDefined(USE_DEBUG)) {
    InsertMode addv;
    PetscCall(MPIU_Allreduce((PetscEnum *)&X->stash.insertmode, (PetscEnum *)&addv, 1, MPIU_ENUM, MPI_BOR, comm));
    PetscCheck(addv != (ADD_VALUES | INSERT_VALUES), comm, PETSC_ERR_ARG_NOTSAMETYPE, "Some processors inserted values while others added");
  }
  X->bstash.insertmode = X->stash.insertmode; /* Block stash implicitly tracks InsertMode of scalar stash */

  PetscCall(VecStashSortCompress_Private(&X->stash));
  PetscCall(VecStashSortCompress_Private(&X->bstash));

  if (!x->sendranks) {
    PetscMPIInt nowners, bnowners, *owners, *bowners;
    PetscInt    ntmp;
    PetscCall(VecStashGetOwnerList_Private(&X->stash, X->map, &nowners, &owners));
    PetscCall(VecStashGetOwnerList_Private(&X->bstash, X->map, &bnowners, &bowners));
    PetscCall(PetscMergeMPIIntArray(nowners, owners, bnowners, bowners, &ntmp, &x->sendranks));
    x->nsendranks = ntmp;
    PetscCall(PetscFree(owners));
    PetscCall(PetscFree(bowners));
    PetscCall(PetscMalloc1(x->nsendranks, &x->sendhdr));
    PetscCall(PetscCalloc1(x->nsendranks, &x->sendptrs));
  }
  for (i = 0, j = 0, jb = 0; i < x->nsendranks; i++) {
    PetscMPIInt rank         = x->sendranks[i];
    x->sendhdr[i].insertmode = X->stash.insertmode;
    /* Initialize pointers for non-empty stashes the first time around.  Subsequent assemblies with
     * VEC_SUBSET_OFF_PROC_ENTRIES will leave the old pointers (dangling because the stash has been collected) when
     * there is nothing new to send, so that size-zero messages get sent instead. */
    x->sendhdr[i].count = 0;
    if (X->stash.n) {
      x->sendptrs[i].ints    = &X->stash.idx[j];
      x->sendptrs[i].scalars = &X->stash.array[j];
      for (; j < X->stash.n && X->stash.idx[j] < X->map->range[rank + 1]; j++) x->sendhdr[i].count++;
    }
    x->sendhdr[i].bcount = 0;
    if (X->bstash.n) {
      x->sendptrs[i].intb    = &X->bstash.idx[jb];
      x->sendptrs[i].scalarb = &X->bstash.array[jb * bs];
      for (; jb < X->bstash.n && X->bstash.idx[jb] * bs < X->map->range[rank + 1]; jb++) x->sendhdr[i].bcount++;
    }
  }

  if (!x->segrecvint) PetscCall(PetscSegBufferCreate(sizeof(PetscInt), 1000, &x->segrecvint));
  if (!x->segrecvscalar) PetscCall(PetscSegBufferCreate(sizeof(PetscScalar), 1000, &x->segrecvscalar));
  if (!x->segrecvframe) PetscCall(PetscSegBufferCreate(sizeof(VecAssemblyFrame), 50, &x->segrecvframe));
  if (x->first_assembly_done) { /* this is not the first assembly */
    PetscMPIInt tag[4];
    for (i = 0; i < 4; i++) PetscCall(PetscCommGetNewTag(comm, &tag[i]));
    for (i = 0; i < x->nsendranks; i++) PetscCall(VecAssemblySend_MPI_Private(comm, tag, i, x->sendranks[i], x->sendhdr + i, x->sendreqs + 4 * i, X));
    for (i = 0; i < x->nrecvranks; i++) PetscCall(VecAssemblyRecv_MPI_Private(comm, tag, x->recvranks[i], x->recvhdr + i, x->recvreqs + 4 * i, X));
    x->use_status = PETSC_TRUE;
  } else { /* First time assembly */
    PetscCall(PetscCommBuildTwoSidedFReq(comm, 3, MPIU_INT, x->nsendranks, x->sendranks, (PetscInt *)x->sendhdr, &x->nrecvranks, &x->recvranks, &x->recvhdr, 4, &x->sendreqs, &x->recvreqs, VecAssemblySend_MPI_Private, VecAssemblyRecv_MPI_Private, X));
    x->use_status = PETSC_FALSE;
  }

  /* The first_assembly_done flag is only meaningful when x->assembly_subset is set:
     once assembly_subset is set, record that the first assembly has been completed so that
     the communication pattern built above can be reused by later assemblies.
   */
  x->first_assembly_done = x->assembly_subset;

  {
    PetscInt nstash, reallocs;
    PetscCall(VecStashGetInfo_Private(&X->stash, &nstash, &reallocs));
    PetscCall(PetscInfo(X, "Stash has %" PetscInt_FMT " entries, uses %" PetscInt_FMT " mallocs.\n", nstash, reallocs));
    PetscCall(VecStashGetInfo_Private(&X->bstash, &nstash, &reallocs));
    PetscCall(PetscInfo(X, "Block-Stash has %" PetscInt_FMT " entries, uses %" PetscInt_FMT " mallocs.\n", nstash, reallocs));
  }
  PetscFunctionReturn(PETSC_SUCCESS);
}
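
/*
   A small sketch (not part of the original source) of the calling sequence that drives
   VecAssemblyBegin_MPI_BTS()/VecAssemblyEnd_MPI_BTS(); the row index and value are illustrative.

     PetscInt    row = 0;             // may be owned by another rank; it is then stashed
     PetscScalar one = 1.0;

     PetscCall(VecSetValues(x, 1, &row, &one, ADD_VALUES));
     PetscCall(VecAssemblyBegin(x));  // posts the sends/receives set up above
     PetscCall(VecAssemblyEnd(x));    // completes communication and applies received values
*/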

static PetscErrorCode VecAssemblyEnd_MPI_BTS(Vec X)
{
  Vec_MPI          *x  = (Vec_MPI *)X->data;
  PetscInt          bs = X->map->bs;
  PetscMPIInt       npending, *some_indices, r;
  MPI_Status       *some_statuses;
  PetscScalar      *xarray;
  VecAssemblyFrame *frame;

  PetscFunctionBegin;
  if (X->stash.donotstash) {
    X->stash.insertmode  = NOT_SET_VALUES;
    X->bstash.insertmode = NOT_SET_VALUES;
    PetscFunctionReturn(PETSC_SUCCESS);
  }

  PetscCheck(x->segrecvframe, PETSC_COMM_SELF, PETSC_ERR_PLIB, "Missing segrecvframe! Probably you forgot to call VecAssemblyBegin first");
  PetscCall(VecGetArray(X, &xarray));
  PetscCall(PetscSegBufferExtractInPlace(x->segrecvframe, &frame));
  PetscCall(PetscMalloc2(4 * x->nrecvranks, &some_indices, x->use_status ? 4 * x->nrecvranks : 0, &some_statuses));
  for (r = 0, npending = 0; r < x->nrecvranks; r++) npending += frame[r].pendings + frame[r].pendingb;
  while (npending > 0) {
    PetscMPIInt ndone = 0, ii;
    /* Filling MPI_Status fields requires some resources from the MPI library.  We skip it on the first assembly, or
     * when VEC_SUBSET_OFF_PROC_ENTRIES has not been set, because we could exchange exact sizes in the initial
     * rendezvous.  When the rendezvous is elided, however, we use MPI_Status to get actual message lengths, so that
     * subsequent assembly can set a proper subset of the values. */
    PetscCallMPI(MPI_Waitsome(4 * x->nrecvranks, x->recvreqs, &ndone, some_indices, x->use_status ? some_statuses : MPI_STATUSES_IGNORE));
    for (ii = 0; ii < ndone; ii++) {
      PetscInt     i     = some_indices[ii] / 4, j, k;
      InsertMode   imode = (InsertMode)x->recvhdr[i].insertmode;
      PetscInt    *recvint;
      PetscScalar *recvscalar;
      PetscBool    intmsg   = (PetscBool)(some_indices[ii] % 2 == 0);
      PetscBool    blockmsg = (PetscBool)((some_indices[ii] % 4) / 2 == 1);
      npending--;
      if (!blockmsg) { /* Scalar stash */
        PetscMPIInt count;
        if (--frame[i].pendings > 0) continue;
        if (x->use_status) {
          PetscCallMPI(MPI_Get_count(&some_statuses[ii], intmsg ? MPIU_INT : MPIU_SCALAR, &count));
        } else count = x->recvhdr[i].count;
        for (j = 0, recvint = frame[i].ints, recvscalar = frame[i].scalars; j < count; j++, recvint++) {
          PetscInt loc = *recvint - X->map->rstart;
          PetscCheck(*recvint >= X->map->rstart && X->map->rend > *recvint, PETSC_COMM_SELF, PETSC_ERR_PLIB, "Received vector entry %" PetscInt_FMT " out of local range [%" PetscInt_FMT ",%" PetscInt_FMT ")", *recvint, X->map->rstart, X->map->rend);
          switch (imode) {
          case ADD_VALUES:
            xarray[loc] += *recvscalar++;
            break;
          case INSERT_VALUES:
            xarray[loc] = *recvscalar++;
            break;
          default:
            SETERRQ(PETSC_COMM_SELF, PETSC_ERR_SUP, "Insert mode not supported 0x%x", imode);
          }
        }
      } else { /* Block stash */
        PetscMPIInt count;
        if (--frame[i].pendingb > 0) continue;
        if (x->use_status) {
          PetscCallMPI(MPI_Get_count(&some_statuses[ii], intmsg ? MPIU_INT : MPIU_SCALAR, &count));
          if (!intmsg) count /= bs; /* Convert from number of scalars to number of blocks */
        } else count = x->recvhdr[i].bcount;
        for (j = 0, recvint = frame[i].intb, recvscalar = frame[i].scalarb; j < count; j++, recvint++) {
          PetscInt loc = (*recvint) * bs - X->map->rstart;
          switch (imode) {
          case ADD_VALUES:
            for (k = loc; k < loc + bs; k++) xarray[k] += *recvscalar++;
            break;
          case INSERT_VALUES:
            for (k = loc; k < loc + bs; k++) xarray[k] = *recvscalar++;
            break;
          default:
            SETERRQ(PETSC_COMM_SELF, PETSC_ERR_SUP, "Insert mode not supported 0x%x", imode);
          }
        }
      }
    }
  }
  PetscCall(VecRestoreArray(X, &xarray));
  PetscCallMPI(MPI_Waitall(4 * x->nsendranks, x->sendreqs, MPI_STATUSES_IGNORE));
  PetscCall(PetscFree2(some_indices, some_statuses));
  if (x->assembly_subset) {
    PetscCall(PetscSegBufferExtractInPlace(x->segrecvint, NULL));
    PetscCall(PetscSegBufferExtractInPlace(x->segrecvscalar, NULL));
  } else {
    PetscCall(VecAssemblyReset_MPI(X));
  }

  X->stash.insertmode  = NOT_SET_VALUES;
  X->bstash.insertmode = NOT_SET_VALUES;
  PetscCall(VecStashScatterEnd_Private(&X->stash));
  PetscCall(VecStashScatterEnd_Private(&X->bstash));
  PetscFunctionReturn(PETSC_SUCCESS);
}

PetscErrorCode VecAssemblyReset_MPI(Vec X)
{
  Vec_MPI *x = (Vec_MPI *)X->data;

  PetscFunctionBegin;
  PetscCall(PetscFree(x->sendreqs));
  PetscCall(PetscFree(x->recvreqs));
  PetscCall(PetscFree(x->sendranks));
  PetscCall(PetscFree(x->recvranks));
  PetscCall(PetscFree(x->sendhdr));
  PetscCall(PetscFree(x->recvhdr));
  PetscCall(PetscFree(x->sendptrs));
  PetscCall(PetscSegBufferDestroy(&x->segrecvint));
  PetscCall(PetscSegBufferDestroy(&x->segrecvscalar));
  PetscCall(PetscSegBufferDestroy(&x->segrecvframe));
  PetscFunctionReturn(PETSC_SUCCESS);
}

static PetscErrorCode VecSetFromOptions_MPI(Vec X, PetscOptionItems *PetscOptionsObject)
{
#if !defined(PETSC_HAVE_MPIUNI)
  PetscBool flg = PETSC_FALSE, set;

  PetscFunctionBegin;
  PetscOptionsHeadBegin(PetscOptionsObject, "VecMPI Options");
  PetscCall(PetscOptionsBool("-vec_assembly_legacy", "Use MPI 1 version of assembly", "", flg, &flg, &set));
  if (set) {
    X->ops->assemblybegin = flg ? VecAssemblyBegin_MPI : VecAssemblyBegin_MPI_BTS;
    X->ops->assemblyend   = flg ? VecAssemblyEnd_MPI : VecAssemblyEnd_MPI_BTS;
  }
  PetscOptionsHeadEnd();
#else
  PetscFunctionBegin;
  X->ops->assemblybegin = VecAssemblyBegin_MPI;
  X->ops->assemblyend   = VecAssemblyEnd_MPI;
#endif
  PetscFunctionReturn(PETSC_SUCCESS);
}
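
/*
   A hedged sketch (not in the original source) of selecting the legacy assembly path handled
   above, either on the command line (the application name is illustrative):

     ./app -vec_assembly_legacy

   or programmatically before VecSetFromOptions() is called:

     PetscCall(PetscOptionsSetValue(NULL, "-vec_assembly_legacy", "true"));
     PetscCall(VecSetFromOptions(x));
*/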

static struct _VecOps DvOps = {PetscDesignatedInitializer(duplicate, VecDuplicate_MPI), /* 1 */
                               PetscDesignatedInitializer(duplicatevecs, VecDuplicateVecs_Default),
                               PetscDesignatedInitializer(destroyvecs, VecDestroyVecs_Default),
                               PetscDesignatedInitializer(dot, VecDot_MPI),
                               PetscDesignatedInitializer(mdot, VecMDot_MPI),
                               PetscDesignatedInitializer(norm, VecNorm_MPI),
                               PetscDesignatedInitializer(tdot, VecTDot_MPI),
                               PetscDesignatedInitializer(mtdot, VecMTDot_MPI),
                               PetscDesignatedInitializer(scale, VecScale_Seq),
                               PetscDesignatedInitializer(copy, VecCopy_Seq), /* 10 */
                               PetscDesignatedInitializer(set, VecSet_Seq),
                               PetscDesignatedInitializer(swap, VecSwap_Seq),
                               PetscDesignatedInitializer(axpy, VecAXPY_Seq),
                               PetscDesignatedInitializer(axpby, VecAXPBY_Seq),
                               PetscDesignatedInitializer(maxpy, VecMAXPY_Seq),
                               PetscDesignatedInitializer(aypx, VecAYPX_Seq),
                               PetscDesignatedInitializer(waxpy, VecWAXPY_Seq),
                               PetscDesignatedInitializer(axpbypcz, VecAXPBYPCZ_Seq),
                               PetscDesignatedInitializer(pointwisemult, VecPointwiseMult_Seq),
                               PetscDesignatedInitializer(pointwisedivide, VecPointwiseDivide_Seq),
                               PetscDesignatedInitializer(setvalues, VecSetValues_MPI), /* 20 */
                               PetscDesignatedInitializer(assemblybegin, VecAssemblyBegin_MPI_BTS),
                               PetscDesignatedInitializer(assemblyend, VecAssemblyEnd_MPI_BTS),
                               PetscDesignatedInitializer(getarray, NULL),
                               PetscDesignatedInitializer(getsize, VecGetSize_MPI),
                               PetscDesignatedInitializer(getlocalsize, VecGetSize_Seq),
                               PetscDesignatedInitializer(restorearray, NULL),
                               PetscDesignatedInitializer(max, VecMax_MPI),
                               PetscDesignatedInitializer(min, VecMin_MPI),
                               PetscDesignatedInitializer(setrandom, VecSetRandom_Seq),
                               PetscDesignatedInitializer(setoption, VecSetOption_MPI),
                               PetscDesignatedInitializer(setvaluesblocked, VecSetValuesBlocked_MPI),
                               PetscDesignatedInitializer(destroy, VecDestroy_MPI),
                               PetscDesignatedInitializer(view, VecView_MPI),
                               PetscDesignatedInitializer(placearray, VecPlaceArray_MPI),
                               PetscDesignatedInitializer(replacearray, VecReplaceArray_Seq),
                               PetscDesignatedInitializer(dot_local, VecDot_Seq),
                               PetscDesignatedInitializer(tdot_local, VecTDot_Seq),
                               PetscDesignatedInitializer(norm_local, VecNorm_Seq),
                               PetscDesignatedInitializer(mdot_local, VecMDot_Seq),
                               PetscDesignatedInitializer(mtdot_local, VecMTDot_Seq),
                               PetscDesignatedInitializer(load, VecLoad_Default),
                               PetscDesignatedInitializer(reciprocal, VecReciprocal_Default),
                               PetscDesignatedInitializer(conjugate, VecConjugate_Seq),
                               PetscDesignatedInitializer(setlocaltoglobalmapping, NULL),
                               PetscDesignatedInitializer(setvalueslocal, NULL),
                               PetscDesignatedInitializer(resetarray, VecResetArray_MPI),
                               PetscDesignatedInitializer(setfromoptions, VecSetFromOptions_MPI), /* set from options */
                               PetscDesignatedInitializer(maxpointwisedivide, VecMaxPointwiseDivide_Seq),
                               PetscDesignatedInitializer(pointwisemax, VecPointwiseMax_Seq),
                               PetscDesignatedInitializer(pointwisemaxabs, VecPointwiseMaxAbs_Seq),
                               PetscDesignatedInitializer(pointwisemin, VecPointwiseMin_Seq),
                               PetscDesignatedInitializer(getvalues, VecGetValues_MPI),
                               PetscDesignatedInitializer(sqrt, NULL),
                               PetscDesignatedInitializer(abs, NULL),
                               PetscDesignatedInitializer(exp, NULL),
                               PetscDesignatedInitializer(log, NULL),
                               PetscDesignatedInitializer(shift, NULL),
                               PetscDesignatedInitializer(create, NULL), /* really? */
                               PetscDesignatedInitializer(stridegather, VecStrideGather_Default),
                               PetscDesignatedInitializer(stridescatter, VecStrideScatter_Default),
                               PetscDesignatedInitializer(dotnorm2, NULL),
                               PetscDesignatedInitializer(getsubvector, NULL),
                               PetscDesignatedInitializer(restoresubvector, NULL),
                               PetscDesignatedInitializer(getarrayread, NULL),
                               PetscDesignatedInitializer(restorearrayread, NULL),
                               PetscDesignatedInitializer(stridesubsetgather, VecStrideSubSetGather_Default),
                               PetscDesignatedInitializer(stridesubsetscatter, VecStrideSubSetScatter_Default),
                               PetscDesignatedInitializer(viewnative, VecView_MPI),
                               PetscDesignatedInitializer(loadnative, NULL),
                               PetscDesignatedInitializer(createlocalvector, NULL),
                               PetscDesignatedInitializer(getlocalvector, NULL),
                               PetscDesignatedInitializer(restorelocalvector, NULL),
                               PetscDesignatedInitializer(getlocalvectorread, NULL),
                               PetscDesignatedInitializer(restorelocalvectorread, NULL),
                               PetscDesignatedInitializer(bindtocpu, NULL),
                               PetscDesignatedInitializer(getarraywrite, NULL),
                               PetscDesignatedInitializer(restorearraywrite, NULL),
                               PetscDesignatedInitializer(getarrayandmemtype, NULL),
                               PetscDesignatedInitializer(restorearrayandmemtype, NULL),
                               PetscDesignatedInitializer(getarrayreadandmemtype, NULL),
                               PetscDesignatedInitializer(restorearrayreadandmemtype, NULL),
                               PetscDesignatedInitializer(getarraywriteandmemtype, NULL),
                               PetscDesignatedInitializer(restorearraywriteandmemtype, NULL),
                               PetscDesignatedInitializer(concatenate, NULL),
                               PetscDesignatedInitializer(sum, NULL),
                               PetscDesignatedInitializer(setpreallocationcoo, VecSetPreallocationCOO_MPI),
                               PetscDesignatedInitializer(setvaluescoo, VecSetValuesCOO_MPI),
                               PetscDesignatedInitializer(errorwnorm, NULL)};

/*
    VecCreate_MPI_Private - Basic create routine called by VecCreate_MPI() (i.e. VecCreateMPI()),
    VecCreateMPIWithArray(), VecCreate_Shared() (i.e. VecCreateShared()), VecCreateGhost(),
    VecCreateGhostWithArray(), VecDuplicate_MPI(), and VecDuplicate_Shared()

    If alloc is true and array is NULL then this routine allocates the space, otherwise
    no space is allocated.
*/
PetscErrorCode VecCreate_MPI_Private(Vec v, PetscBool alloc, PetscInt nghost, const PetscScalar array[])
{
  Vec_MPI *s;

  PetscFunctionBegin;
  PetscCall(PetscNew(&s));
  v->data        = (void *)s;
  v->ops[0]      = DvOps;
  s->nghost      = nghost;
  v->petscnative = PETSC_TRUE;
  if (array) v->offloadmask = PETSC_OFFLOAD_CPU;

  PetscCall(PetscLayoutSetUp(v->map));

  s->array           = (PetscScalar *)array;
  s->array_allocated = NULL;
  if (alloc && !array) {
    PetscInt n = v->map->n + nghost;
    PetscCall(PetscCalloc1(n, &s->array));
    s->array_allocated = s->array;
    PetscCall(PetscObjectComposedDataSetReal((PetscObject)v, NormIds[NORM_2], 0));
    PetscCall(PetscObjectComposedDataSetReal((PetscObject)v, NormIds[NORM_1], 0));
    PetscCall(PetscObjectComposedDataSetReal((PetscObject)v, NormIds[NORM_INFINITY], 0));
  }

  /* By default parallel vectors do not have local representation */
  s->localrep    = NULL;
  s->localupdate = NULL;

  v->stash.insertmode  = NOT_SET_VALUES;
  v->bstash.insertmode = NOT_SET_VALUES;
  /* create the stashes. The block-size for bstash is set later when
     VecSetValuesBlocked is called.
  */
  PetscCall(VecStashCreate_Private(PetscObjectComm((PetscObject)v), 1, &v->stash));
  PetscCall(VecStashCreate_Private(PetscObjectComm((PetscObject)v), PetscAbs(v->map->bs), &v->bstash));

#if defined(PETSC_HAVE_MATLAB)
  PetscCall(PetscObjectComposeFunction((PetscObject)v, "PetscMatlabEnginePut_C", VecMatlabEnginePut_Default));
  PetscCall(PetscObjectComposeFunction((PetscObject)v, "PetscMatlabEngineGet_C", VecMatlabEngineGet_Default));
#endif
  PetscCall(PetscObjectChangeTypeName((PetscObject)v, VECMPI));
  PetscFunctionReturn(PETSC_SUCCESS);
}

/*MC
   VECMPI - VECMPI = "mpi" - The basic parallel vector

   Options Database Key:
. -vec_type mpi - sets the vector type to `VECMPI` during a call to `VecSetFromOptions()`

  Level: beginner

.seealso: [](ch_vectors), `Vec`, `VecType`, `VecCreate()`, `VecSetType()`, `VecSetFromOptions()`, `VecCreateMPIWithArray()`, `VecCreateMPI()`
M*/

PetscErrorCode VecCreate_MPI(Vec vv)
{
  PetscFunctionBegin;
  PetscCall(VecCreate_MPI_Private(vv, PETSC_TRUE, 0, NULL));
  PetscFunctionReturn(PETSC_SUCCESS);
}
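
/*
   A minimal creation sketch (not part of the original source); the sizes are illustrative.
   VecCreateMPI() is the convenience front end that, per the comment above, ends up in
   VecCreate_MPI_Private().

     Vec x;

     PetscCall(VecCreateMPI(PETSC_COMM_WORLD, PETSC_DECIDE, 100, &x));
     PetscCall(VecSet(x, 0.0));
     PetscCall(VecDestroy(&x));
*/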

/*MC
   VECSTANDARD = "standard" - A `VECSEQ` on one process and `VECMPI` on more than one process

   Options Database Key:
. -vec_type standard - sets the vector type to `VECSTANDARD` during a call to `VecSetFromOptions()`

  Level: beginner

.seealso: [](ch_vectors), `Vec`, `VecType`, `VecCreateSeq()`, `VecCreateMPI()`
M*/

PETSC_EXTERN PetscErrorCode VecCreate_Standard(Vec v)
{
  PetscMPIInt size;

  PetscFunctionBegin;
  PetscCallMPI(MPI_Comm_size(PetscObjectComm((PetscObject)v), &size));
  if (size == 1) {
    PetscCall(VecSetType(v, VECSEQ));
  } else {
    PetscCall(VecSetType(v, VECMPI));
  }
  PetscFunctionReturn(PETSC_SUCCESS);
}

/*@C
  VecCreateMPIWithArray - Creates a parallel, array-style vector,
  where the user provides the array space to store the vector values.

  Collective

  Input Parameters:
+ comm  - the MPI communicator to use
. bs    - block size, same meaning as `VecSetBlockSize()`
. n     - local vector length, cannot be `PETSC_DECIDE`
. N     - global vector length (or `PETSC_DETERMINE` to have it calculated)
- array - the user-provided array to store the vector values

  Output Parameter:
. vv - the vector

  Level: intermediate

  Notes:
  Use `VecDuplicate()` or `VecDuplicateVecs()` to form additional vectors of the
  same type as an existing vector.

  If the user-provided array is `NULL`, then `VecPlaceArray()` can be used
  at a later stage to SET the array for storing the vector values.

  PETSc does NOT free `array` when the vector is destroyed via `VecDestroy()`.

  The user should not free `array` until the vector is destroyed.

.seealso: [](ch_vectors), `Vec`, `VecType`, `VecCreateSeqWithArray()`, `VecCreate()`, `VecDuplicate()`, `VecDuplicateVecs()`, `VecCreateGhost()`,
          `VecCreateMPI()`, `VecCreateGhostWithArray()`, `VecPlaceArray()`
@*/
PetscErrorCode VecCreateMPIWithArray(MPI_Comm comm, PetscInt bs, PetscInt n, PetscInt N, const PetscScalar array[], Vec *vv)
{
  PetscFunctionBegin;
  PetscCheck(n != PETSC_DECIDE, PETSC_COMM_SELF, PETSC_ERR_ARG_OUTOFRANGE, "Must set local size of vector");
  PetscCall(PetscSplitOwnership(comm, &n, &N));
  PetscCall(VecCreate(comm, vv));
  PetscCall(VecSetSizes(*vv, n, N));
  PetscCall(VecSetBlockSize(*vv, bs));
  PetscCall(VecCreate_MPI_Private(*vv, PETSC_FALSE, 0, array));
  PetscFunctionReturn(PETSC_SUCCESS);
}
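
/*
   A minimal usage sketch (not part of the original source) for VecCreateMPIWithArray(); the
   local size is illustrative, and the user retains ownership of the buffer.

     PetscInt     n = 5;
     PetscScalar *array;
     Vec          x;

     PetscCall(PetscMalloc1(n, &array));
     PetscCall(VecCreateMPIWithArray(PETSC_COMM_WORLD, 1, n, PETSC_DETERMINE, array, &x));
     // ... use x ...
     PetscCall(VecDestroy(&x));   // does not free "array"
     PetscCall(PetscFree(array));
*/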

/*@C
  VecCreateGhostWithArray - Creates a parallel vector with ghost padding on each processor;
  the caller allocates the array space.

  Collective

  Input Parameters:
+ comm   - the MPI communicator to use
. n      - local vector length
. N      - global vector length (or `PETSC_DETERMINE` to have it calculated if `n` is given)
. nghost - number of local ghost points
. ghosts - global indices of ghost points (or `NULL` if not needed), these do not need to be in increasing order (sorted)
- array  - the space to store the vector values (of length `n + nghost`)

  Output Parameter:
. vv - the global vector representation (without ghost points as part of vector)

  Level: advanced

  Notes:
  Use `VecGhostGetLocalForm()` to access the local, ghosted representation
  of the vector.

  This also automatically sets the `ISLocalToGlobalMapping()` for this vector.

.seealso: [](ch_vectors), `Vec`, `VecType`, `VecCreate()`, `VecGhostGetLocalForm()`, `VecGhostRestoreLocalForm()`,
          `VecCreateGhost()`, `VecCreateSeqWithArray()`, `VecCreateMPIWithArray()`,
          `VecCreateGhostBlock()`, `VecCreateGhostBlockWithArray()`, `VecMPISetGhost()`
@*/
PetscErrorCode VecCreateGhostWithArray(MPI_Comm comm, PetscInt n, PetscInt N, PetscInt nghost, const PetscInt ghosts[], const PetscScalar array[], Vec *vv)
{
  Vec_MPI               *w;
  PetscScalar           *larray;
  IS                     from, to;
  ISLocalToGlobalMapping ltog;
  PetscInt               rstart, i, *indices;

  PetscFunctionBegin;
  *vv = NULL;

  PetscCheck(n != PETSC_DECIDE, PETSC_COMM_SELF, PETSC_ERR_ARG_OUTOFRANGE, "Must set local size");
  PetscCheck(nghost != PETSC_DECIDE, PETSC_COMM_SELF, PETSC_ERR_ARG_OUTOFRANGE, "Must set local ghost size");
  PetscCheck(nghost >= 0, PETSC_COMM_SELF, PETSC_ERR_ARG_OUTOFRANGE, "Ghost length must be >= 0");
  PetscCall(PetscSplitOwnership(comm, &n, &N));
  /* Create global representation */
  PetscCall(VecCreate(comm, vv));
  PetscCall(VecSetSizes(*vv, n, N));
  PetscCall(VecCreate_MPI_Private(*vv, PETSC_TRUE, nghost, array));
  w = (Vec_MPI *)(*vv)->data;
  /* Create local representation */
  PetscCall(VecGetArray(*vv, &larray));
  PetscCall(VecCreateSeqWithArray(PETSC_COMM_SELF, 1, n + nghost, larray, &w->localrep));
  PetscCall(VecRestoreArray(*vv, &larray));

  /*
       Create scatter context for scattering (updating) ghost values
  */
  PetscCall(ISCreateGeneral(comm, nghost, ghosts, PETSC_COPY_VALUES, &from));
  PetscCall(ISCreateStride(PETSC_COMM_SELF, nghost, n, 1, &to));
  PetscCall(VecScatterCreate(*vv, from, w->localrep, to, &w->localupdate));
  PetscCall(ISDestroy(&to));
  PetscCall(ISDestroy(&from));

  /* set local to global mapping for ghosted vector */
  PetscCall(PetscMalloc1(n + nghost, &indices));
  PetscCall(VecGetOwnershipRange(*vv, &rstart, NULL));
  for (i = 0; i < n; i++) indices[i] = rstart + i;
  for (i = 0; i < nghost; i++) indices[n + i] = ghosts[i];
  PetscCall(ISLocalToGlobalMappingCreate(comm, 1, n + nghost, indices, PETSC_OWN_POINTER, &ltog));
  PetscCall(VecSetLocalToGlobalMapping(*vv, ltog));
  PetscCall(ISLocalToGlobalMappingDestroy(&ltog));
  PetscFunctionReturn(PETSC_SUCCESS);
}

/*@
  VecCreateGhost - Creates a parallel vector with ghost padding on each processor.

  Collective

  Input Parameters:
+ comm   - the MPI communicator to use
. n      - local vector length
. N      - global vector length (or `PETSC_DETERMINE` to have it calculated if `n` is given)
. nghost - number of local ghost points
- ghosts - global indices of ghost points, these do not need to be in increasing order (sorted)

  Output Parameter:
. vv - the global vector representation (without ghost points as part of vector)

  Level: advanced

  Notes:
  Use `VecGhostGetLocalForm()` to access the local, ghosted representation
  of the vector.

  This also automatically sets the `ISLocalToGlobalMapping()` for this vector.

.seealso: [](ch_vectors), `Vec`, `VecType`, `VecCreateSeq()`, `VecCreate()`, `VecDuplicate()`, `VecDuplicateVecs()`, `VecCreateMPI()`,
          `VecGhostGetLocalForm()`, `VecGhostRestoreLocalForm()`, `VecGhostUpdateBegin()`,
          `VecCreateGhostWithArray()`, `VecCreateMPIWithArray()`, `VecGhostUpdateEnd()`,
          `VecCreateGhostBlock()`, `VecCreateGhostBlockWithArray()`, `VecMPISetGhost()`

@*/
PetscErrorCode VecCreateGhost(MPI_Comm comm, PetscInt n, PetscInt N, PetscInt nghost, const PetscInt ghosts[], Vec *vv)
{
  PetscFunctionBegin;
  PetscCall(VecCreateGhostWithArray(comm, n, N, nghost, ghosts, NULL, vv));
  PetscFunctionReturn(PETSC_SUCCESS);
}
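
/*
   A usage sketch (not part of the original source) of the typical ghost-update cycle; the local
   size and ghost indices are purely illustrative.

     PetscInt ghosts[] = {0};   // one ghost entry per process, global index 0
     Vec      x, xlocal;

     PetscCall(VecCreateGhost(PETSC_COMM_WORLD, 2, PETSC_DETERMINE, 1, ghosts, &x));
     // ... set owned entries with VecSetValues()/VecAssemblyBegin()/VecAssemblyEnd() ...
     PetscCall(VecGhostUpdateBegin(x, INSERT_VALUES, SCATTER_FORWARD)); // fill the ghost slots
     PetscCall(VecGhostUpdateEnd(x, INSERT_VALUES, SCATTER_FORWARD));
     PetscCall(VecGhostGetLocalForm(x, &xlocal));   // sequential form of length n + nghost
     // ... read owned and ghost values from xlocal ...
     PetscCall(VecGhostRestoreLocalForm(x, &xlocal));
     PetscCall(VecDestroy(&x));
*/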

/*@
  VecMPISetGhost - Sets the ghost points for an MPI ghost vector

  Collective

  Input Parameters:
+ vv     - the MPI vector
. nghost - number of local ghost points
- ghosts - global indices of ghost points, these do not need to be in increasing order (sorted)

  Level: advanced

  Notes:
  Use `VecGhostGetLocalForm()` to access the local, ghosted representation
  of the vector.

  This also automatically sets the `ISLocalToGlobalMapping()` for this vector.

  You must call this AFTER you have set the type of the vector (with `VecSetType()`) and the size (with `VecSetSizes()`).

.seealso: [](ch_vectors), `Vec`, `VecType`, `VecCreateSeq()`, `VecCreate()`, `VecDuplicate()`, `VecDuplicateVecs()`, `VecCreateMPI()`,
          `VecGhostGetLocalForm()`, `VecGhostRestoreLocalForm()`, `VecGhostUpdateBegin()`,
          `VecCreateGhostWithArray()`, `VecCreateMPIWithArray()`, `VecGhostUpdateEnd()`,
          `VecCreateGhostBlock()`, `VecCreateGhostBlockWithArray()`
@*/
PetscErrorCode VecMPISetGhost(Vec vv, PetscInt nghost, const PetscInt ghosts[])
{
  PetscBool flg;

  PetscFunctionBegin;
  PetscCall(PetscObjectTypeCompare((PetscObject)vv, VECMPI, &flg));
  /* if already fully existent VECMPI then basically destroy it and rebuild with ghosting */
  if (flg) {
    PetscInt               n, N;
    Vec_MPI               *w;
    PetscScalar           *larray;
    IS                     from, to;
    ISLocalToGlobalMapping ltog;
    PetscInt               rstart, i, *indices;
    MPI_Comm               comm;

    PetscCall(PetscObjectGetComm((PetscObject)vv, &comm));
    n = vv->map->n;
    N = vv->map->N;
    PetscUseTypeMethod(vv, destroy);
    PetscCall(VecSetSizes(vv, n, N));
    PetscCall(VecCreate_MPI_Private(vv, PETSC_TRUE, nghost, NULL));
    w = (Vec_MPI *)(vv)->data;
    /* Create local representation */
    PetscCall(VecGetArray(vv, &larray));
    PetscCall(VecCreateSeqWithArray(PETSC_COMM_SELF, 1, n + nghost, larray, &w->localrep));
    PetscCall(VecRestoreArray(vv, &larray));

    /*
     Create scatter context for scattering (updating) ghost values
     */
    PetscCall(ISCreateGeneral(comm, nghost, ghosts, PETSC_COPY_VALUES, &from));
    PetscCall(ISCreateStride(PETSC_COMM_SELF, nghost, n, 1, &to));
    PetscCall(VecScatterCreate(vv, from, w->localrep, to, &w->localupdate));
    PetscCall(ISDestroy(&to));
    PetscCall(ISDestroy(&from));

    /* set local to global mapping for ghosted vector */
    PetscCall(PetscMalloc1(n + nghost, &indices));
    PetscCall(VecGetOwnershipRange(vv, &rstart, NULL));

    for (i = 0; i < n; i++) indices[i] = rstart + i;
    for (i = 0; i < nghost; i++) indices[n + i] = ghosts[i];

    PetscCall(ISLocalToGlobalMappingCreate(comm, 1, n + nghost, indices, PETSC_OWN_POINTER, &ltog));
    PetscCall(VecSetLocalToGlobalMapping(vv, ltog));
    PetscCall(ISLocalToGlobalMappingDestroy(&ltog));
  } else {
    PetscCheck(vv->ops->create != VecCreate_MPI, PetscObjectComm((PetscObject)vv), PETSC_ERR_ARG_WRONGSTATE, "Must set local or global size before setting ghosting");
    PetscCheck(((PetscObject)vv)->type_name, PetscObjectComm((PetscObject)vv), PETSC_ERR_ARG_WRONGSTATE, "Must set type to VECMPI before ghosting");
  }
  PetscFunctionReturn(PETSC_SUCCESS);
}
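
/*
   A hedged sketch (not in the original source) of adding ghosting to an existing VECMPI, per the
   note above that the size and type must be set first; the sizes and ghost indices are illustrative.

     Vec      x;
     PetscInt ghosts[] = {0};

     PetscCall(VecCreate(PETSC_COMM_WORLD, &x));
     PetscCall(VecSetSizes(x, 4, PETSC_DETERMINE));
     PetscCall(VecSetType(x, VECMPI));
     PetscCall(VecMPISetGhost(x, 1, ghosts));
     PetscCall(VecDestroy(&x));
*/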

/* ------------------------------------------------------------------------------------------*/
/*@C
  VecCreateGhostBlockWithArray - Creates a parallel vector with ghost padding on each processor;
  the caller allocates the array space. Indices in the ghost region are based on blocks.

  Collective

  Input Parameters:
+ comm   - the MPI communicator to use
. bs     - block size
. n      - local vector length
. N      - global vector length (or `PETSC_DETERMINE` to have it calculated if `n` is given)
. nghost - number of local ghost blocks
. ghosts - global indices of ghost blocks (or `NULL` if not needed), counts are by block not by index, these do not need to be in increasing order (sorted)
- array  - the space to store the vector values (of length `n + nghost*bs`)

  Output Parameter:
. vv - the global vector representation (without ghost points as part of vector)

  Level: advanced

  Notes:
  Use `VecGhostGetLocalForm()` to access the local, ghosted representation
  of the vector.

  `n` is the local vector size (total local size, not the number of blocks) while `nghost`
  is the number of blocks in the ghost portion, i.e. the number of elements in the ghost
  portion is `bs*nghost`

.seealso: [](ch_vectors), `Vec`, `VecType`, `VecCreate()`, `VecGhostGetLocalForm()`, `VecGhostRestoreLocalForm()`,
          `VecCreateGhost()`, `VecCreateSeqWithArray()`, `VecCreateMPIWithArray()`,
          `VecCreateGhostWithArray()`, `VecCreateGhostBlock()`
@*/
PetscErrorCode VecCreateGhostBlockWithArray(MPI_Comm comm, PetscInt bs, PetscInt n, PetscInt N, PetscInt nghost, const PetscInt ghosts[], const PetscScalar array[], Vec *vv)
{
  Vec_MPI               *w;
  PetscScalar           *larray;
  IS                     from, to;
  ISLocalToGlobalMapping ltog;
  PetscInt               rstart, i, nb, *indices;

  PetscFunctionBegin;
  *vv = NULL;

  PetscCheck(n != PETSC_DECIDE, PETSC_COMM_SELF, PETSC_ERR_ARG_OUTOFRANGE, "Must set local size");
  PetscCheck(nghost != PETSC_DECIDE, PETSC_COMM_SELF, PETSC_ERR_ARG_OUTOFRANGE, "Must set local ghost size");
  PetscCheck(nghost >= 0, PETSC_COMM_SELF, PETSC_ERR_ARG_OUTOFRANGE, "Ghost length must be >= 0");
  PetscCheck(n % bs == 0, PETSC_COMM_SELF, PETSC_ERR_ARG_INCOMP, "Local size must be a multiple of block size");
  PetscCall(PetscSplitOwnership(comm, &n, &N));
  /* Create global representation */
  PetscCall(VecCreate(comm, vv));
  PetscCall(VecSetSizes(*vv, n, N));
  PetscCall(VecSetBlockSize(*vv, bs));
  PetscCall(VecCreate_MPI_Private(*vv, PETSC_TRUE, nghost * bs, array));
  w = (Vec_MPI *)(*vv)->data;
  /* Create local representation */
  PetscCall(VecGetArray(*vv, &larray));
  PetscCall(VecCreateSeqWithArray(PETSC_COMM_SELF, bs, n + bs * nghost, larray, &w->localrep));
  PetscCall(VecRestoreArray(*vv, &larray));

  /*
       Create scatter context for scattering (updating) ghost values
  */
  PetscCall(ISCreateBlock(comm, bs, nghost, ghosts, PETSC_COPY_VALUES, &from));
  PetscCall(ISCreateStride(PETSC_COMM_SELF, bs * nghost, n, 1, &to));
  PetscCall(VecScatterCreate(*vv, from, w->localrep, to, &w->localupdate));
  PetscCall(ISDestroy(&to));
  PetscCall(ISDestroy(&from));

  /* set local to global mapping for ghosted vector */
  nb = n / bs;
  PetscCall(PetscMalloc1(nb + nghost, &indices));
  PetscCall(VecGetOwnershipRange(*vv, &rstart, NULL));
  rstart = rstart / bs;

  for (i = 0; i < nb; i++) indices[i] = rstart + i;
  for (i = 0; i < nghost; i++) indices[nb + i] = ghosts[i];

  PetscCall(ISLocalToGlobalMappingCreate(comm, bs, nb + nghost, indices, PETSC_OWN_POINTER, &ltog));
  PetscCall(VecSetLocalToGlobalMapping(*vv, ltog));
  PetscCall(ISLocalToGlobalMappingDestroy(&ltog));
  PetscFunctionReturn(PETSC_SUCCESS);
}

/*@
  VecCreateGhostBlock - Creates a parallel vector with ghost padding on each processor.
  The indexing of the ghost points is done by blocks.

  Collective

  Input Parameters:
+ comm   - the MPI communicator to use
. bs     - the block size
. n      - local vector length
. N      - global vector length (or `PETSC_DETERMINE` to have it calculated if `n` is given)
. nghost - number of local ghost blocks
- ghosts - global indices of ghost blocks, counts are by block, not by individual index, these do not need to be in increasing order (sorted)

  Output Parameter:
. vv - the global vector representation (without ghost points as part of vector)

  Level: advanced

  Notes:
  Use `VecGhostGetLocalForm()` to access the local, ghosted representation
  of the vector.

  `n` is the local vector size (total local size, not the number of blocks) while `nghost`
  is the number of blocks in the ghost portion, i.e. the number of elements in the ghost
  portion is `bs*nghost`

.seealso: [](ch_vectors), `Vec`, `VecType`, `VecCreateSeq()`, `VecCreate()`, `VecDuplicate()`, `VecDuplicateVecs()`, `VecCreateMPI()`,
          `VecGhostGetLocalForm()`, `VecGhostRestoreLocalForm()`,
          `VecCreateGhostWithArray()`, `VecCreateMPIWithArray()`, `VecCreateGhostBlockWithArray()`
@*/
PetscErrorCode VecCreateGhostBlock(MPI_Comm comm, PetscInt bs, PetscInt n, PetscInt N, PetscInt nghost, const PetscInt ghosts[], Vec *vv)
{
  PetscFunctionBegin;
  PetscCall(VecCreateGhostBlockWithArray(comm, bs, n, N, nghost, ghosts, NULL, vv));
  PetscFunctionReturn(PETSC_SUCCESS);
}
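
/*
   A block-ghost usage sketch (not part of the original source); the block size, local length,
   and ghost block indices are illustrative.  With bs = 2, each ghost block contributes two
   entries to the local form, as described in the manual page above.

     PetscInt bghosts[] = {0};   // global index of one ghost *block*
     Vec      x;

     PetscCall(VecCreateGhostBlock(PETSC_COMM_WORLD, 2, 6, PETSC_DETERMINE, 1, bghosts, &x));
     PetscCall(VecGhostUpdateBegin(x, INSERT_VALUES, SCATTER_FORWARD));
     PetscCall(VecGhostUpdateEnd(x, INSERT_VALUES, SCATTER_FORWARD));
     PetscCall(VecDestroy(&x));
*/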
907