xref: /petsc/src/vec/vec/impls/mpi/pbvec.c (revision 76ce8f1ac04011f726b87b0654d63ff8fc64561d)
1 
2 /*
3    This file contains routines for parallel vector operations.
4  */
5 #include <petscsys.h>
6 #include <../src/vec/vec/impls/mpi/pvecimpl.h>   /*I  "petscvec.h"   I*/
7 
8 PetscErrorCode VecDot_MPI(Vec xin,Vec yin,PetscScalar *z)
9 {
10   PetscScalar    sum,work;
11 
12   PetscFunctionBegin;
13   PetscCall(VecDot_Seq(xin,yin,&work));
14   PetscCall(MPIU_Allreduce(&work,&sum,1,MPIU_SCALAR,MPIU_SUM,PetscObjectComm((PetscObject)xin)));
15   *z   = sum;
16   PetscFunctionReturn(0);
17 }
18 
19 PetscErrorCode VecTDot_MPI(Vec xin,Vec yin,PetscScalar *z)
20 {
21   PetscScalar    sum,work;
22 
23   PetscFunctionBegin;
24   PetscCall(VecTDot_Seq(xin,yin,&work));
25   PetscCall(MPIU_Allreduce(&work,&sum,1,MPIU_SCALAR,MPIU_SUM,PetscObjectComm((PetscObject)xin)));
26   *z   = sum;
27   PetscFunctionReturn(0);
28 }
29 
30 extern PetscErrorCode VecView_MPI_Draw(Vec,PetscViewer);
31 
32 static PetscErrorCode VecPlaceArray_MPI(Vec vin,const PetscScalar *a)
33 {
34   Vec_MPI        *v = (Vec_MPI*)vin->data;
35 
36   PetscFunctionBegin;
37   PetscCheck(!v->unplacedarray,PETSC_COMM_SELF,PETSC_ERR_ARG_WRONGSTATE,"VecPlaceArray() was already called on this vector, without a call to VecResetArray()");
38   v->unplacedarray = v->array;  /* save previous array so reset can bring it back */
39   v->array         = (PetscScalar*)a;
40   if (v->localrep) {
41     PetscCall(VecPlaceArray(v->localrep,a));
42   }
43   PetscFunctionReturn(0);
44 }
45 
46 PetscErrorCode VecDuplicate_MPI(Vec win,Vec *v)
47 {
48   Vec_MPI        *vw,*w = (Vec_MPI*)win->data;
49   PetscScalar    *array;
50 
51   PetscFunctionBegin;
52   PetscCall(VecCreate(PetscObjectComm((PetscObject)win),v));
53   PetscCall(PetscLayoutReference(win->map,&(*v)->map));
54 
55   PetscCall(VecCreate_MPI_Private(*v,PETSC_TRUE,w->nghost,NULL));
56   vw   = (Vec_MPI*)(*v)->data;
57   PetscCall(PetscMemcpy((*v)->ops,win->ops,sizeof(struct _VecOps)));
58 
59   /* save local representation of the parallel vector (and scatter) if it exists */
60   if (w->localrep) {
61     PetscCall(VecGetArray(*v,&array));
62     PetscCall(VecCreateSeqWithArray(PETSC_COMM_SELF,PetscAbs(win->map->bs),win->map->n+w->nghost,array,&vw->localrep));
63     PetscCall(PetscMemcpy(vw->localrep->ops,w->localrep->ops,sizeof(struct _VecOps)));
64     PetscCall(VecRestoreArray(*v,&array));
65     PetscCall(PetscLogObjectParent((PetscObject)*v,(PetscObject)vw->localrep));
66 
67     vw->localupdate = w->localupdate;
68     if (vw->localupdate) {
69       PetscCall(PetscObjectReference((PetscObject)vw->localupdate));
70     }
71   }
72 
73   /* New vector should inherit stashing property of parent */
74   (*v)->stash.donotstash   = win->stash.donotstash;
75   (*v)->stash.ignorenegidx = win->stash.ignorenegidx;
76 
77   PetscCall(PetscObjectListDuplicate(((PetscObject)win)->olist,&((PetscObject)(*v))->olist));
78   PetscCall(PetscFunctionListDuplicate(((PetscObject)win)->qlist,&((PetscObject)(*v))->qlist));
79 
80   (*v)->map->bs   = PetscAbs(win->map->bs);
81   (*v)->bstash.bs = win->bstash.bs;
82   PetscFunctionReturn(0);
83 }
84 
85 static PetscErrorCode VecSetOption_MPI(Vec V,VecOption op,PetscBool flag)
86 {
87   Vec_MPI        *v = (Vec_MPI*)V->data;
88 
89   PetscFunctionBegin;
90   switch (op) {
91   case VEC_IGNORE_OFF_PROC_ENTRIES: V->stash.donotstash = flag;
92     break;
93   case VEC_IGNORE_NEGATIVE_INDICES: V->stash.ignorenegidx = flag;
94     break;
95   case VEC_SUBSET_OFF_PROC_ENTRIES:
96     v->assembly_subset = flag; /* See the same logic in MatAssembly wrt MAT_SUBSET_OFF_PROC_ENTRIES */
97     if (!v->assembly_subset) { /* User indicates "do not reuse the communication pattern" */
98       PetscCall(VecAssemblyReset_MPI(V)); /* Reset existing pattern to free memory */
99       v->first_assembly_done = PETSC_FALSE; /* Mark that the first assembly has not been done */
100     }
101     break;
102   }
103 
104   PetscFunctionReturn(0);
105 }
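
/*
   Usage note: an illustrative sketch (not code from this file) of how the VEC_SUBSET_OFF_PROC_ENTRIES
   branch above is typically exercised; X, m, idx, vals, and nits stand for application-provided data.

     PetscCall(VecSetOption(X,VEC_SUBSET_OFF_PROC_ENTRIES,PETSC_TRUE));
     for (PetscInt it=0; it<nits; it++) {
       PetscCall(VecSetValues(X,m,idx,vals,ADD_VALUES)); // idx a subset of the indices used in the first assembly
       PetscCall(VecAssemblyBegin(X));
       PetscCall(VecAssemblyEnd(X));
     }

   so that the communication pattern built during the first assembly is reused by the later ones.
*/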
106 
107 static PetscErrorCode VecResetArray_MPI(Vec vin)
108 {
109   Vec_MPI        *v = (Vec_MPI*)vin->data;
110 
111   PetscFunctionBegin;
112   v->array         = v->unplacedarray;
113   v->unplacedarray = NULL;
114   if (v->localrep) {
115     PetscCall(VecResetArray(v->localrep));
116   }
117   PetscFunctionReturn(0);
118 }
119 
120 static PetscErrorCode VecAssemblySend_MPI_Private(MPI_Comm comm,const PetscMPIInt tag[],PetscMPIInt rankid,PetscMPIInt rank,void *sdata,MPI_Request req[],void *ctx)
121 {
122   Vec X = (Vec)ctx;
123   Vec_MPI *x = (Vec_MPI*)X->data;
124   VecAssemblyHeader *hdr = (VecAssemblyHeader*)sdata;
125   PetscInt bs = X->map->bs;
126 
127   PetscFunctionBegin;
128   /* x->first_assembly_done indicates we are reusing a communication network. In that case, some
129      messages can be empty, but we have to send them this time if we sent them before because the
130      receiver is expecting them.
131    */
132   if (hdr->count || (x->first_assembly_done && x->sendptrs[rankid].ints)) {
133     PetscCallMPI(MPI_Isend(x->sendptrs[rankid].ints,hdr->count,MPIU_INT,rank,tag[0],comm,&req[0]));
134     PetscCallMPI(MPI_Isend(x->sendptrs[rankid].scalars,hdr->count,MPIU_SCALAR,rank,tag[1],comm,&req[1]));
135   }
136   if (hdr->bcount || (x->first_assembly_done && x->sendptrs[rankid].intb)) {
137     PetscCallMPI(MPI_Isend(x->sendptrs[rankid].intb,hdr->bcount,MPIU_INT,rank,tag[2],comm,&req[2]));
138     PetscCallMPI(MPI_Isend(x->sendptrs[rankid].scalarb,hdr->bcount*bs,MPIU_SCALAR,rank,tag[3],comm,&req[3]));
139   }
140   PetscFunctionReturn(0);
141 }
142 
143 static PetscErrorCode VecAssemblyRecv_MPI_Private(MPI_Comm comm,const PetscMPIInt tag[],PetscMPIInt rank,void *rdata,MPI_Request req[],void *ctx)
144 {
145   Vec X = (Vec)ctx;
146   Vec_MPI *x = (Vec_MPI*)X->data;
147   VecAssemblyHeader *hdr = (VecAssemblyHeader*)rdata;
148   PetscInt bs = X->map->bs;
149   VecAssemblyFrame *frame;
150 
151   PetscFunctionBegin;
152   PetscCall(PetscSegBufferGet(x->segrecvframe,1,&frame));
153 
154   if (hdr->count) {
155     PetscCall(PetscSegBufferGet(x->segrecvint,hdr->count,&frame->ints));
156     PetscCallMPI(MPI_Irecv(frame->ints,hdr->count,MPIU_INT,rank,tag[0],comm,&req[0]));
157     PetscCall(PetscSegBufferGet(x->segrecvscalar,hdr->count,&frame->scalars));
158     PetscCallMPI(MPI_Irecv(frame->scalars,hdr->count,MPIU_SCALAR,rank,tag[1],comm,&req[1]));
159     frame->pendings = 2;
160   } else {
161     frame->ints = NULL;
162     frame->scalars = NULL;
163     frame->pendings = 0;
164   }
165 
166   if (hdr->bcount) {
167     PetscCall(PetscSegBufferGet(x->segrecvint,hdr->bcount,&frame->intb));
168     PetscCallMPI(MPI_Irecv(frame->intb,hdr->bcount,MPIU_INT,rank,tag[2],comm,&req[2]));
169     PetscCall(PetscSegBufferGet(x->segrecvscalar,hdr->bcount*bs,&frame->scalarb));
170     PetscCallMPI(MPI_Irecv(frame->scalarb,hdr->bcount*bs,MPIU_SCALAR,rank,tag[3],comm,&req[3]));
171     frame->pendingb = 2;
172   } else {
173     frame->intb = NULL;
174     frame->scalarb = NULL;
175     frame->pendingb = 0;
176   }
177   PetscFunctionReturn(0);
178 }
179 
180 static PetscErrorCode VecAssemblyBegin_MPI_BTS(Vec X)
181 {
182   Vec_MPI        *x = (Vec_MPI*)X->data;
183   MPI_Comm       comm;
184   PetscInt       i,j,jb,bs;
185 
186   PetscFunctionBegin;
187   if (X->stash.donotstash) PetscFunctionReturn(0);
188 
189   PetscCall(PetscObjectGetComm((PetscObject)X,&comm));
190   PetscCall(VecGetBlockSize(X,&bs));
191   if (PetscDefined(USE_DEBUG)) {
192     InsertMode addv;
193     PetscCall(MPIU_Allreduce((PetscEnum*)&X->stash.insertmode,(PetscEnum*)&addv,1,MPIU_ENUM,MPI_BOR,comm));
194     PetscCheckFalse(addv == (ADD_VALUES|INSERT_VALUES),comm,PETSC_ERR_ARG_NOTSAMETYPE,"Some processors inserted values while others added");
195   }
196   X->bstash.insertmode = X->stash.insertmode; /* Block stash implicitly tracks InsertMode of scalar stash */
197 
198   PetscCall(VecStashSortCompress_Private(&X->stash));
199   PetscCall(VecStashSortCompress_Private(&X->bstash));
200 
201   if (!x->sendranks) {
202     PetscMPIInt nowners,bnowners,*owners,*bowners;
203     PetscInt ntmp;
204     PetscCall(VecStashGetOwnerList_Private(&X->stash,X->map,&nowners,&owners));
205     PetscCall(VecStashGetOwnerList_Private(&X->bstash,X->map,&bnowners,&bowners));
206     PetscCall(PetscMergeMPIIntArray(nowners,owners,bnowners,bowners,&ntmp,&x->sendranks));
207     x->nsendranks = ntmp;
208     PetscCall(PetscFree(owners));
209     PetscCall(PetscFree(bowners));
210     PetscCall(PetscMalloc1(x->nsendranks,&x->sendhdr));
211     PetscCall(PetscCalloc1(x->nsendranks,&x->sendptrs));
212   }
213   for (i=0,j=0,jb=0; i<x->nsendranks; i++) {
214     PetscMPIInt rank = x->sendranks[i];
215     x->sendhdr[i].insertmode = X->stash.insertmode;
216     /* Initialize pointers for non-empty stashes the first time around.  Subsequent assemblies with
217      * VEC_SUBSET_OFF_PROC_ENTRIES will leave the old pointers (dangling because the stash has been collected) when
218      * there is nothing new to send, so that size-zero messages get sent instead. */
219     x->sendhdr[i].count = 0;
220     if (X->stash.n) {
221       x->sendptrs[i].ints    = &X->stash.idx[j];
222       x->sendptrs[i].scalars = &X->stash.array[j];
223       for (; j<X->stash.n && X->stash.idx[j] < X->map->range[rank+1]; j++) x->sendhdr[i].count++;
224     }
225     x->sendhdr[i].bcount = 0;
226     if (X->bstash.n) {
227       x->sendptrs[i].intb    = &X->bstash.idx[jb];
228       x->sendptrs[i].scalarb = &X->bstash.array[jb*bs];
229       for (; jb<X->bstash.n && X->bstash.idx[jb]*bs < X->map->range[rank+1]; jb++) x->sendhdr[i].bcount++;
230     }
231   }
232 
233   if (!x->segrecvint) PetscCall(PetscSegBufferCreate(sizeof(PetscInt),1000,&x->segrecvint));
234   if (!x->segrecvscalar) PetscCall(PetscSegBufferCreate(sizeof(PetscScalar),1000,&x->segrecvscalar));
235   if (!x->segrecvframe) PetscCall(PetscSegBufferCreate(sizeof(VecAssemblyFrame),50,&x->segrecvframe));
236   if (x->first_assembly_done) { /* this is not the first assembly */
237     PetscMPIInt tag[4];
238     for (i=0; i<4; i++) PetscCall(PetscCommGetNewTag(comm,&tag[i]));
239     for (i=0; i<x->nsendranks; i++) {
240       PetscCall(VecAssemblySend_MPI_Private(comm,tag,i,x->sendranks[i],x->sendhdr+i,x->sendreqs+4*i,X));
241     }
242     for (i=0; i<x->nrecvranks; i++) {
243       PetscCall(VecAssemblyRecv_MPI_Private(comm,tag,x->recvranks[i],x->recvhdr+i,x->recvreqs+4*i,X));
244     }
245     x->use_status = PETSC_TRUE;
246   } else { /* First time assembly */
247     PetscCall(PetscCommBuildTwoSidedFReq(comm,3,MPIU_INT,x->nsendranks,x->sendranks,(PetscInt*)x->sendhdr,&x->nrecvranks,&x->recvranks,&x->recvhdr,4,&x->sendreqs,&x->recvreqs,VecAssemblySend_MPI_Private,VecAssemblyRecv_MPI_Private,X));
248     x->use_status = PETSC_FALSE;
249   }
250 
251   /* The first_assembly_done flag is only meaningful when x->assembly_subset is set.
252      When assembly_subset is set, mark the first assembly as done so later assemblies can reuse the communication pattern built here.
253    */
254   x->first_assembly_done = x->assembly_subset;
255 
256   {
257     PetscInt nstash,reallocs;
258     PetscCall(VecStashGetInfo_Private(&X->stash,&nstash,&reallocs));
259     PetscCall(PetscInfo(X,"Stash has %" PetscInt_FMT " entries, uses %" PetscInt_FMT " mallocs.\n",nstash,reallocs));
260     PetscCall(VecStashGetInfo_Private(&X->bstash,&nstash,&reallocs));
261     PetscCall(PetscInfo(X,"Block-Stash has %" PetscInt_FMT " entries, uses %" PetscInt_FMT " mallocs.\n",nstash,reallocs));
262   }
263   PetscFunctionReturn(0);
264 }
265 
266 static PetscErrorCode VecAssemblyEnd_MPI_BTS(Vec X)
267 {
268   Vec_MPI *x = (Vec_MPI*)X->data;
269   PetscInt bs = X->map->bs;
270   PetscMPIInt npending,*some_indices,r;
271   MPI_Status  *some_statuses;
272   PetscScalar *xarray;
273   VecAssemblyFrame *frame;
274 
275   PetscFunctionBegin;
276   if (X->stash.donotstash) {
277     X->stash.insertmode = NOT_SET_VALUES;
278     X->bstash.insertmode = NOT_SET_VALUES;
279     PetscFunctionReturn(0);
280   }
281 
282   PetscCheck(x->segrecvframe,PETSC_COMM_SELF,PETSC_ERR_PLIB,"Missing segrecvframe! Probably you forgot to call VecAssemblyBegin first");
283   PetscCall(VecGetArray(X,&xarray));
284   PetscCall(PetscSegBufferExtractInPlace(x->segrecvframe,&frame));
285   PetscCall(PetscMalloc2(4*x->nrecvranks,&some_indices,x->use_status?4*x->nrecvranks:0,&some_statuses));
286   for (r=0,npending=0; r<x->nrecvranks; r++) npending += frame[r].pendings + frame[r].pendingb;
287   while (npending>0) {
288     PetscMPIInt ndone=0,ii;
289     /* Filling MPI_Status fields requires some resources from the MPI library.  We skip it on the first assembly, or
290      * when VEC_SUBSET_OFF_PROC_ENTRIES has not been set, because we could exchange exact sizes in the initial
291      * rendezvous.  When the rendezvous is elided, however, we use MPI_Status to get actual message lengths, so that
292      * subsequent assembly can set a proper subset of the values. */
293     PetscCallMPI(MPI_Waitsome(4*x->nrecvranks,x->recvreqs,&ndone,some_indices,x->use_status?some_statuses:MPI_STATUSES_IGNORE));
294     for (ii=0; ii<ndone; ii++) {
295       PetscInt i = some_indices[ii]/4,j,k;
296       InsertMode imode = (InsertMode)x->recvhdr[i].insertmode;
297       PetscInt *recvint;
298       PetscScalar *recvscalar;
299       PetscBool intmsg = (PetscBool)(some_indices[ii]%2 == 0);
300       PetscBool blockmsg = (PetscBool)((some_indices[ii]%4)/2 == 1);
301       npending--;
302       if (!blockmsg) { /* Scalar stash */
303         PetscMPIInt count;
304         if (--frame[i].pendings > 0) continue;
305         if (x->use_status) {
306           PetscCallMPI(MPI_Get_count(&some_statuses[ii],intmsg ? MPIU_INT : MPIU_SCALAR,&count));
307         } else count = x->recvhdr[i].count;
308         for (j=0,recvint=frame[i].ints,recvscalar=frame[i].scalars; j<count; j++,recvint++) {
309           PetscInt loc = *recvint - X->map->rstart;
310           PetscCheckFalse(*recvint < X->map->rstart || X->map->rend <= *recvint,PETSC_COMM_SELF,PETSC_ERR_PLIB,"Received vector entry %" PetscInt_FMT " out of local range [%" PetscInt_FMT ",%" PetscInt_FMT ")",*recvint,X->map->rstart,X->map->rend);
311           switch (imode) {
312           case ADD_VALUES:
313             xarray[loc] += *recvscalar++;
314             break;
315           case INSERT_VALUES:
316             xarray[loc] = *recvscalar++;
317             break;
318           default: SETERRQ(PETSC_COMM_SELF,PETSC_ERR_SUP,"Insert mode not supported 0x%x",imode);
319           }
320         }
321       } else {                  /* Block stash */
322         PetscMPIInt count;
323         if (--frame[i].pendingb > 0) continue;
324         if (x->use_status) {
325           PetscCallMPI(MPI_Get_count(&some_statuses[ii],intmsg ? MPIU_INT : MPIU_SCALAR,&count));
326           if (!intmsg) count /= bs; /* Convert from number of scalars to number of blocks */
327         } else count = x->recvhdr[i].bcount;
328         for (j=0,recvint=frame[i].intb,recvscalar=frame[i].scalarb; j<count; j++,recvint++) {
329           PetscInt loc = (*recvint)*bs - X->map->rstart;
330           switch (imode) {
331           case ADD_VALUES:
332             for (k=loc; k<loc+bs; k++) xarray[k] += *recvscalar++;
333             break;
334           case INSERT_VALUES:
335             for (k=loc; k<loc+bs; k++) xarray[k] = *recvscalar++;
336             break;
337           default: SETERRQ(PETSC_COMM_SELF,PETSC_ERR_SUP,"Insert mode not supported 0x%x",imode);
338           }
339         }
340       }
341     }
342   }
343   PetscCall(VecRestoreArray(X,&xarray));
344   PetscCallMPI(MPI_Waitall(4*x->nsendranks,x->sendreqs,MPI_STATUSES_IGNORE));
345   PetscCall(PetscFree2(some_indices,some_statuses));
346   if (x->assembly_subset) {
347     void *dummy;                /* reset segbuffers */
348     PetscCall(PetscSegBufferExtractInPlace(x->segrecvint,&dummy));
349     PetscCall(PetscSegBufferExtractInPlace(x->segrecvscalar,&dummy));
350   } else {
351     PetscCall(VecAssemblyReset_MPI(X));
352   }
353 
354   X->stash.insertmode = NOT_SET_VALUES;
355   X->bstash.insertmode = NOT_SET_VALUES;
356   PetscCall(VecStashScatterEnd_Private(&X->stash));
357   PetscCall(VecStashScatterEnd_Private(&X->bstash));
358   PetscFunctionReturn(0);
359 }
360 
361 PetscErrorCode VecAssemblyReset_MPI(Vec X)
362 {
363   Vec_MPI *x = (Vec_MPI*)X->data;
364 
365   PetscFunctionBegin;
366   PetscCall(PetscFree(x->sendreqs));
367   PetscCall(PetscFree(x->recvreqs));
368   PetscCall(PetscFree(x->sendranks));
369   PetscCall(PetscFree(x->recvranks));
370   PetscCall(PetscFree(x->sendhdr));
371   PetscCall(PetscFree(x->recvhdr));
372   PetscCall(PetscFree(x->sendptrs));
373   PetscCall(PetscSegBufferDestroy(&x->segrecvint));
374   PetscCall(PetscSegBufferDestroy(&x->segrecvscalar));
375   PetscCall(PetscSegBufferDestroy(&x->segrecvframe));
376   PetscFunctionReturn(0);
377 }
378 
379 static PetscErrorCode VecSetFromOptions_MPI(PetscOptionItems *PetscOptionsObject,Vec X)
380 {
381 #if !defined(PETSC_HAVE_MPIUNI)
382   PetscBool      flg = PETSC_FALSE,set;
383 
384   PetscFunctionBegin;
385   PetscCall(PetscOptionsHead(PetscOptionsObject,"VecMPI Options"));
386   PetscCall(PetscOptionsBool("-vec_assembly_legacy","Use MPI 1 version of assembly","",flg,&flg,&set));
387   if (set) {
388     X->ops->assemblybegin = flg ? VecAssemblyBegin_MPI : VecAssemblyBegin_MPI_BTS;
389     X->ops->assemblyend   = flg ? VecAssemblyEnd_MPI   : VecAssemblyEnd_MPI_BTS;
390   }
391   PetscCall(PetscOptionsTail());
392 #else
393   PetscFunctionBegin;
394   X->ops->assemblybegin = VecAssemblyBegin_MPI;
395   X->ops->assemblyend   = VecAssemblyEnd_MPI;
396 #endif
397   PetscFunctionReturn(0);
398 }
399 
400 static struct _VecOps DvOps = {
401   PetscDesignatedInitializer(duplicate,VecDuplicate_MPI), /* 1 */
402   PetscDesignatedInitializer(duplicatevecs,VecDuplicateVecs_Default),
403   PetscDesignatedInitializer(destroyvecs,VecDestroyVecs_Default),
404   PetscDesignatedInitializer(dot,VecDot_MPI),
405   PetscDesignatedInitializer(mdot,VecMDot_MPI),
406   PetscDesignatedInitializer(norm,VecNorm_MPI),
407   PetscDesignatedInitializer(tdot,VecTDot_MPI),
408   PetscDesignatedInitializer(mtdot,VecMTDot_MPI),
409   PetscDesignatedInitializer(scale,VecScale_Seq),
410   PetscDesignatedInitializer(copy,VecCopy_Seq), /* 10 */
411   PetscDesignatedInitializer(set,VecSet_Seq),
412   PetscDesignatedInitializer(swap,VecSwap_Seq),
413   PetscDesignatedInitializer(axpy,VecAXPY_Seq),
414   PetscDesignatedInitializer(axpby,VecAXPBY_Seq),
415   PetscDesignatedInitializer(maxpy,VecMAXPY_Seq),
416   PetscDesignatedInitializer(aypx,VecAYPX_Seq),
417   PetscDesignatedInitializer(waxpy,VecWAXPY_Seq),
418   PetscDesignatedInitializer(axpbypcz,VecAXPBYPCZ_Seq),
419   PetscDesignatedInitializer(pointwisemult,VecPointwiseMult_Seq),
420   PetscDesignatedInitializer(pointwisedivide,VecPointwiseDivide_Seq),
421   PetscDesignatedInitializer(setvalues,VecSetValues_MPI), /* 20 */
422   PetscDesignatedInitializer(assemblybegin,VecAssemblyBegin_MPI_BTS),
423   PetscDesignatedInitializer(assemblyend,VecAssemblyEnd_MPI_BTS),
424   PetscDesignatedInitializer(getarray,NULL),
425   PetscDesignatedInitializer(getsize,VecGetSize_MPI),
426   PetscDesignatedInitializer(getlocalsize,VecGetSize_Seq),
427   PetscDesignatedInitializer(restorearray,NULL),
428   PetscDesignatedInitializer(max,VecMax_MPI),
429   PetscDesignatedInitializer(min,VecMin_MPI),
430   PetscDesignatedInitializer(setrandom,VecSetRandom_Seq),
431   PetscDesignatedInitializer(setoption,VecSetOption_MPI),
432   PetscDesignatedInitializer(setvaluesblocked,VecSetValuesBlocked_MPI),
433   PetscDesignatedInitializer(destroy,VecDestroy_MPI),
434   PetscDesignatedInitializer(view,VecView_MPI),
435   PetscDesignatedInitializer(placearray,VecPlaceArray_MPI),
436   PetscDesignatedInitializer(replacearray,VecReplaceArray_Seq),
437   PetscDesignatedInitializer(dot_local,VecDot_Seq),
438   PetscDesignatedInitializer(tdot_local,VecTDot_Seq),
439   PetscDesignatedInitializer(norm_local,VecNorm_Seq),
440   PetscDesignatedInitializer(mdot_local,VecMDot_Seq),
441   PetscDesignatedInitializer(mtdot_local,VecMTDot_Seq),
442   PetscDesignatedInitializer(load,VecLoad_Default),
443   PetscDesignatedInitializer(reciprocal,VecReciprocal_Default),
444   PetscDesignatedInitializer(conjugate,VecConjugate_Seq),
445   PetscDesignatedInitializer(setlocaltoglobalmapping,NULL),
446   PetscDesignatedInitializer(setvalueslocal,NULL),
447   PetscDesignatedInitializer(resetarray,VecResetArray_MPI),
448   PetscDesignatedInitializer(setfromoptions,VecSetFromOptions_MPI),/*set from options */
449   PetscDesignatedInitializer(maxpointwisedivide,VecMaxPointwiseDivide_Seq),
450   PetscDesignatedInitializer(pointwisemax,VecPointwiseMax_Seq),
451   PetscDesignatedInitializer(pointwisemaxabs,VecPointwiseMaxAbs_Seq),
452   PetscDesignatedInitializer(pointwisemin,VecPointwiseMin_Seq),
453   PetscDesignatedInitializer(getvalues,VecGetValues_MPI),
454   PetscDesignatedInitializer(sqrt,NULL),
455   PetscDesignatedInitializer(abs,NULL),
456   PetscDesignatedInitializer(exp,NULL),
457   PetscDesignatedInitializer(log,NULL),
458   PetscDesignatedInitializer(shift,NULL),
459   PetscDesignatedInitializer(create,NULL), /* really? */
460   PetscDesignatedInitializer(stridegather,VecStrideGather_Default),
461   PetscDesignatedInitializer(stridescatter,VecStrideScatter_Default),
462   PetscDesignatedInitializer(dotnorm2,NULL),
463   PetscDesignatedInitializer(getsubvector,NULL),
464   PetscDesignatedInitializer(restoresubvector,NULL),
465   PetscDesignatedInitializer(getarrayread,NULL),
466   PetscDesignatedInitializer(restorearrayread,NULL),
467   PetscDesignatedInitializer(stridesubsetgather,VecStrideSubSetGather_Default),
468   PetscDesignatedInitializer(stridesubsetscatter,VecStrideSubSetScatter_Default),
469   PetscDesignatedInitializer(viewnative,VecView_MPI),
470   PetscDesignatedInitializer(loadnative,NULL),
471   PetscDesignatedInitializer(getlocalvector,NULL),
472 };
473 
474 /*
475     VecCreate_MPI_Private - Basic create routine called by VecCreate_MPI() (i.e. VecCreateMPI()),
476     VecCreateMPIWithArray(), VecCreate_Shared() (i.e. VecCreateShared()), VecCreateGhost(),
477     VecDuplicate_MPI(), VecCreateGhostWithArray(), and VecDuplicate_Shared()
478 
479     If alloc is true and array is NULL then this routine allocates the space, otherwise
480     no space is allocated.
481 */
482 PetscErrorCode VecCreate_MPI_Private(Vec v,PetscBool alloc,PetscInt nghost,const PetscScalar array[])
483 {
484   Vec_MPI        *s;
485 
486   PetscFunctionBegin;
487   PetscCall(PetscNewLog(v,&s));
488   v->data        = (void*)s;
489   PetscCall(PetscMemcpy(v->ops,&DvOps,sizeof(DvOps)));
490   s->nghost      = nghost;
491   v->petscnative = PETSC_TRUE;
492   if (array) v->offloadmask = PETSC_OFFLOAD_CPU;
493 
494   PetscCall(PetscLayoutSetUp(v->map));
495 
496   s->array           = (PetscScalar*)array;
497   s->array_allocated = NULL;
498   if (alloc && !array) {
499     PetscInt n = v->map->n+nghost;
500     PetscCall(PetscCalloc1(n,&s->array));
501     PetscCall(PetscLogObjectMemory((PetscObject)v,n*sizeof(PetscScalar)));
502     s->array_allocated = s->array;
503   }
504 
505   /* By default parallel vectors do not have local representation */
506   s->localrep    = NULL;
507   s->localupdate = NULL;
508 
509   v->stash.insertmode = NOT_SET_VALUES;
510   v->bstash.insertmode = NOT_SET_VALUES;
511   /* create the stashes. The block-size for bstash is set later when
512      VecSetValuesBlocked is called.
513   */
514   PetscCall(VecStashCreate_Private(PetscObjectComm((PetscObject)v),1,&v->stash));
515   PetscCall(VecStashCreate_Private(PetscObjectComm((PetscObject)v),PetscAbs(v->map->bs),&v->bstash));
516 
517 #if defined(PETSC_HAVE_MATLAB_ENGINE)
518   PetscCall(PetscObjectComposeFunction((PetscObject)v,"PetscMatlabEnginePut_C",VecMatlabEnginePut_Default));
519   PetscCall(PetscObjectComposeFunction((PetscObject)v,"PetscMatlabEngineGet_C",VecMatlabEngineGet_Default));
520 #endif
521   PetscCall(PetscObjectChangeTypeName((PetscObject)v,VECMPI));
522   PetscFunctionReturn(0);
523 }
524 
525 /*MC
526    VECMPI - VECMPI = "mpi" - The basic parallel vector
527 
528    Options Database Keys:
529 . -vec_type mpi - sets the vector type to VECMPI during a call to VecSetFromOptions()
530 
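   Example Usage:
   A minimal sketch (the communicator and global size are illustrative choices):
.vb
   Vec v;
   PetscCall(VecCreate(PETSC_COMM_WORLD,&v));
   PetscCall(VecSetSizes(v,PETSC_DECIDE,100));
   PetscCall(VecSetType(v,VECMPI));
   PetscCall(VecDestroy(&v));
.ve
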
531   Level: beginner
532 
533 .seealso: VecCreate(), VecSetType(), VecSetFromOptions(), VecCreateMPIWithArray(), VECMPI, VecType, VecCreateMPI()
534 M*/
535 
536 PetscErrorCode VecCreate_MPI(Vec vv)
537 {
538   PetscFunctionBegin;
539   PetscCall(VecCreate_MPI_Private(vv,PETSC_TRUE,0,NULL));
540   PetscFunctionReturn(0);
541 }
542 
543 /*MC
544    VECSTANDARD = "standard" - A VECSEQ on one process and VECMPI on more than one process
545 
546    Options Database Keys:
547 . -vec_type standard - sets the vector type to VECSTANDARD during a call to VecSetFromOptions()
548 
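   Example Usage:
   A minimal sketch (the global size is an illustrative choice):
.vb
   Vec v;
   PetscCall(VecCreate(PETSC_COMM_WORLD,&v));
   PetscCall(VecSetSizes(v,PETSC_DECIDE,100));
   PetscCall(VecSetFromOptions(v));  /* picks VECSEQ on one process, VECMPI on more */
   PetscCall(VecDestroy(&v));
.ve
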
549   Level: beginner
550 
551 .seealso: VecCreateSeq(), VecCreateMPI()
552 M*/
553 
554 PETSC_EXTERN PetscErrorCode VecCreate_Standard(Vec v)
555 {
556   PetscMPIInt    size;
557 
558   PetscFunctionBegin;
559   PetscCallMPI(MPI_Comm_size(PetscObjectComm((PetscObject)v),&size));
560   if (size == 1) {
561     PetscCall(VecSetType(v,VECSEQ));
562   } else {
563     PetscCall(VecSetType(v,VECMPI));
564   }
565   PetscFunctionReturn(0);
566 }
567 
568 /*@C
569    VecCreateMPIWithArray - Creates a parallel, array-style vector,
570    where the user provides the array space to store the vector values.
571 
572    Collective
573 
574    Input Parameters:
575 +  comm  - the MPI communicator to use
576 .  bs    - block size, same meaning as VecSetBlockSize()
577 .  n     - local vector length, cannot be PETSC_DECIDE
578 .  N     - global vector length (or PETSC_DECIDE to have it calculated)
579 -  array - the user provided array to store the vector values
580 
581    Output Parameter:
582 .  vv - the vector
583 
584    Notes:
585    Use VecDuplicate() or VecDuplicateVecs() to form additional vectors of the
586    same type as an existing vector.
587 
588    If the user-provided array is NULL, then VecPlaceArray() can be used
589    at a later stage to SET the array for storing the vector values.
590 
591    PETSc does NOT free the array when the vector is destroyed via VecDestroy().
592    The user should not free the array until the vector is destroyed.
593 
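   Example Usage:
   A minimal sketch; the local length, block size, and values below are illustrative assumptions:
.vb
   PetscScalar a[4] = {1.0,2.0,3.0,4.0};   /* user-owned storage of local length n = 4 */
   Vec         v;
   PetscCall(VecCreateMPIWithArray(PETSC_COMM_WORLD,1,4,PETSC_DECIDE,a,&v));
   /* use v ...; a must remain valid until after VecDestroy() and is not freed by PETSc */
   PetscCall(VecDestroy(&v));
.ve
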
594    Level: intermediate
595 
596 .seealso: VecCreateSeqWithArray(), VecCreate(), VecDuplicate(), VecDuplicateVecs(), VecCreateGhost(),
597           VecCreateMPI(), VecCreateGhostWithArray(), VecPlaceArray()
598 
599 @*/
600 PetscErrorCode  VecCreateMPIWithArray(MPI_Comm comm,PetscInt bs,PetscInt n,PetscInt N,const PetscScalar array[],Vec *vv)
601 {
602   PetscFunctionBegin;
603   PetscCheckFalse(n == PETSC_DECIDE,PETSC_COMM_SELF,PETSC_ERR_ARG_OUTOFRANGE,"Must set local size of vector");
604   PetscCall(PetscSplitOwnership(comm,&n,&N));
605   PetscCall(VecCreate(comm,vv));
606   PetscCall(VecSetSizes(*vv,n,N));
607   PetscCall(VecSetBlockSize(*vv,bs));
608   PetscCall(VecCreate_MPI_Private(*vv,PETSC_FALSE,0,array));
609   PetscFunctionReturn(0);
610 }
611 
612 /*@C
613    VecCreateGhostWithArray - Creates a parallel vector with ghost padding on each processor;
614    the caller allocates the array space.
615 
616    Collective
617 
618    Input Parameters:
619 +  comm - the MPI communicator to use
620 .  n - local vector length
621 .  N - global vector length (or PETSC_DECIDE to have it calculated if n is given)
622 .  nghost - number of local ghost points
623 .  ghosts - global indices of ghost points (or NULL if not needed), these do not need to be in increasing order (sorted)
624 -  array - the space to store the vector values (must have length at least n + nghost)
625 
626    Output Parameter:
627 .  vv - the global vector representation (without ghost points as part of vector)
628 
629    Notes:
630    Use VecGhostGetLocalForm() to access the local, ghosted representation
631    of the vector.
632 
633    This also automatically sets the ISLocalToGlobalMapping() for this vector.
634 
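   Example Usage:
   A minimal sketch; the local length, ghost indices, and storage size are illustrative assumptions:
.vb
   PetscInt    ghosts[2] = {0,5};   /* global indices to ghost, normally owned by other ranks */
   PetscScalar array[6];            /* user storage of length n + nghost = 4 + 2 */
   Vec         v;
   PetscCall(VecCreateGhostWithArray(PETSC_COMM_WORLD,4,PETSC_DECIDE,2,ghosts,array,&v));
   PetscCall(VecGhostUpdateBegin(v,INSERT_VALUES,SCATTER_FORWARD));   /* refresh ghost entries */
   PetscCall(VecGhostUpdateEnd(v,INSERT_VALUES,SCATTER_FORWARD));
   PetscCall(VecDestroy(&v));
.ve
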
635    Level: advanced
636 
637 .seealso: VecCreate(), VecGhostGetLocalForm(), VecGhostRestoreLocalForm(),
638           VecCreateGhost(), VecCreateSeqWithArray(), VecCreateMPIWithArray(),
639           VecCreateGhostBlock(), VecCreateGhostBlockWithArray(), VecMPISetGhost()
640 
641 @*/
642 PetscErrorCode  VecCreateGhostWithArray(MPI_Comm comm,PetscInt n,PetscInt N,PetscInt nghost,const PetscInt ghosts[],const PetscScalar array[],Vec *vv)
643 {
644   Vec_MPI                *w;
645   PetscScalar            *larray;
646   IS                     from,to;
647   ISLocalToGlobalMapping ltog;
648   PetscInt               rstart,i,*indices;
649 
650   PetscFunctionBegin;
651   *vv = NULL;
652 
653   PetscCheckFalse(n == PETSC_DECIDE,PETSC_COMM_SELF,PETSC_ERR_ARG_OUTOFRANGE,"Must set local size");
654   PetscCheckFalse(nghost == PETSC_DECIDE,PETSC_COMM_SELF,PETSC_ERR_ARG_OUTOFRANGE,"Must set local ghost size");
655   PetscCheckFalse(nghost < 0,PETSC_COMM_SELF,PETSC_ERR_ARG_OUTOFRANGE,"Ghost length must be >= 0");
656   PetscCall(PetscSplitOwnership(comm,&n,&N));
657   /* Create global representation */
658   PetscCall(VecCreate(comm,vv));
659   PetscCall(VecSetSizes(*vv,n,N));
660   PetscCall(VecCreate_MPI_Private(*vv,PETSC_TRUE,nghost,array));
661   w    = (Vec_MPI*)(*vv)->data;
662   /* Create local representation */
663   PetscCall(VecGetArray(*vv,&larray));
664   PetscCall(VecCreateSeqWithArray(PETSC_COMM_SELF,1,n+nghost,larray,&w->localrep));
665   PetscCall(PetscLogObjectParent((PetscObject)*vv,(PetscObject)w->localrep));
666   PetscCall(VecRestoreArray(*vv,&larray));
667 
668   /*
669        Create scatter context for scattering (updating) ghost values
670   */
671   PetscCall(ISCreateGeneral(comm,nghost,ghosts,PETSC_COPY_VALUES,&from));
672   PetscCall(ISCreateStride(PETSC_COMM_SELF,nghost,n,1,&to));
673   PetscCall(VecScatterCreate(*vv,from,w->localrep,to,&w->localupdate));
674   PetscCall(PetscLogObjectParent((PetscObject)*vv,(PetscObject)w->localupdate));
675   PetscCall(ISDestroy(&to));
676   PetscCall(ISDestroy(&from));
677 
678   /* set local to global mapping for ghosted vector */
679   PetscCall(PetscMalloc1(n+nghost,&indices));
680   PetscCall(VecGetOwnershipRange(*vv,&rstart,NULL));
681   for (i=0; i<n; i++) {
682     indices[i] = rstart + i;
683   }
684   for (i=0; i<nghost; i++) {
685     indices[n+i] = ghosts[i];
686   }
687   PetscCall(ISLocalToGlobalMappingCreate(comm,1,n+nghost,indices,PETSC_OWN_POINTER,&ltog));
688   PetscCall(VecSetLocalToGlobalMapping(*vv,ltog));
689   PetscCall(ISLocalToGlobalMappingDestroy(&ltog));
690   PetscFunctionReturn(0);
691 }
692 
693 /*@
694    VecCreateGhost - Creates a parallel vector with ghost padding on each processor.
695 
696    Collective
697 
698    Input Parameters:
699 +  comm - the MPI communicator to use
700 .  n - local vector length
701 .  N - global vector length (or PETSC_DECIDE to have it calculated if n is given)
702 .  nghost - number of local ghost points
703 -  ghosts - global indices of ghost points, these do not need to be in increasing order (sorted)
704 
705    Output Parameter:
706 .  vv - the global vector representation (without ghost points as part of vector)
707 
708    Notes:
709    Use VecGhostGetLocalForm() to access the local, ghosted representation
710    of the vector.
711 
712    This also automatically sets the ISLocalToGlobalMapping() for this vector.
713 
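   Example Usage:
   A minimal sketch; the local length and ghost indices are illustrative assumptions:
.vb
   PetscInt ghosts[2] = {0,9};   /* global indices to ghost, normally owned by other ranks */
   Vec      v,vlocal;
   PetscCall(VecCreateGhost(PETSC_COMM_WORLD,5,PETSC_DECIDE,2,ghosts,&v));
   PetscCall(VecGhostUpdateBegin(v,INSERT_VALUES,SCATTER_FORWARD));
   PetscCall(VecGhostUpdateEnd(v,INSERT_VALUES,SCATTER_FORWARD));
   PetscCall(VecGhostGetLocalForm(v,&vlocal));      /* sequential view of owned + ghost entries */
   PetscCall(VecGhostRestoreLocalForm(v,&vlocal));
   PetscCall(VecDestroy(&v));
.ve
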
714    Level: advanced
715 
716 .seealso: VecCreateSeq(), VecCreate(), VecDuplicate(), VecDuplicateVecs(), VecCreateMPI(),
717           VecGhostGetLocalForm(), VecGhostRestoreLocalForm(), VecGhostUpdateBegin(),
718           VecCreateGhostWithArray(), VecCreateMPIWithArray(), VecGhostUpdateEnd(),
719           VecCreateGhostBlock(), VecCreateGhostBlockWithArray(), VecMPISetGhost()
720 
721 @*/
722 PetscErrorCode  VecCreateGhost(MPI_Comm comm,PetscInt n,PetscInt N,PetscInt nghost,const PetscInt ghosts[],Vec *vv)
723 {
724   PetscFunctionBegin;
725   PetscCall(VecCreateGhostWithArray(comm,n,N,nghost,ghosts,NULL,vv));
726   PetscFunctionReturn(0);
727 }
728 
729 /*@
730    VecMPISetGhost - Sets the ghost points for an MPI ghost vector
731 
732    Collective on Vec
733 
734    Input Parameters:
735 +  vv - the MPI vector
736 .  nghost - number of local ghost points
737 -  ghosts - global indices of ghost points, these do not need to be in increasing order (sorted)
738 
739    Notes:
740    Use VecGhostGetLocalForm() to access the local, ghosted representation
741    of the vector.
742 
743    This also automatically sets the ISLocalToGlobalMapping() for this vector.
744 
745    You must call this AFTER you have set the type of the vector (with VecSetType()) and the size (with VecSetSizes()).
746 
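   Example Usage:
   A minimal sketch; the sizes and ghost indices are illustrative assumptions:
.vb
   PetscInt ghosts[2] = {0,9};
   Vec      v;
   PetscCall(VecCreate(PETSC_COMM_WORLD,&v));
   PetscCall(VecSetSizes(v,5,PETSC_DECIDE));
   PetscCall(VecSetType(v,VECMPI));
   PetscCall(VecMPISetGhost(v,2,ghosts));
.ve
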
747    Level: advanced
748 
749 .seealso: VecCreateSeq(), VecCreate(), VecDuplicate(), VecDuplicateVecs(), VecCreateMPI(),
750           VecGhostGetLocalForm(), VecGhostRestoreLocalForm(), VecGhostUpdateBegin(),
751           VecCreateGhostWithArray(), VecCreateMPIWithArray(), VecGhostUpdateEnd(),
752           VecCreateGhostBlock(), VecCreateGhostBlockWithArray()
753 
754 @*/
755 PetscErrorCode  VecMPISetGhost(Vec vv,PetscInt nghost,const PetscInt ghosts[])
756 {
757   PetscBool      flg;
758 
759   PetscFunctionBegin;
760   PetscCall(PetscObjectTypeCompare((PetscObject)vv,VECMPI,&flg));
761   /* if this is already a fully built VECMPI then destroy it and rebuild it with ghosting */
762   if (flg) {
763     PetscInt               n,N;
764     Vec_MPI                *w;
765     PetscScalar            *larray;
766     IS                     from,to;
767     ISLocalToGlobalMapping ltog;
768     PetscInt               rstart,i,*indices;
769     MPI_Comm               comm;
770 
771     PetscCall(PetscObjectGetComm((PetscObject)vv,&comm));
772     n    = vv->map->n;
773     N    = vv->map->N;
774     PetscCall((*vv->ops->destroy)(vv));
775     PetscCall(VecSetSizes(vv,n,N));
776     PetscCall(VecCreate_MPI_Private(vv,PETSC_TRUE,nghost,NULL));
777     w    = (Vec_MPI*)(vv)->data;
778     /* Create local representation */
779     PetscCall(VecGetArray(vv,&larray));
780     PetscCall(VecCreateSeqWithArray(PETSC_COMM_SELF,1,n+nghost,larray,&w->localrep));
781     PetscCall(PetscLogObjectParent((PetscObject)vv,(PetscObject)w->localrep));
782     PetscCall(VecRestoreArray(vv,&larray));
783 
784     /*
785      Create scatter context for scattering (updating) ghost values
786      */
787     PetscCall(ISCreateGeneral(comm,nghost,ghosts,PETSC_COPY_VALUES,&from));
788     PetscCall(ISCreateStride(PETSC_COMM_SELF,nghost,n,1,&to));
789     PetscCall(VecScatterCreate(vv,from,w->localrep,to,&w->localupdate));
790     PetscCall(PetscLogObjectParent((PetscObject)vv,(PetscObject)w->localupdate));
791     PetscCall(ISDestroy(&to));
792     PetscCall(ISDestroy(&from));
793 
794     /* set local to global mapping for ghosted vector */
795     PetscCall(PetscMalloc1(n+nghost,&indices));
796     PetscCall(VecGetOwnershipRange(vv,&rstart,NULL));
797 
798     for (i=0; i<n; i++)      indices[i]   = rstart + i;
799     for (i=0; i<nghost; i++) indices[n+i] = ghosts[i];
800 
801     PetscCall(ISLocalToGlobalMappingCreate(comm,1,n+nghost,indices,PETSC_OWN_POINTER,&ltog));
802     PetscCall(VecSetLocalToGlobalMapping(vv,ltog));
803     PetscCall(ISLocalToGlobalMappingDestroy(&ltog));
804   } else if (vv->ops->create == VecCreate_MPI) SETERRQ(PetscObjectComm((PetscObject)vv),PETSC_ERR_ARG_WRONGSTATE,"Must set local or global size before setting ghosting");
805   else if (!((PetscObject)vv)->type_name) SETERRQ(PetscObjectComm((PetscObject)vv),PETSC_ERR_ARG_WRONGSTATE,"Must set type to VECMPI before ghosting");
806   PetscFunctionReturn(0);
807 }
808 
809 /* ------------------------------------------------------------------------------------------*/
810 /*@C
811    VecCreateGhostBlockWithArray - Creates a parallel vector with ghost padding on each processor;
812    the caller allocates the array space. Indices in the ghost region are based on blocks.
813 
814    Collective
815 
816    Input Parameters:
817 +  comm - the MPI communicator to use
818 .  bs - block size
819 .  n - local vector length
820 .  N - global vector length (or PETSC_DECIDE to have it calculated if n is given)
821 .  nghost - number of local ghost blocks
822 .  ghosts - global indices of ghost blocks (or NULL if not needed), counts are by block not by index, these do not need to be in increasing order (sorted)
823 -  array - the space to store the vector values (must have length at least n + nghost*bs)
824 
825    Output Parameter:
826 .  vv - the global vector representation (without ghost points as part of vector)
827 
828    Notes:
829    Use VecGhostGetLocalForm() to access the local, ghosted representation
830    of the vector.
831 
832    n is the local vector size (total local size not the number of blocks) while nghost
833    is the number of blocks in the ghost portion, i.e. the number of elements in the ghost
834    portion is bs*nghost
835 
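   Example Usage:
   A minimal sketch with block size 2; the sizes, ghost block index, and storage are illustrative assumptions:
.vb
   PetscInt    ghosts[1] = {0};   /* global index of the ghost block, normally owned by another rank */
   PetscScalar array[6];          /* user storage of length n + nghost*bs = 4 + 1*2 */
   Vec         v;
   PetscCall(VecCreateGhostBlockWithArray(PETSC_COMM_WORLD,2,4,PETSC_DECIDE,1,ghosts,array,&v));
   PetscCall(VecDestroy(&v));
.ve
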
836    Level: advanced
837 
838 .seealso: VecCreate(), VecGhostGetLocalForm(), VecGhostRestoreLocalForm(),
839           VecCreateGhost(), VecCreateSeqWithArray(), VecCreateMPIWithArray(),
840           VecCreateGhostWithArray(), VecCreateGhostBlock()
841 
842 @*/
843 PetscErrorCode  VecCreateGhostBlockWithArray(MPI_Comm comm,PetscInt bs,PetscInt n,PetscInt N,PetscInt nghost,const PetscInt ghosts[],const PetscScalar array[],Vec *vv)
844 {
845   Vec_MPI                *w;
846   PetscScalar            *larray;
847   IS                     from,to;
848   ISLocalToGlobalMapping ltog;
849   PetscInt               rstart,i,nb,*indices;
850 
851   PetscFunctionBegin;
852   *vv = NULL;
853 
854   PetscCheckFalse(n == PETSC_DECIDE,PETSC_COMM_SELF,PETSC_ERR_ARG_OUTOFRANGE,"Must set local size");
855   PetscCheckFalse(nghost == PETSC_DECIDE,PETSC_COMM_SELF,PETSC_ERR_ARG_OUTOFRANGE,"Must set local ghost size");
856   PetscCheckFalse(nghost < 0,PETSC_COMM_SELF,PETSC_ERR_ARG_OUTOFRANGE,"Ghost length must be >= 0");
857   PetscCheckFalse(n % bs,PETSC_COMM_SELF,PETSC_ERR_ARG_INCOMP,"Local size must be a multiple of block size");
858   PetscCall(PetscSplitOwnership(comm,&n,&N));
859   /* Create global representation */
860   PetscCall(VecCreate(comm,vv));
861   PetscCall(VecSetSizes(*vv,n,N));
862   PetscCall(VecSetBlockSize(*vv,bs));
863   PetscCall(VecCreate_MPI_Private(*vv,PETSC_TRUE,nghost*bs,array));
864   w    = (Vec_MPI*)(*vv)->data;
865   /* Create local representation */
866   PetscCall(VecGetArray(*vv,&larray));
867   PetscCall(VecCreateSeqWithArray(PETSC_COMM_SELF,bs,n+bs*nghost,larray,&w->localrep));
868   PetscCall(PetscLogObjectParent((PetscObject)*vv,(PetscObject)w->localrep));
869   PetscCall(VecRestoreArray(*vv,&larray));
870 
871   /*
872        Create scatter context for scattering (updating) ghost values
873   */
874   PetscCall(ISCreateBlock(comm,bs,nghost,ghosts,PETSC_COPY_VALUES,&from));
875   PetscCall(ISCreateStride(PETSC_COMM_SELF,bs*nghost,n,1,&to));
876   PetscCall(VecScatterCreate(*vv,from,w->localrep,to,&w->localupdate));
877   PetscCall(PetscLogObjectParent((PetscObject)*vv,(PetscObject)w->localupdate));
878   PetscCall(ISDestroy(&to));
879   PetscCall(ISDestroy(&from));
880 
881   /* set local to global mapping for ghosted vector */
882   nb     = n/bs;
883   PetscCall(PetscMalloc1(nb+nghost,&indices));
884   PetscCall(VecGetOwnershipRange(*vv,&rstart,NULL));
885   rstart = rstart/bs;
886 
887   for (i=0; i<nb; i++)      indices[i]    = rstart + i;
888   for (i=0; i<nghost; i++)  indices[nb+i] = ghosts[i];
889 
890   PetscCall(ISLocalToGlobalMappingCreate(comm,bs,nb+nghost,indices,PETSC_OWN_POINTER,&ltog));
891   PetscCall(VecSetLocalToGlobalMapping(*vv,ltog));
892   PetscCall(ISLocalToGlobalMappingDestroy(&ltog));
893   PetscFunctionReturn(0);
894 }
895 
896 /*@
897    VecCreateGhostBlock - Creates a parallel vector with ghost padding on each processor.
898         The indexing of the ghost points is done by blocks.
899 
900    Collective
901 
902    Input Parameters:
903 +  comm - the MPI communicator to use
904 .  bs - the block size
905 .  n - local vector length
906 .  N - global vector length (or PETSC_DECIDE to have it calculated if n is given)
907 .  nghost - number of local ghost blocks
908 -  ghosts - global indices of ghost blocks, counts are by block, not by individual index, these do not need to be in increasing order (sorted)
909 
910    Output Parameter:
911 .  vv - the global vector representation (without ghost points as part of vector)
912 
913    Notes:
914    Use VecGhostGetLocalForm() to access the local, ghosted representation
915    of the vector.
916 
917    n is the local vector size (total local size not the number of blocks) while nghost
918    is the number of blocks in the ghost portion, i.e. the number of elements in the ghost
919    portion is bs*nghost
920 
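   Example Usage:
   A minimal sketch with block size 2; the sizes and ghost block index are illustrative assumptions:
.vb
   PetscInt ghosts[1] = {0};   /* global index of the ghost block, normally owned by another rank */
   Vec      v;
   PetscCall(VecCreateGhostBlock(PETSC_COMM_WORLD,2,4,PETSC_DECIDE,1,ghosts,&v));
   PetscCall(VecGhostUpdateBegin(v,INSERT_VALUES,SCATTER_FORWARD));
   PetscCall(VecGhostUpdateEnd(v,INSERT_VALUES,SCATTER_FORWARD));
   PetscCall(VecDestroy(&v));
.ve
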
921    Level: advanced
922 
923 .seealso: VecCreateSeq(), VecCreate(), VecDuplicate(), VecDuplicateVecs(), VecCreateMPI(),
924           VecGhostGetLocalForm(), VecGhostRestoreLocalForm(),
925           VecCreateGhostWithArray(), VecCreateMPIWithArray(), VecCreateGhostBlockWithArray()
926 
927 @*/
928 PetscErrorCode  VecCreateGhostBlock(MPI_Comm comm,PetscInt bs,PetscInt n,PetscInt N,PetscInt nghost,const PetscInt ghosts[],Vec *vv)
929 {
930   PetscFunctionBegin;
931   PetscCall(VecCreateGhostBlockWithArray(comm,bs,n,N,nghost,ghosts,NULL,vv));
932   PetscFunctionReturn(0);
933 }
934