
/*
   This file contains routines for Parallel vector operations.
 */
#include <petscsys.h>
#include <../src/vec/vec/impls/mpi/pvecimpl.h>   /*I  "petscvec.h"   I*/

PetscErrorCode VecDot_MPI(Vec xin,Vec yin,PetscScalar *z)
{
  PetscScalar    sum,work;
  PetscErrorCode ierr;

  PetscFunctionBegin;
  ierr = VecDot_Seq(xin,yin,&work);CHKERRQ(ierr);
  ierr = MPIU_Allreduce(&work,&sum,1,MPIU_SCALAR,MPIU_SUM,PetscObjectComm((PetscObject)xin));CHKERRMPI(ierr);
  *z   = sum;
  PetscFunctionReturn(0);
}

PetscErrorCode VecTDot_MPI(Vec xin,Vec yin,PetscScalar *z)
{
  PetscScalar    sum,work;
  PetscErrorCode ierr;

  PetscFunctionBegin;
  ierr = VecTDot_Seq(xin,yin,&work);CHKERRQ(ierr);
  ierr = MPIU_Allreduce(&work,&sum,1,MPIU_SCALAR,MPIU_SUM,PetscObjectComm((PetscObject)xin));CHKERRMPI(ierr);
  *z   = sum;
  PetscFunctionReturn(0);
}

extern PetscErrorCode VecView_MPI_Draw(Vec,PetscViewer);

static PetscErrorCode VecPlaceArray_MPI(Vec vin,const PetscScalar *a)
{
  PetscErrorCode ierr;
  Vec_MPI        *v = (Vec_MPI*)vin->data;

  PetscFunctionBegin;
  if (v->unplacedarray) SETERRQ(PETSC_COMM_SELF,PETSC_ERR_ARG_WRONGSTATE,"VecPlaceArray() was already called on this vector, without a call to VecResetArray()");
  v->unplacedarray = v->array;  /* save previous array so reset can bring it back */
  v->array         = (PetscScalar*)a;
  if (v->localrep) {
    ierr = VecPlaceArray(v->localrep,a);CHKERRQ(ierr);
  }
  PetscFunctionReturn(0);
}

PetscErrorCode VecDuplicate_MPI(Vec win,Vec *v)
{
  PetscErrorCode ierr;
  Vec_MPI        *vw,*w = (Vec_MPI*)win->data;
  PetscScalar    *array;

  PetscFunctionBegin;
  ierr = VecCreate(PetscObjectComm((PetscObject)win),v);CHKERRQ(ierr);
  ierr = PetscLayoutReference(win->map,&(*v)->map);CHKERRQ(ierr);

  ierr = VecCreate_MPI_Private(*v,PETSC_TRUE,w->nghost,NULL);CHKERRQ(ierr);
  vw   = (Vec_MPI*)(*v)->data;
  ierr = PetscMemcpy((*v)->ops,win->ops,sizeof(struct _VecOps));CHKERRQ(ierr);

  /* save local representation of the parallel vector (and scatter) if it exists */
  if (w->localrep) {
    ierr = VecGetArray(*v,&array);CHKERRQ(ierr);
    ierr = VecCreateSeqWithArray(PETSC_COMM_SELF,PetscAbs(win->map->bs),win->map->n+w->nghost,array,&vw->localrep);CHKERRQ(ierr);
    ierr = PetscMemcpy(vw->localrep->ops,w->localrep->ops,sizeof(struct _VecOps));CHKERRQ(ierr);
    ierr = VecRestoreArray(*v,&array);CHKERRQ(ierr);
    ierr = PetscLogObjectParent((PetscObject)*v,(PetscObject)vw->localrep);CHKERRQ(ierr);

    vw->localupdate = w->localupdate;
    if (vw->localupdate) {
      ierr = PetscObjectReference((PetscObject)vw->localupdate);CHKERRQ(ierr);
    }
  }

  /* New vector should inherit stashing property of parent */
  (*v)->stash.donotstash   = win->stash.donotstash;
  (*v)->stash.ignorenegidx = win->stash.ignorenegidx;

  ierr = PetscObjectListDuplicate(((PetscObject)win)->olist,&((PetscObject)(*v))->olist);CHKERRQ(ierr);
  ierr = PetscFunctionListDuplicate(((PetscObject)win)->qlist,&((PetscObject)(*v))->qlist);CHKERRQ(ierr);

  (*v)->map->bs   = PetscAbs(win->map->bs);
  (*v)->bstash.bs = win->bstash.bs;
  PetscFunctionReturn(0);
}

static PetscErrorCode VecSetOption_MPI(Vec V,VecOption op,PetscBool flag)
{
  Vec_MPI        *v = (Vec_MPI*)V->data;
  PetscErrorCode ierr;

  PetscFunctionBegin;
  switch (op) {
  case VEC_IGNORE_OFF_PROC_ENTRIES: V->stash.donotstash = flag;
    break;
  case VEC_IGNORE_NEGATIVE_INDICES: V->stash.ignorenegidx = flag;
    break;
  case VEC_SUBSET_OFF_PROC_ENTRIES:
    v->assembly_subset = flag; /* See the same logic in MatAssembly wrt MAT_SUBSET_OFF_PROC_ENTRIES */
    if (!v->assembly_subset) { /* User indicates "do not reuse the communication pattern" */
      ierr = VecAssemblyReset_MPI(V);CHKERRQ(ierr); /* Reset existing pattern to free memory */
      v->first_assembly_done = PETSC_FALSE; /* Mark that the first assembly has not been done */
    }
    break;
  }

  PetscFunctionReturn(0);
}

static PetscErrorCode VecResetArray_MPI(Vec vin)
{
  Vec_MPI        *v = (Vec_MPI*)vin->data;
  PetscErrorCode ierr;

  PetscFunctionBegin;
  v->array         = v->unplacedarray;
  v->unplacedarray = NULL;
  if (v->localrep) {
    ierr = VecResetArray(v->localrep);CHKERRQ(ierr);
  }
  PetscFunctionReturn(0);
}

static PetscErrorCode VecAssemblySend_MPI_Private(MPI_Comm comm,const PetscMPIInt tag[],PetscMPIInt rankid,PetscMPIInt rank,void *sdata,MPI_Request req[],void *ctx)
{
  Vec X = (Vec)ctx;
  Vec_MPI *x = (Vec_MPI*)X->data;
  VecAssemblyHeader *hdr = (VecAssemblyHeader*)sdata;
  PetscInt bs = X->map->bs;
  PetscErrorCode ierr;

  PetscFunctionBegin;
  /* x->first_assembly_done indicates we are reusing a communication network. In that case, some
     messages can be empty, but we have to send them this time if we sent them before because the
     receiver is expecting them.
   */
  if (hdr->count || (x->first_assembly_done && x->sendptrs[rankid].ints)) {
    ierr = MPI_Isend(x->sendptrs[rankid].ints,hdr->count,MPIU_INT,rank,tag[0],comm,&req[0]);CHKERRMPI(ierr);
    ierr = MPI_Isend(x->sendptrs[rankid].scalars,hdr->count,MPIU_SCALAR,rank,tag[1],comm,&req[1]);CHKERRMPI(ierr);
  }
  if (hdr->bcount || (x->first_assembly_done && x->sendptrs[rankid].intb)) {
    ierr = MPI_Isend(x->sendptrs[rankid].intb,hdr->bcount,MPIU_INT,rank,tag[2],comm,&req[2]);CHKERRMPI(ierr);
    ierr = MPI_Isend(x->sendptrs[rankid].scalarb,hdr->bcount*bs,MPIU_SCALAR,rank,tag[3],comm,&req[3]);CHKERRMPI(ierr);
  }
  PetscFunctionReturn(0);
}

static PetscErrorCode VecAssemblyRecv_MPI_Private(MPI_Comm comm,const PetscMPIInt tag[],PetscMPIInt rank,void *rdata,MPI_Request req[],void *ctx)
{
  Vec X = (Vec)ctx;
  Vec_MPI *x = (Vec_MPI*)X->data;
  VecAssemblyHeader *hdr = (VecAssemblyHeader*)rdata;
  PetscErrorCode ierr;
  PetscInt bs = X->map->bs;
  VecAssemblyFrame *frame;

  PetscFunctionBegin;
  ierr = PetscSegBufferGet(x->segrecvframe,1,&frame);CHKERRQ(ierr);

  if (hdr->count) {
    ierr = PetscSegBufferGet(x->segrecvint,hdr->count,&frame->ints);CHKERRQ(ierr);
    ierr = MPI_Irecv(frame->ints,hdr->count,MPIU_INT,rank,tag[0],comm,&req[0]);CHKERRMPI(ierr);
    ierr = PetscSegBufferGet(x->segrecvscalar,hdr->count,&frame->scalars);CHKERRQ(ierr);
    ierr = MPI_Irecv(frame->scalars,hdr->count,MPIU_SCALAR,rank,tag[1],comm,&req[1]);CHKERRMPI(ierr);
    frame->pendings = 2;
  } else {
    frame->ints = NULL;
    frame->scalars = NULL;
    frame->pendings = 0;
  }

  if (hdr->bcount) {
    ierr = PetscSegBufferGet(x->segrecvint,hdr->bcount,&frame->intb);CHKERRQ(ierr);
    ierr = MPI_Irecv(frame->intb,hdr->bcount,MPIU_INT,rank,tag[2],comm,&req[2]);CHKERRMPI(ierr);
    ierr = PetscSegBufferGet(x->segrecvscalar,hdr->bcount*bs,&frame->scalarb);CHKERRQ(ierr);
    ierr = MPI_Irecv(frame->scalarb,hdr->bcount*bs,MPIU_SCALAR,rank,tag[3],comm,&req[3]);CHKERRMPI(ierr);
    frame->pendingb = 2;
  } else {
    frame->intb = NULL;
    frame->scalarb = NULL;
    frame->pendingb = 0;
  }
  PetscFunctionReturn(0);
}

static PetscErrorCode VecAssemblyBegin_MPI_BTS(Vec X)
{
  Vec_MPI        *x = (Vec_MPI*)X->data;
  PetscErrorCode ierr;
  MPI_Comm       comm;
  PetscInt       i,j,jb,bs;

  PetscFunctionBegin;
  if (X->stash.donotstash) PetscFunctionReturn(0);

  ierr = PetscObjectGetComm((PetscObject)X,&comm);CHKERRQ(ierr);
  ierr = VecGetBlockSize(X,&bs);CHKERRQ(ierr);
  if (PetscDefined(USE_DEBUG)) {
    InsertMode addv;
    ierr = MPIU_Allreduce((PetscEnum*)&X->stash.insertmode,(PetscEnum*)&addv,1,MPIU_ENUM,MPI_BOR,comm);CHKERRMPI(ierr);
    if (addv == (ADD_VALUES|INSERT_VALUES)) SETERRQ(comm,PETSC_ERR_ARG_NOTSAMETYPE,"Some processors inserted values while others added");
  }
  X->bstash.insertmode = X->stash.insertmode; /* Block stash implicitly tracks InsertMode of scalar stash */

  ierr = VecStashSortCompress_Private(&X->stash);CHKERRQ(ierr);
  ierr = VecStashSortCompress_Private(&X->bstash);CHKERRQ(ierr);

  if (!x->sendranks) {
    PetscMPIInt nowners,bnowners,*owners,*bowners;
    PetscInt ntmp;
    ierr = VecStashGetOwnerList_Private(&X->stash,X->map,&nowners,&owners);CHKERRQ(ierr);
    ierr = VecStashGetOwnerList_Private(&X->bstash,X->map,&bnowners,&bowners);CHKERRQ(ierr);
    ierr = PetscMergeMPIIntArray(nowners,owners,bnowners,bowners,&ntmp,&x->sendranks);CHKERRQ(ierr);
    x->nsendranks = ntmp;
    ierr = PetscFree(owners);CHKERRQ(ierr);
    ierr = PetscFree(bowners);CHKERRQ(ierr);
    ierr = PetscMalloc1(x->nsendranks,&x->sendhdr);CHKERRQ(ierr);
    ierr = PetscCalloc1(x->nsendranks,&x->sendptrs);CHKERRQ(ierr);
  }
  for (i=0,j=0,jb=0; i<x->nsendranks; i++) {
    PetscMPIInt rank = x->sendranks[i];
    x->sendhdr[i].insertmode = X->stash.insertmode;
    /* Initialize pointers for non-empty stashes the first time around.  Subsequent assemblies with
     * VEC_SUBSET_OFF_PROC_ENTRIES will leave the old pointers (dangling because the stash has been collected) when
     * there is nothing new to send, so that size-zero messages get sent instead. */
    x->sendhdr[i].count = 0;
    if (X->stash.n) {
      x->sendptrs[i].ints    = &X->stash.idx[j];
      x->sendptrs[i].scalars = &X->stash.array[j];
      for (; j<X->stash.n && X->stash.idx[j] < X->map->range[rank+1]; j++) x->sendhdr[i].count++;
    }
    x->sendhdr[i].bcount = 0;
    if (X->bstash.n) {
      x->sendptrs[i].intb    = &X->bstash.idx[jb];
      x->sendptrs[i].scalarb = &X->bstash.array[jb*bs];
      for (; jb<X->bstash.n && X->bstash.idx[jb]*bs < X->map->range[rank+1]; jb++) x->sendhdr[i].bcount++;
    }
  }

  if (!x->segrecvint) {ierr = PetscSegBufferCreate(sizeof(PetscInt),1000,&x->segrecvint);CHKERRQ(ierr);}
  if (!x->segrecvscalar) {ierr = PetscSegBufferCreate(sizeof(PetscScalar),1000,&x->segrecvscalar);CHKERRQ(ierr);}
  if (!x->segrecvframe) {ierr = PetscSegBufferCreate(sizeof(VecAssemblyFrame),50,&x->segrecvframe);CHKERRQ(ierr);}
  if (x->first_assembly_done) { /* this is not the first assembly */
    PetscMPIInt tag[4];
    for (i=0; i<4; i++) {ierr = PetscCommGetNewTag(comm,&tag[i]);CHKERRQ(ierr);}
    for (i=0; i<x->nsendranks; i++) {
      ierr = VecAssemblySend_MPI_Private(comm,tag,i,x->sendranks[i],x->sendhdr+i,x->sendreqs+4*i,X);CHKERRQ(ierr);
    }
    for (i=0; i<x->nrecvranks; i++) {
      ierr = VecAssemblyRecv_MPI_Private(comm,tag,x->recvranks[i],x->recvhdr+i,x->recvreqs+4*i,X);CHKERRQ(ierr);
    }
    x->use_status = PETSC_TRUE;
  } else { /* First time assembly */
    ierr = PetscCommBuildTwoSidedFReq(comm,3,MPIU_INT,x->nsendranks,x->sendranks,(PetscInt*)x->sendhdr,&x->nrecvranks,&x->recvranks,&x->recvhdr,4,&x->sendreqs,&x->recvreqs,VecAssemblySend_MPI_Private,VecAssemblyRecv_MPI_Private,X);CHKERRQ(ierr);
    x->use_status = PETSC_FALSE;
  }

  /* The first_assembly_done flag is only meaningful when x->assembly_subset is set.
     When assembly_subset is set, record that the first assembly has been done so that
     subsequent assemblies can reuse the communication pattern.
   */
  x->first_assembly_done = x->assembly_subset;

  {
    PetscInt nstash,reallocs;
    ierr = VecStashGetInfo_Private(&X->stash,&nstash,&reallocs);CHKERRQ(ierr);
    ierr = PetscInfo2(X,"Stash has %D entries, uses %D mallocs.\n",nstash,reallocs);CHKERRQ(ierr);
    ierr = VecStashGetInfo_Private(&X->bstash,&nstash,&reallocs);CHKERRQ(ierr);
    ierr = PetscInfo2(X,"Block-Stash has %D entries, uses %D mallocs.\n",nstash,reallocs);CHKERRQ(ierr);
  }
  PetscFunctionReturn(0);
}

static PetscErrorCode VecAssemblyEnd_MPI_BTS(Vec X)
{
  Vec_MPI *x = (Vec_MPI*)X->data;
  PetscInt bs = X->map->bs;
  PetscMPIInt npending,*some_indices,r;
  MPI_Status  *some_statuses;
  PetscScalar *xarray;
  PetscErrorCode ierr;
  VecAssemblyFrame *frame;

  PetscFunctionBegin;
  if (X->stash.donotstash) {
    X->stash.insertmode = NOT_SET_VALUES;
    X->bstash.insertmode = NOT_SET_VALUES;
    PetscFunctionReturn(0);
  }

  if (!x->segrecvframe) SETERRQ(PETSC_COMM_SELF,PETSC_ERR_PLIB,"Missing segrecvframe! Probably you forgot to call VecAssemblyBegin first");
  ierr = VecGetArray(X,&xarray);CHKERRQ(ierr);
  ierr = PetscSegBufferExtractInPlace(x->segrecvframe,&frame);CHKERRQ(ierr);
  ierr = PetscMalloc2(4*x->nrecvranks,&some_indices,x->use_status?4*x->nrecvranks:0,&some_statuses);CHKERRQ(ierr);
  for (r=0,npending=0; r<x->nrecvranks; r++) npending += frame[r].pendings + frame[r].pendingb;
  while (npending>0) {
    PetscMPIInt ndone=0,ii;
    /* Filling MPI_Status fields requires some resources from the MPI library.  We skip it on the first assembly, or
     * when VEC_SUBSET_OFF_PROC_ENTRIES has not been set, because we could exchange exact sizes in the initial
     * rendezvous.  When the rendezvous is elided, however, we use MPI_Status to get actual message lengths, so that
     * subsequent assembly can set a proper subset of the values. */
    ierr = MPI_Waitsome(4*x->nrecvranks,x->recvreqs,&ndone,some_indices,x->use_status?some_statuses:MPI_STATUSES_IGNORE);CHKERRMPI(ierr);
    for (ii=0; ii<ndone; ii++) {
      PetscInt i = some_indices[ii]/4,j,k;
      InsertMode imode = (InsertMode)x->recvhdr[i].insertmode;
      PetscInt *recvint;
      PetscScalar *recvscalar;
      PetscBool intmsg = (PetscBool)(some_indices[ii]%2 == 0);
      PetscBool blockmsg = (PetscBool)((some_indices[ii]%4)/2 == 1);
      npending--;
      if (!blockmsg) { /* Scalar stash */
        PetscMPIInt count;
        if (--frame[i].pendings > 0) continue;
        if (x->use_status) {
          ierr = MPI_Get_count(&some_statuses[ii],intmsg ? MPIU_INT : MPIU_SCALAR,&count);CHKERRMPI(ierr);
        } else count = x->recvhdr[i].count;
        for (j=0,recvint=frame[i].ints,recvscalar=frame[i].scalars; j<count; j++,recvint++) {
          PetscInt loc = *recvint - X->map->rstart;
          if (*recvint < X->map->rstart || X->map->rend <= *recvint) SETERRQ3(PETSC_COMM_SELF,PETSC_ERR_PLIB,"Received vector entry %D out of local range [%D,%D)",*recvint,X->map->rstart,X->map->rend);
          switch (imode) {
          case ADD_VALUES:
            xarray[loc] += *recvscalar++;
            break;
          case INSERT_VALUES:
            xarray[loc] = *recvscalar++;
            break;
          default: SETERRQ1(PETSC_COMM_SELF,PETSC_ERR_SUP,"Insert mode not supported 0x%x",imode);
          }
        }
      } else {                  /* Block stash */
        PetscMPIInt count;
        if (--frame[i].pendingb > 0) continue;
        if (x->use_status) {
          ierr = MPI_Get_count(&some_statuses[ii],intmsg ? MPIU_INT : MPIU_SCALAR,&count);CHKERRMPI(ierr);
          if (!intmsg) count /= bs; /* Convert from number of scalars to number of blocks */
        } else count = x->recvhdr[i].bcount;
        for (j=0,recvint=frame[i].intb,recvscalar=frame[i].scalarb; j<count; j++,recvint++) {
          PetscInt loc = (*recvint)*bs - X->map->rstart;
          switch (imode) {
          case ADD_VALUES:
            for (k=loc; k<loc+bs; k++) xarray[k] += *recvscalar++;
            break;
          case INSERT_VALUES:
            for (k=loc; k<loc+bs; k++) xarray[k] = *recvscalar++;
            break;
          default: SETERRQ1(PETSC_COMM_SELF,PETSC_ERR_SUP,"Insert mode not supported 0x%x",imode);
          }
        }
      }
    }
  }
  ierr = VecRestoreArray(X,&xarray);CHKERRQ(ierr);
  ierr = MPI_Waitall(4*x->nsendranks,x->sendreqs,MPI_STATUSES_IGNORE);CHKERRMPI(ierr);
  ierr = PetscFree2(some_indices,some_statuses);CHKERRQ(ierr);
  if (x->assembly_subset) {
    void *dummy;                /* reset segbuffers */
    ierr = PetscSegBufferExtractInPlace(x->segrecvint,&dummy);CHKERRQ(ierr);
    ierr = PetscSegBufferExtractInPlace(x->segrecvscalar,&dummy);CHKERRQ(ierr);
  } else {
    ierr = VecAssemblyReset_MPI(X);CHKERRQ(ierr);
  }

  X->stash.insertmode = NOT_SET_VALUES;
  X->bstash.insertmode = NOT_SET_VALUES;
  ierr = VecStashScatterEnd_Private(&X->stash);CHKERRQ(ierr);
  ierr = VecStashScatterEnd_Private(&X->bstash);CHKERRQ(ierr);
  PetscFunctionReturn(0);
}

PetscErrorCode VecAssemblyReset_MPI(Vec X)
{
  Vec_MPI *x = (Vec_MPI*)X->data;
  PetscErrorCode ierr;

  PetscFunctionBegin;
  ierr = PetscFree(x->sendreqs);CHKERRQ(ierr);
  ierr = PetscFree(x->recvreqs);CHKERRQ(ierr);
  ierr = PetscFree(x->sendranks);CHKERRQ(ierr);
  ierr = PetscFree(x->recvranks);CHKERRQ(ierr);
  ierr = PetscFree(x->sendhdr);CHKERRQ(ierr);
  ierr = PetscFree(x->recvhdr);CHKERRQ(ierr);
  ierr = PetscFree(x->sendptrs);CHKERRQ(ierr);
  ierr = PetscSegBufferDestroy(&x->segrecvint);CHKERRQ(ierr);
  ierr = PetscSegBufferDestroy(&x->segrecvscalar);CHKERRQ(ierr);
  ierr = PetscSegBufferDestroy(&x->segrecvframe);CHKERRQ(ierr);
  PetscFunctionReturn(0);
}

static PetscErrorCode VecSetFromOptions_MPI(PetscOptionItems *PetscOptionsObject,Vec X)
{
#if !defined(PETSC_HAVE_MPIUNI)
  PetscErrorCode ierr;
  PetscBool      flg = PETSC_FALSE,set;

  PetscFunctionBegin;
  ierr = PetscOptionsHead(PetscOptionsObject,"VecMPI Options");CHKERRQ(ierr);
  ierr = PetscOptionsBool("-vec_assembly_legacy","Use MPI 1 version of assembly","",flg,&flg,&set);CHKERRQ(ierr);
  if (set) {
    X->ops->assemblybegin = flg ? VecAssemblyBegin_MPI : VecAssemblyBegin_MPI_BTS;
    X->ops->assemblyend   = flg ? VecAssemblyEnd_MPI   : VecAssemblyEnd_MPI_BTS;
  }
  ierr = PetscOptionsTail();CHKERRQ(ierr);
#else
  PetscFunctionBegin;
  X->ops->assemblybegin = VecAssemblyBegin_MPI;
  X->ops->assemblyend   = VecAssemblyEnd_MPI;
#endif
  PetscFunctionReturn(0);
}

static struct _VecOps DvOps = { VecDuplicate_MPI, /* 1 */
                                VecDuplicateVecs_Default,
                                VecDestroyVecs_Default,
                                VecDot_MPI,
                                VecMDot_MPI,
                                VecNorm_MPI,
                                VecTDot_MPI,
                                VecMTDot_MPI,
                                VecScale_Seq,
                                VecCopy_Seq, /* 10 */
                                VecSet_Seq,
                                VecSwap_Seq,
                                VecAXPY_Seq,
                                VecAXPBY_Seq,
                                VecMAXPY_Seq,
                                VecAYPX_Seq,
                                VecWAXPY_Seq,
                                VecAXPBYPCZ_Seq,
                                VecPointwiseMult_Seq,
                                VecPointwiseDivide_Seq,
                                VecSetValues_MPI, /* 20 */
                                VecAssemblyBegin_MPI_BTS,
                                VecAssemblyEnd_MPI_BTS,
                                NULL,
                                VecGetSize_MPI,
                                VecGetSize_Seq,
                                NULL,
                                VecMax_MPI,
                                VecMin_MPI,
                                VecSetRandom_Seq,
                                VecSetOption_MPI,
                                VecSetValuesBlocked_MPI,
                                VecDestroy_MPI,
                                VecView_MPI,
                                VecPlaceArray_MPI,
                                VecReplaceArray_Seq,
                                VecDot_Seq,
                                VecTDot_Seq,
                                VecNorm_Seq,
                                VecMDot_Seq,
                                VecMTDot_Seq,
                                VecLoad_Default,
                                VecReciprocal_Default,
                                VecConjugate_Seq,
                                NULL,
                                NULL,
                                VecResetArray_MPI,
                                VecSetFromOptions_MPI, /* set from options */
                                VecMaxPointwiseDivide_Seq,
                                VecPointwiseMax_Seq,
                                VecPointwiseMaxAbs_Seq,
                                VecPointwiseMin_Seq,
                                VecGetValues_MPI,
                                NULL,
                                NULL,
                                NULL,
                                NULL,
                                NULL,
                                NULL,
                                VecStrideGather_Default,
                                VecStrideScatter_Default,
                                NULL,
                                NULL,
                                NULL,
                                NULL,
                                NULL,
                                VecStrideSubSetGather_Default,
                                VecStrideSubSetScatter_Default,
                                NULL,
                                NULL,
                                NULL
};

/*
    VecCreate_MPI_Private - Basic create routine called by VecCreate_MPI() (i.e. VecCreateMPI()),
    VecCreateMPIWithArray(), VecCreate_Shared() (i.e. VecCreateShared()), VecCreateGhost(),
    VecDuplicate_MPI(), VecCreateGhostWithArray(), and VecDuplicate_Shared()

    If alloc is true and array is NULL then this routine allocates the space, otherwise
    no space is allocated.
*/
PetscErrorCode VecCreate_MPI_Private(Vec v,PetscBool alloc,PetscInt nghost,const PetscScalar array[])
{
  Vec_MPI        *s;
  PetscErrorCode ierr;

  PetscFunctionBegin;
  ierr           = PetscNewLog(v,&s);CHKERRQ(ierr);
  v->data        = (void*)s;
  ierr           = PetscMemcpy(v->ops,&DvOps,sizeof(DvOps));CHKERRQ(ierr);
  s->nghost      = nghost;
  v->petscnative = PETSC_TRUE;
  if (array) v->offloadmask = PETSC_OFFLOAD_CPU;

  ierr = PetscLayoutSetUp(v->map);CHKERRQ(ierr);

  s->array           = (PetscScalar*)array;
  s->array_allocated = NULL;
  if (alloc && !array) {
    PetscInt n = v->map->n+nghost;
    ierr               = PetscCalloc1(n,&s->array);CHKERRQ(ierr);
    ierr               = PetscLogObjectMemory((PetscObject)v,n*sizeof(PetscScalar));CHKERRQ(ierr);
    s->array_allocated = s->array;
  }

  /* By default parallel vectors do not have local representation */
  s->localrep    = NULL;
  s->localupdate = NULL;

  v->stash.insertmode = NOT_SET_VALUES;
  v->bstash.insertmode = NOT_SET_VALUES;
  /* create the stashes. The block-size for bstash is set later when
     VecSetValuesBlocked is called.
  */
  ierr = VecStashCreate_Private(PetscObjectComm((PetscObject)v),1,&v->stash);CHKERRQ(ierr);
  ierr = VecStashCreate_Private(PetscObjectComm((PetscObject)v),PetscAbs(v->map->bs),&v->bstash);CHKERRQ(ierr);

#if defined(PETSC_HAVE_MATLAB_ENGINE)
  ierr = PetscObjectComposeFunction((PetscObject)v,"PetscMatlabEnginePut_C",VecMatlabEnginePut_Default);CHKERRQ(ierr);
  ierr = PetscObjectComposeFunction((PetscObject)v,"PetscMatlabEngineGet_C",VecMatlabEngineGet_Default);CHKERRQ(ierr);
#endif
  ierr = PetscObjectChangeTypeName((PetscObject)v,VECMPI);CHKERRQ(ierr);
  PetscFunctionReturn(0);
}

/*MC
   VECMPI - VECMPI = "mpi" - The basic parallel vector

   Options Database Keys:
. -vec_type mpi - sets the vector type to VECMPI during a call to VecSetFromOptions()

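   Example Usage:
   A minimal creation sketch (the global size of 100 is illustrative):
.vb
   Vec v;
   ierr = VecCreate(PETSC_COMM_WORLD,&v);CHKERRQ(ierr);
   ierr = VecSetSizes(v,PETSC_DECIDE,100);CHKERRQ(ierr);
   ierr = VecSetType(v,VECMPI);CHKERRQ(ierr);
.ve
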
  Level: beginner

.seealso: VecCreate(), VecSetType(), VecSetFromOptions(), VecCreateMPIWithArray(), VECMPI, VecType, VecCreateMPI()
M*/

PetscErrorCode VecCreate_MPI(Vec vv)
{
  PetscErrorCode ierr;

  PetscFunctionBegin;
  ierr = VecCreate_MPI_Private(vv,PETSC_TRUE,0,NULL);CHKERRQ(ierr);
  PetscFunctionReturn(0);
}

/*MC
   VECSTANDARD = "standard" - A VECSEQ on one process and VECMPI on more than one process

   Options Database Keys:
. -vec_type standard - sets the vector type to VECSTANDARD during a call to VecSetFromOptions()

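   Example Usage:
   A minimal sketch using the options database (the global size of 100 is illustrative):
.vb
   Vec v;
   ierr = VecCreate(PETSC_COMM_WORLD,&v);CHKERRQ(ierr);
   ierr = VecSetSizes(v,PETSC_DECIDE,100);CHKERRQ(ierr);
   ierr = VecSetFromOptions(v);CHKERRQ(ierr); /* with -vec_type standard this selects VECSEQ or VECMPI by communicator size */
.ve
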
  Level: beginner

.seealso: VecCreateSeq(), VecCreateMPI()
M*/

PETSC_EXTERN PetscErrorCode VecCreate_Standard(Vec v)
{
  PetscErrorCode ierr;
  PetscMPIInt    size;

  PetscFunctionBegin;
  ierr = MPI_Comm_size(PetscObjectComm((PetscObject)v),&size);CHKERRMPI(ierr);
  if (size == 1) {
    ierr = VecSetType(v,VECSEQ);CHKERRQ(ierr);
  } else {
    ierr = VecSetType(v,VECMPI);CHKERRQ(ierr);
  }
  PetscFunctionReturn(0);
}

/*@C
   VecCreateMPIWithArray - Creates a parallel, array-style vector,
   where the user provides the array space to store the vector values.

   Collective

   Input Parameters:
+  comm  - the MPI communicator to use
.  bs    - block size, same meaning as VecSetBlockSize()
.  n     - local vector length, cannot be PETSC_DECIDE
.  N     - global vector length (or PETSC_DECIDE to have it calculated)
-  array - the user provided array to store the vector values

   Output Parameter:
.  vv - the vector

   Notes:
   Use VecDuplicate() or VecDuplicateVecs() to form additional vectors of the
   same type as an existing vector.

   If the user-provided array is NULL, then VecPlaceArray() can be used
   at a later stage to SET the array for storing the vector values.

   PETSc does NOT free the array when the vector is destroyed via VecDestroy().
   The user should not free the array until the vector is destroyed.

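   Example Usage:
   A minimal sketch with a user-provided array (the local length of 4 is illustrative):
.vb
   PetscScalar array[4];
   Vec         v;
   ierr = VecCreateMPIWithArray(PETSC_COMM_WORLD,1,4,PETSC_DECIDE,array,&v);CHKERRQ(ierr);
   /* ... use v ... */
   ierr = VecDestroy(&v);CHKERRQ(ierr); /* array itself is NOT freed by PETSc */
.ve
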
   Level: intermediate

.seealso: VecCreateSeqWithArray(), VecCreate(), VecDuplicate(), VecDuplicateVecs(), VecCreateGhost(),
          VecCreateMPI(), VecCreateGhostWithArray(), VecPlaceArray()

@*/
PetscErrorCode  VecCreateMPIWithArray(MPI_Comm comm,PetscInt bs,PetscInt n,PetscInt N,const PetscScalar array[],Vec *vv)
{
  PetscErrorCode ierr;

  PetscFunctionBegin;
  if (n == PETSC_DECIDE) SETERRQ(PETSC_COMM_SELF,PETSC_ERR_ARG_OUTOFRANGE,"Must set local size of vector");
  ierr = PetscSplitOwnership(comm,&n,&N);CHKERRQ(ierr);
  ierr = VecCreate(comm,vv);CHKERRQ(ierr);
  ierr = VecSetSizes(*vv,n,N);CHKERRQ(ierr);
  ierr = VecSetBlockSize(*vv,bs);CHKERRQ(ierr);
  ierr = VecCreate_MPI_Private(*vv,PETSC_FALSE,0,array);CHKERRQ(ierr);
  PetscFunctionReturn(0);
}

/*@C
   VecCreateGhostWithArray - Creates a parallel vector with ghost padding on each processor;
   the caller allocates the array space.

   Collective

   Input Parameters:
+  comm - the MPI communicator to use
.  n - local vector length
.  N - global vector length (or PETSC_DECIDE to have it calculated if n is given)
.  nghost - number of local ghost points
.  ghosts - global indices of ghost points (or NULL if not needed), these do not need to be in increasing order (sorted)
-  array - the space to store the vector values (as long as n + nghost)

   Output Parameter:
.  vv - the global vector representation (without ghost points as part of vector)

   Notes:
   Use VecGhostGetLocalForm() to access the local, ghosted representation
   of the vector.

   This also automatically sets the ISLocalToGlobalMapping() for this vector.

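   Example Usage:
   A minimal sketch (the local length of 4 and the two ghost indices are illustrative):
.vb
   PetscInt    ghosts[2] = {0,1};  /* global indices of the ghost entries */
   PetscScalar array[6];           /* length n + nghost = 4 + 2 */
   Vec         v,lv;
   ierr = VecCreateGhostWithArray(PETSC_COMM_WORLD,4,PETSC_DECIDE,2,ghosts,array,&v);CHKERRQ(ierr);
   ierr = VecGhostGetLocalForm(v,&lv);CHKERRQ(ierr);     /* lv also contains the ghost slots */
   ierr = VecGhostRestoreLocalForm(v,&lv);CHKERRQ(ierr);
.ve
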
   Level: advanced

.seealso: VecCreate(), VecGhostGetLocalForm(), VecGhostRestoreLocalForm(),
          VecCreateGhost(), VecCreateSeqWithArray(), VecCreateMPIWithArray(),
          VecCreateGhostBlock(), VecCreateGhostBlockWithArray(), VecMPISetGhost()

@*/
PetscErrorCode  VecCreateGhostWithArray(MPI_Comm comm,PetscInt n,PetscInt N,PetscInt nghost,const PetscInt ghosts[],const PetscScalar array[],Vec *vv)
{
  PetscErrorCode         ierr;
  Vec_MPI                *w;
  PetscScalar            *larray;
  IS                     from,to;
  ISLocalToGlobalMapping ltog;
  PetscInt               rstart,i,*indices;

  PetscFunctionBegin;
  *vv = NULL;

  if (n == PETSC_DECIDE)      SETERRQ(PETSC_COMM_SELF,PETSC_ERR_ARG_OUTOFRANGE,"Must set local size");
  if (nghost == PETSC_DECIDE) SETERRQ(PETSC_COMM_SELF,PETSC_ERR_ARG_OUTOFRANGE,"Must set local ghost size");
  if (nghost < 0)             SETERRQ(PETSC_COMM_SELF,PETSC_ERR_ARG_OUTOFRANGE,"Ghost length must be >= 0");
  ierr = PetscSplitOwnership(comm,&n,&N);CHKERRQ(ierr);
  /* Create global representation */
  ierr = VecCreate(comm,vv);CHKERRQ(ierr);
  ierr = VecSetSizes(*vv,n,N);CHKERRQ(ierr);
  ierr = VecCreate_MPI_Private(*vv,PETSC_TRUE,nghost,array);CHKERRQ(ierr);
  w    = (Vec_MPI*)(*vv)->data;
  /* Create local representation */
  ierr = VecGetArray(*vv,&larray);CHKERRQ(ierr);
  ierr = VecCreateSeqWithArray(PETSC_COMM_SELF,1,n+nghost,larray,&w->localrep);CHKERRQ(ierr);
  ierr = PetscLogObjectParent((PetscObject)*vv,(PetscObject)w->localrep);CHKERRQ(ierr);
  ierr = VecRestoreArray(*vv,&larray);CHKERRQ(ierr);

  /*
       Create scatter context for scattering (updating) ghost values
  */
  ierr = ISCreateGeneral(comm,nghost,ghosts,PETSC_COPY_VALUES,&from);CHKERRQ(ierr);
  ierr = ISCreateStride(PETSC_COMM_SELF,nghost,n,1,&to);CHKERRQ(ierr);
  ierr = VecScatterCreate(*vv,from,w->localrep,to,&w->localupdate);CHKERRQ(ierr);
  ierr = PetscLogObjectParent((PetscObject)*vv,(PetscObject)w->localupdate);CHKERRQ(ierr);
  ierr = ISDestroy(&to);CHKERRQ(ierr);
  ierr = ISDestroy(&from);CHKERRQ(ierr);

  /* set local to global mapping for ghosted vector */
  ierr = PetscMalloc1(n+nghost,&indices);CHKERRQ(ierr);
  ierr = VecGetOwnershipRange(*vv,&rstart,NULL);CHKERRQ(ierr);
  for (i=0; i<n; i++) {
    indices[i] = rstart + i;
  }
  for (i=0; i<nghost; i++) {
    indices[n+i] = ghosts[i];
  }
  ierr = ISLocalToGlobalMappingCreate(comm,1,n+nghost,indices,PETSC_OWN_POINTER,&ltog);CHKERRQ(ierr);
  ierr = VecSetLocalToGlobalMapping(*vv,ltog);CHKERRQ(ierr);
  ierr = ISLocalToGlobalMappingDestroy(&ltog);CHKERRQ(ierr);
  PetscFunctionReturn(0);
}

/*@
   VecCreateGhost - Creates a parallel vector with ghost padding on each processor.

   Collective

   Input Parameters:
+  comm - the MPI communicator to use
.  n - local vector length
.  N - global vector length (or PETSC_DECIDE to have it calculated if n is given)
.  nghost - number of local ghost points
-  ghosts - global indices of ghost points, these do not need to be in increasing order (sorted)

   Output Parameter:
.  vv - the global vector representation (without ghost points as part of vector)

   Notes:
   Use VecGhostGetLocalForm() to access the local, ghosted representation
   of the vector.

   This also automatically sets the ISLocalToGlobalMapping() for this vector.

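   Example Usage:
   A minimal sketch (the local length of 4 and the two ghost indices are illustrative):
.vb
   PetscInt ghosts[2] = {0,1};  /* global indices of the ghost entries */
   Vec      v;
   ierr = VecCreateGhost(PETSC_COMM_WORLD,4,PETSC_DECIDE,2,ghosts,&v);CHKERRQ(ierr);
   /* after assembling v, gather the ghost values from their owners */
   ierr = VecGhostUpdateBegin(v,INSERT_VALUES,SCATTER_FORWARD);CHKERRQ(ierr);
   ierr = VecGhostUpdateEnd(v,INSERT_VALUES,SCATTER_FORWARD);CHKERRQ(ierr);
.ve
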
   Level: advanced

.seealso: VecCreateSeq(), VecCreate(), VecDuplicate(), VecDuplicateVecs(), VecCreateMPI(),
          VecGhostGetLocalForm(), VecGhostRestoreLocalForm(), VecGhostUpdateBegin(),
          VecCreateGhostWithArray(), VecCreateMPIWithArray(), VecGhostUpdateEnd(),
          VecCreateGhostBlock(), VecCreateGhostBlockWithArray(), VecMPISetGhost()

@*/
PetscErrorCode  VecCreateGhost(MPI_Comm comm,PetscInt n,PetscInt N,PetscInt nghost,const PetscInt ghosts[],Vec *vv)
{
  PetscErrorCode ierr;

  PetscFunctionBegin;
  ierr = VecCreateGhostWithArray(comm,n,N,nghost,ghosts,NULL,vv);CHKERRQ(ierr);
  PetscFunctionReturn(0);
}

/*@
   VecMPISetGhost - Sets the ghost points for an MPI ghost vector

   Collective on Vec

   Input Parameters:
+  vv - the MPI vector
.  nghost - number of local ghost points
-  ghosts - global indices of ghost points, these do not need to be in increasing order (sorted)

   Notes:
   Use VecGhostGetLocalForm() to access the local, ghosted representation
   of the vector.

   This also automatically sets the ISLocalToGlobalMapping() for this vector.

   You must call this AFTER you have set the type of the vector (with VecSetType()) and the size (with VecSetSizes()).

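   Example Usage:
   A minimal sketch (the sizes and ghost indices are illustrative):
.vb
   PetscInt ghosts[2] = {0,1};  /* global indices of the ghost entries */
   Vec      v;
   ierr = VecCreate(PETSC_COMM_WORLD,&v);CHKERRQ(ierr);
   ierr = VecSetSizes(v,4,PETSC_DECIDE);CHKERRQ(ierr);
   ierr = VecSetType(v,VECMPI);CHKERRQ(ierr);
   ierr = VecMPISetGhost(v,2,ghosts);CHKERRQ(ierr);
.ve
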
   Level: advanced

.seealso: VecCreateSeq(), VecCreate(), VecDuplicate(), VecDuplicateVecs(), VecCreateMPI(),
          VecGhostGetLocalForm(), VecGhostRestoreLocalForm(), VecGhostUpdateBegin(),
          VecCreateGhostWithArray(), VecCreateMPIWithArray(), VecGhostUpdateEnd(),
          VecCreateGhostBlock(), VecCreateGhostBlockWithArray()

@*/
PetscErrorCode  VecMPISetGhost(Vec vv,PetscInt nghost,const PetscInt ghosts[])
{
  PetscErrorCode ierr;
  PetscBool      flg;

  PetscFunctionBegin;
  ierr = PetscObjectTypeCompare((PetscObject)vv,VECMPI,&flg);CHKERRQ(ierr);
  /* if this is already a fully instantiated VECMPI then destroy it and rebuild with ghosting */
  if (flg) {
    PetscInt               n,N;
    Vec_MPI                *w;
    PetscScalar            *larray;
    IS                     from,to;
    ISLocalToGlobalMapping ltog;
    PetscInt               rstart,i,*indices;
    MPI_Comm               comm;

    ierr = PetscObjectGetComm((PetscObject)vv,&comm);CHKERRQ(ierr);
    n    = vv->map->n;
    N    = vv->map->N;
    ierr = (*vv->ops->destroy)(vv);CHKERRQ(ierr);
    ierr = VecSetSizes(vv,n,N);CHKERRQ(ierr);
    ierr = VecCreate_MPI_Private(vv,PETSC_TRUE,nghost,NULL);CHKERRQ(ierr);
    w    = (Vec_MPI*)(vv)->data;
    /* Create local representation */
    ierr = VecGetArray(vv,&larray);CHKERRQ(ierr);
    ierr = VecCreateSeqWithArray(PETSC_COMM_SELF,1,n+nghost,larray,&w->localrep);CHKERRQ(ierr);
    ierr = PetscLogObjectParent((PetscObject)vv,(PetscObject)w->localrep);CHKERRQ(ierr);
    ierr = VecRestoreArray(vv,&larray);CHKERRQ(ierr);

    /*
     Create scatter context for scattering (updating) ghost values
     */
    ierr = ISCreateGeneral(comm,nghost,ghosts,PETSC_COPY_VALUES,&from);CHKERRQ(ierr);
    ierr = ISCreateStride(PETSC_COMM_SELF,nghost,n,1,&to);CHKERRQ(ierr);
    ierr = VecScatterCreate(vv,from,w->localrep,to,&w->localupdate);CHKERRQ(ierr);
    ierr = PetscLogObjectParent((PetscObject)vv,(PetscObject)w->localupdate);CHKERRQ(ierr);
    ierr = ISDestroy(&to);CHKERRQ(ierr);
    ierr = ISDestroy(&from);CHKERRQ(ierr);

    /* set local to global mapping for ghosted vector */
    ierr = PetscMalloc1(n+nghost,&indices);CHKERRQ(ierr);
    ierr = VecGetOwnershipRange(vv,&rstart,NULL);CHKERRQ(ierr);

    for (i=0; i<n; i++)      indices[i]   = rstart + i;
    for (i=0; i<nghost; i++) indices[n+i] = ghosts[i];

    ierr = ISLocalToGlobalMappingCreate(comm,1,n+nghost,indices,PETSC_OWN_POINTER,&ltog);CHKERRQ(ierr);
    ierr = VecSetLocalToGlobalMapping(vv,ltog);CHKERRQ(ierr);
    ierr = ISLocalToGlobalMappingDestroy(&ltog);CHKERRQ(ierr);
  } else if (vv->ops->create == VecCreate_MPI) SETERRQ(PetscObjectComm((PetscObject)vv),PETSC_ERR_ARG_WRONGSTATE,"Must set local or global size before setting ghosting");
  else if (!((PetscObject)vv)->type_name) SETERRQ(PetscObjectComm((PetscObject)vv),PETSC_ERR_ARG_WRONGSTATE,"Must set type to VECMPI before ghosting");
  PetscFunctionReturn(0);
}

/* ------------------------------------------------------------------------------------------*/
/*@C
   VecCreateGhostBlockWithArray - Creates a parallel vector with ghost padding on each processor;
   the caller allocates the array space. Indices in the ghost region are based on blocks.

   Collective

   Input Parameters:
+  comm - the MPI communicator to use
.  bs - block size
.  n - local vector length
.  N - global vector length (or PETSC_DECIDE to have it calculated if n is given)
.  nghost - number of local ghost blocks
.  ghosts - global indices of ghost blocks (or NULL if not needed), counts are by block not by index, these do not need to be in increasing order (sorted)
-  array - the space to store the vector values (as long as n + nghost*bs)

   Output Parameter:
.  vv - the global vector representation (without ghost points as part of vector)

   Notes:
   Use VecGhostGetLocalForm() to access the local, ghosted representation
   of the vector.

   n is the local vector size (total local size not the number of blocks) while nghost
   is the number of blocks in the ghost portion, i.e. the number of elements in the ghost
   portion is bs*nghost

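   Example Usage:
   A minimal sketch (bs=2, two local blocks, and one ghost block are illustrative):
.vb
   PetscInt    ghosts[1] = {0};  /* global block indices of the ghost blocks */
   PetscScalar array[6];         /* length n + bs*nghost = 4 + 2*1 */
   Vec         v;
   ierr = VecCreateGhostBlockWithArray(PETSC_COMM_WORLD,2,4,PETSC_DECIDE,1,ghosts,array,&v);CHKERRQ(ierr);
.ve
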
   Level: advanced

.seealso: VecCreate(), VecGhostGetLocalForm(), VecGhostRestoreLocalForm(),
          VecCreateGhost(), VecCreateSeqWithArray(), VecCreateMPIWithArray(),
          VecCreateGhostWithArray(), VecCreateGhostBlock()

@*/
PetscErrorCode  VecCreateGhostBlockWithArray(MPI_Comm comm,PetscInt bs,PetscInt n,PetscInt N,PetscInt nghost,const PetscInt ghosts[],const PetscScalar array[],Vec *vv)
{
  PetscErrorCode         ierr;
  Vec_MPI                *w;
  PetscScalar            *larray;
  IS                     from,to;
  ISLocalToGlobalMapping ltog;
  PetscInt               rstart,i,nb,*indices;

  PetscFunctionBegin;
  *vv = NULL;

  if (n == PETSC_DECIDE)      SETERRQ(PETSC_COMM_SELF,PETSC_ERR_ARG_OUTOFRANGE,"Must set local size");
  if (nghost == PETSC_DECIDE) SETERRQ(PETSC_COMM_SELF,PETSC_ERR_ARG_OUTOFRANGE,"Must set local ghost size");
  if (nghost < 0)             SETERRQ(PETSC_COMM_SELF,PETSC_ERR_ARG_OUTOFRANGE,"Ghost length must be >= 0");
  if (n % bs)                 SETERRQ(PETSC_COMM_SELF,PETSC_ERR_ARG_INCOMP,"Local size must be a multiple of block size");
  ierr = PetscSplitOwnership(comm,&n,&N);CHKERRQ(ierr);
  /* Create global representation */
  ierr = VecCreate(comm,vv);CHKERRQ(ierr);
  ierr = VecSetSizes(*vv,n,N);CHKERRQ(ierr);
  ierr = VecSetBlockSize(*vv,bs);CHKERRQ(ierr);
  ierr = VecCreate_MPI_Private(*vv,PETSC_TRUE,nghost*bs,array);CHKERRQ(ierr);
  w    = (Vec_MPI*)(*vv)->data;
  /* Create local representation */
  ierr = VecGetArray(*vv,&larray);CHKERRQ(ierr);
  ierr = VecCreateSeqWithArray(PETSC_COMM_SELF,bs,n+bs*nghost,larray,&w->localrep);CHKERRQ(ierr);
  ierr = PetscLogObjectParent((PetscObject)*vv,(PetscObject)w->localrep);CHKERRQ(ierr);
  ierr = VecRestoreArray(*vv,&larray);CHKERRQ(ierr);

  /*
       Create scatter context for scattering (updating) ghost values
  */
  ierr = ISCreateBlock(comm,bs,nghost,ghosts,PETSC_COPY_VALUES,&from);CHKERRQ(ierr);
  ierr = ISCreateStride(PETSC_COMM_SELF,bs*nghost,n,1,&to);CHKERRQ(ierr);
  ierr = VecScatterCreate(*vv,from,w->localrep,to,&w->localupdate);CHKERRQ(ierr);
  ierr = PetscLogObjectParent((PetscObject)*vv,(PetscObject)w->localupdate);CHKERRQ(ierr);
  ierr = ISDestroy(&to);CHKERRQ(ierr);
  ierr = ISDestroy(&from);CHKERRQ(ierr);

  /* set local to global mapping for ghosted vector */
  nb     = n/bs;
  ierr   = PetscMalloc1(nb+nghost,&indices);CHKERRQ(ierr);
  ierr   = VecGetOwnershipRange(*vv,&rstart,NULL);CHKERRQ(ierr);
  rstart = rstart/bs;

  for (i=0; i<nb; i++)      indices[i]    = rstart + i;
  for (i=0; i<nghost; i++)  indices[nb+i] = ghosts[i];

  ierr = ISLocalToGlobalMappingCreate(comm,bs,nb+nghost,indices,PETSC_OWN_POINTER,&ltog);CHKERRQ(ierr);
  ierr = VecSetLocalToGlobalMapping(*vv,ltog);CHKERRQ(ierr);
  ierr = ISLocalToGlobalMappingDestroy(&ltog);CHKERRQ(ierr);
  PetscFunctionReturn(0);
}

/*@
   VecCreateGhostBlock - Creates a parallel vector with ghost padding on each processor.
        The indexing of the ghost points is done using blocks.

   Collective

   Input Parameters:
+  comm - the MPI communicator to use
.  bs - the block size
.  n - local vector length
.  N - global vector length (or PETSC_DECIDE to have it calculated if n is given)
.  nghost - number of local ghost blocks
-  ghosts - global indices of ghost blocks, counts are by block, not by individual index, these do not need to be in increasing order (sorted)

   Output Parameter:
.  vv - the global vector representation (without ghost points as part of vector)

   Notes:
   Use VecGhostGetLocalForm() to access the local, ghosted representation
   of the vector.

   n is the local vector size (total local size not the number of blocks) while nghost
   is the number of blocks in the ghost portion, i.e. the number of elements in the ghost
   portion is bs*nghost

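   Example Usage:
   A minimal sketch (bs=2, two local blocks, and one ghost block are illustrative):
.vb
   PetscInt ghosts[1] = {0};  /* global block indices of the ghost blocks */
   Vec      v;
   ierr = VecCreateGhostBlock(PETSC_COMM_WORLD,2,4,PETSC_DECIDE,1,ghosts,&v);CHKERRQ(ierr);
.ve
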
   Level: advanced

.seealso: VecCreateSeq(), VecCreate(), VecDuplicate(), VecDuplicateVecs(), VecCreateMPI(),
          VecGhostGetLocalForm(), VecGhostRestoreLocalForm(),
          VecCreateGhostWithArray(), VecCreateMPIWithArray(), VecCreateGhostBlockWithArray()

@*/
PetscErrorCode  VecCreateGhostBlock(MPI_Comm comm,PetscInt bs,PetscInt n,PetscInt N,PetscInt nghost,const PetscInt ghosts[],Vec *vv)
{
  PetscErrorCode ierr;

  PetscFunctionBegin;
  ierr = VecCreateGhostBlockWithArray(comm,bs,n,N,nghost,ghosts,NULL,vv);CHKERRQ(ierr);
  PetscFunctionReturn(0);
}