xref: /petsc/src/vec/vec/impls/mpi/pbvec.c (revision ee12ae39415b2e672d944cdca066227dadbf8b14)
1 
2 /*
3    This file contains routines for Parallel vector operations.
4  */
5 #include <petscsys.h>
6 #include <../src/vec/vec/impls/mpi/pvecimpl.h>   /*I  "petscvec.h"   I*/
7 
8 PetscErrorCode VecDot_MPI(Vec xin,Vec yin,PetscScalar *z)
9 {
10   PetscScalar    sum,work;
11   PetscErrorCode ierr;
12 
13   PetscFunctionBegin;
14   ierr = VecDot_Seq(xin,yin,&work);CHKERRQ(ierr);
15   ierr = MPIU_Allreduce(&work,&sum,1,MPIU_SCALAR,MPIU_SUM,PetscObjectComm((PetscObject)xin));CHKERRQ(ierr);
16   *z   = sum;
17   PetscFunctionReturn(0);
18 }
19 
20 PetscErrorCode VecTDot_MPI(Vec xin,Vec yin,PetscScalar *z)
21 {
22   PetscScalar    sum,work;
23   PetscErrorCode ierr;
24 
25   PetscFunctionBegin;
26   ierr = VecTDot_Seq(xin,yin,&work);CHKERRQ(ierr);
27   ierr = MPIU_Allreduce(&work,&sum,1,MPIU_SCALAR,MPIU_SUM,PetscObjectComm((PetscObject)xin));CHKERRQ(ierr);
28   *z   = sum;
29   PetscFunctionReturn(0);
30 }
31 
32 extern PetscErrorCode VecView_MPI_Draw(Vec,PetscViewer);
33 
34 static PetscErrorCode VecPlaceArray_MPI(Vec vin,const PetscScalar *a)
35 {
36   PetscErrorCode ierr;
37   Vec_MPI        *v = (Vec_MPI*)vin->data;
38 
39   PetscFunctionBegin;
40   if (v->unplacedarray) SETERRQ(PETSC_COMM_SELF,PETSC_ERR_ARG_WRONGSTATE,"VecPlaceArray() was already called on this vector, without a call to VecResetArray()");
41   v->unplacedarray = v->array;  /* save previous array so reset can bring it back */
42   v->array         = (PetscScalar*)a;
43   if (v->localrep) {
44     ierr = VecPlaceArray(v->localrep,a);CHKERRQ(ierr);
45   }
46   PetscFunctionReturn(0);
47 }
48 
/*
   VecDuplicate_MPI - Creates a new parallel vector with the same layout,
   operations table, ghosting, and stash options as win.  Values are NOT copied.
*/
PetscErrorCode VecDuplicate_MPI(Vec win,Vec *v)
{
  PetscErrorCode ierr;
  Vec_MPI        *vw,*w = (Vec_MPI*)win->data;
  PetscScalar    *array;

  PetscFunctionBegin;
  ierr = VecCreate(PetscObjectComm((PetscObject)win),v);CHKERRQ(ierr);
  /* share (reference, not copy) the parallel layout of the parent */
  ierr = PetscLayoutReference(win->map,&(*v)->map);CHKERRQ(ierr);

  ierr = VecCreate_MPI_Private(*v,PETSC_TRUE,w->nghost,NULL);CHKERRQ(ierr);
  vw   = (Vec_MPI*)(*v)->data;
  /* copy the entire function table so any overridden methods carry over */
  ierr = PetscMemcpy((*v)->ops,win->ops,sizeof(struct _VecOps));CHKERRQ(ierr);

  /* save local representation of the parallel vector (and scatter) if it exists */
  if (w->localrep) {
    ierr = VecGetArray(*v,&array);CHKERRQ(ierr);
    ierr = VecCreateSeqWithArray(PETSC_COMM_SELF,PetscAbs(win->map->bs),win->map->n+w->nghost,array,&vw->localrep);CHKERRQ(ierr);
    ierr = PetscMemcpy(vw->localrep->ops,w->localrep->ops,sizeof(struct _VecOps));CHKERRQ(ierr);
    ierr = VecRestoreArray(*v,&array);CHKERRQ(ierr);
    ierr = PetscLogObjectParent((PetscObject)*v,(PetscObject)vw->localrep);CHKERRQ(ierr);

    /* the ghost-update scatter is shared between parent and duplicate (reference counted) */
    vw->localupdate = w->localupdate;
    if (vw->localupdate) {
      ierr = PetscObjectReference((PetscObject)vw->localupdate);CHKERRQ(ierr);
    }
  }

  /* New vector should inherit stashing property of parent */
  (*v)->stash.donotstash   = win->stash.donotstash;
  (*v)->stash.ignorenegidx = win->stash.ignorenegidx;

  /* carry over composed objects and composed functions */
  ierr = PetscObjectListDuplicate(((PetscObject)win)->olist,&((PetscObject)(*v))->olist);CHKERRQ(ierr);
  ierr = PetscFunctionListDuplicate(((PetscObject)win)->qlist,&((PetscObject)(*v))->qlist);CHKERRQ(ierr);

  (*v)->map->bs   = PetscAbs(win->map->bs);
  (*v)->bstash.bs = win->bstash.bs;
  PetscFunctionReturn(0);
}
88 
89 
/*
   VecSetOption_MPI - Sets assembly-related options on a parallel vector.
   Options not listed in the switch are silently ignored.
*/
static PetscErrorCode VecSetOption_MPI(Vec V,VecOption op,PetscBool flag)
{
  Vec_MPI        *v = (Vec_MPI*)V->data;
  PetscErrorCode ierr;

  PetscFunctionBegin;
  switch (op) {
  /* drop (do not communicate) entries destined for other ranks */
  case VEC_IGNORE_OFF_PROC_ENTRIES: V->stash.donotstash = flag;
    break;
  /* silently skip negative indices in VecSetValues() instead of erroring */
  case VEC_IGNORE_NEGATIVE_INDICES: V->stash.ignorenegidx = flag;
    break;
  case VEC_SUBSET_OFF_PROC_ENTRIES:
    v->assembly_subset = flag; /* See the same logic in MatAssembly wrt MAT_SUBSET_OFF_PROC_ENTRIES */
    if (!v->assembly_subset) { /* User indicates "do not reuse the communication pattern" */
      ierr = VecAssemblyReset_MPI(V);CHKERRQ(ierr); /* Reset existing pattern to free memory */
      v->first_assembly_done = PETSC_FALSE; /* Mark the first assembly is not done */
    }
    break;
  }

  PetscFunctionReturn(0);
}
112 
113 
114 static PetscErrorCode VecResetArray_MPI(Vec vin)
115 {
116   Vec_MPI        *v = (Vec_MPI*)vin->data;
117   PetscErrorCode ierr;
118 
119   PetscFunctionBegin;
120   v->array         = v->unplacedarray;
121   v->unplacedarray = NULL;
122   if (v->localrep) {
123     ierr = VecResetArray(v->localrep);CHKERRQ(ierr);
124   }
125   PetscFunctionReturn(0);
126 }
127 
/*
   VecAssemblySend_MPI_Private - Posts the nonblocking sends of one rank's stash
   data during VecAssemblyBegin_MPI_BTS().  Four messages per destination rank,
   matching the four request slots: [0] scalar-stash indices, [1] scalar-stash
   values, [2] block-stash indices, [3] block-stash values (bs scalars per block).
   ctx is the Vec being assembled; sdata is that rank's VecAssemblyHeader.
*/
static PetscErrorCode VecAssemblySend_MPI_Private(MPI_Comm comm,const PetscMPIInt tag[],PetscMPIInt rankid,PetscMPIInt rank,void *sdata,MPI_Request req[],void *ctx)
{
  Vec X = (Vec)ctx;
  Vec_MPI *x = (Vec_MPI*)X->data;
  VecAssemblyHeader *hdr = (VecAssemblyHeader*)sdata;
  PetscInt bs = X->map->bs;
  PetscErrorCode ierr;

  PetscFunctionBegin;
  /* x->first_assembly_done indicates we are reusing a communication network. In that case, some
     messages can be empty, but we have to send them this time if we sent them before because the
     receiver is expecting them.
   */
  if (hdr->count || (x->first_assembly_done && x->sendptrs[rankid].ints)) {
    ierr = MPI_Isend(x->sendptrs[rankid].ints,hdr->count,MPIU_INT,rank,tag[0],comm,&req[0]);CHKERRMPI(ierr);
    ierr = MPI_Isend(x->sendptrs[rankid].scalars,hdr->count,MPIU_SCALAR,rank,tag[1],comm,&req[1]);CHKERRMPI(ierr);
  }
  if (hdr->bcount || (x->first_assembly_done && x->sendptrs[rankid].intb)) {
    ierr = MPI_Isend(x->sendptrs[rankid].intb,hdr->bcount,MPIU_INT,rank,tag[2],comm,&req[2]);CHKERRMPI(ierr);
    ierr = MPI_Isend(x->sendptrs[rankid].scalarb,hdr->bcount*bs,MPIU_SCALAR,rank,tag[3],comm,&req[3]);CHKERRMPI(ierr);
  }
  PetscFunctionReturn(0);
}
151 
/*
   VecAssemblyRecv_MPI_Private - Posts the nonblocking receives matching
   VecAssemblySend_MPI_Private() for one source rank.  Receive buffers are
   carved from the segmented buffers (segrecvint/segrecvscalar) and recorded
   in a VecAssemblyFrame, whose pendings/pendingb fields count the requests
   still outstanding so VecAssemblyEnd_MPI_BTS() knows when a frame is complete.
   rdata is the already-received VecAssemblyHeader announcing the message sizes.
*/
static PetscErrorCode VecAssemblyRecv_MPI_Private(MPI_Comm comm,const PetscMPIInt tag[],PetscMPIInt rank,void *rdata,MPI_Request req[],void *ctx)
{
  Vec X = (Vec)ctx;
  Vec_MPI *x = (Vec_MPI*)X->data;
  VecAssemblyHeader *hdr = (VecAssemblyHeader*)rdata;
  PetscErrorCode ierr;
  PetscInt bs = X->map->bs;
  VecAssemblyFrame *frame;

  PetscFunctionBegin;
  ierr = PetscSegBufferGet(x->segrecvframe,1,&frame);CHKERRQ(ierr);

  if (hdr->count) {
    /* scalar stash: one index message and one value message */
    ierr = PetscSegBufferGet(x->segrecvint,hdr->count,&frame->ints);CHKERRQ(ierr);
    ierr = MPI_Irecv(frame->ints,hdr->count,MPIU_INT,rank,tag[0],comm,&req[0]);CHKERRMPI(ierr);
    ierr = PetscSegBufferGet(x->segrecvscalar,hdr->count,&frame->scalars);CHKERRQ(ierr);
    ierr = MPI_Irecv(frame->scalars,hdr->count,MPIU_SCALAR,rank,tag[1],comm,&req[1]);CHKERRMPI(ierr);
    frame->pendings = 2;
  } else {
    frame->ints = NULL;
    frame->scalars = NULL;
    frame->pendings = 0;
  }

  if (hdr->bcount) {
    /* block stash: bcount block indices and bcount*bs scalar values */
    ierr = PetscSegBufferGet(x->segrecvint,hdr->bcount,&frame->intb);CHKERRQ(ierr);
    ierr = MPI_Irecv(frame->intb,hdr->bcount,MPIU_INT,rank,tag[2],comm,&req[2]);CHKERRMPI(ierr);
    ierr = PetscSegBufferGet(x->segrecvscalar,hdr->bcount*bs,&frame->scalarb);CHKERRQ(ierr);
    ierr = MPI_Irecv(frame->scalarb,hdr->bcount*bs,MPIU_SCALAR,rank,tag[3],comm,&req[3]);CHKERRMPI(ierr);
    frame->pendingb = 2;
  } else {
    frame->intb = NULL;
    frame->scalarb = NULL;
    frame->pendingb = 0;
  }
  PetscFunctionReturn(0);
}
189 
/*
   VecAssemblyBegin_MPI_BTS - Begins assembly using the build-two-sided (BTS)
   algorithm: stashed off-process entries are sorted/compressed, the set of
   destination ranks is determined, and sends/receives are posted.  On the
   first assembly the receivers are discovered with PetscCommBuildTwoSidedFReq();
   when VEC_SUBSET_OFF_PROC_ENTRIES is set, subsequent assemblies reuse the
   recorded rank lists and post the messages directly.
*/
static PetscErrorCode VecAssemblyBegin_MPI_BTS(Vec X)
{
  Vec_MPI        *x = (Vec_MPI*)X->data;
  PetscErrorCode ierr;
  MPI_Comm       comm;
  PetscInt       i,j,jb,bs;

  PetscFunctionBegin;
  if (X->stash.donotstash) PetscFunctionReturn(0);

  ierr = PetscObjectGetComm((PetscObject)X,&comm);CHKERRQ(ierr);
  ierr = VecGetBlockSize(X,&bs);CHKERRQ(ierr);
  if (PetscDefined(USE_DEBUG)) {
    /* in debug mode verify that all ranks agree on the InsertMode (BOR of 1|2 == 3 means they mixed) */
    InsertMode addv;
    ierr = MPIU_Allreduce((PetscEnum*)&X->stash.insertmode,(PetscEnum*)&addv,1,MPIU_ENUM,MPI_BOR,comm);CHKERRQ(ierr);
    if (addv == (ADD_VALUES|INSERT_VALUES)) SETERRQ(comm,PETSC_ERR_ARG_NOTSAMETYPE,"Some processors inserted values while others added");
  }
  X->bstash.insertmode = X->stash.insertmode; /* Block stash implicitly tracks InsertMode of scalar stash */

  ierr = VecStashSortCompress_Private(&X->stash);CHKERRQ(ierr);
  ierr = VecStashSortCompress_Private(&X->bstash);CHKERRQ(ierr);

  if (!x->sendranks) {
    /* first assembly (or pattern was reset): build the merged, sorted list of
       destination ranks from the scalar and block stashes */
    PetscMPIInt nowners,bnowners,*owners,*bowners;
    PetscInt ntmp;
    ierr = VecStashGetOwnerList_Private(&X->stash,X->map,&nowners,&owners);CHKERRQ(ierr);
    ierr = VecStashGetOwnerList_Private(&X->bstash,X->map,&bnowners,&bowners);CHKERRQ(ierr);
    ierr = PetscMergeMPIIntArray(nowners,owners,bnowners,bowners,&ntmp,&x->sendranks);CHKERRQ(ierr);
    x->nsendranks = ntmp;
    ierr = PetscFree(owners);CHKERRQ(ierr);
    ierr = PetscFree(bowners);CHKERRQ(ierr);
    ierr = PetscMalloc1(x->nsendranks,&x->sendhdr);CHKERRQ(ierr);
    ierr = PetscCalloc1(x->nsendranks,&x->sendptrs);CHKERRQ(ierr);
  }
  /* walk the sorted stashes once, assigning each destination rank its contiguous slice */
  for (i=0,j=0,jb=0; i<x->nsendranks; i++) {
    PetscMPIInt rank = x->sendranks[i];
    x->sendhdr[i].insertmode = X->stash.insertmode;
    /* Initialize pointers for non-empty stashes the first time around.  Subsequent assemblies with
     * VEC_SUBSET_OFF_PROC_ENTRIES will leave the old pointers (dangling because the stash has been collected) when
     * there is nothing new to send, so that size-zero messages get sent instead. */
    x->sendhdr[i].count = 0;
    if (X->stash.n) {
      x->sendptrs[i].ints    = &X->stash.idx[j];
      x->sendptrs[i].scalars = &X->stash.array[j];
      for (; j<X->stash.n && X->stash.idx[j] < X->map->range[rank+1]; j++) x->sendhdr[i].count++;
    }
    x->sendhdr[i].bcount = 0;
    if (X->bstash.n) {
      x->sendptrs[i].intb    = &X->bstash.idx[jb];
      x->sendptrs[i].scalarb = &X->bstash.array[jb*bs];
      for (; jb<X->bstash.n && X->bstash.idx[jb]*bs < X->map->range[rank+1]; jb++) x->sendhdr[i].bcount++;
    }
  }

  if (!x->segrecvint) {ierr = PetscSegBufferCreate(sizeof(PetscInt),1000,&x->segrecvint);CHKERRQ(ierr);}
  if (!x->segrecvscalar) {ierr = PetscSegBufferCreate(sizeof(PetscScalar),1000,&x->segrecvscalar);CHKERRQ(ierr);}
  if (!x->segrecvframe) {ierr = PetscSegBufferCreate(sizeof(VecAssemblyFrame),50,&x->segrecvframe);CHKERRQ(ierr);}
  if (x->first_assembly_done) { /* this is not the first assembly */
    /* reuse the known send/recv rank lists; post the messages directly without a rendezvous */
    PetscMPIInt tag[4];
    for (i=0; i<4; i++) {ierr = PetscCommGetNewTag(comm,&tag[i]);CHKERRQ(ierr);}
    for (i=0; i<x->nsendranks; i++) {
      ierr = VecAssemblySend_MPI_Private(comm,tag,i,x->sendranks[i],x->sendhdr+i,x->sendreqs+4*i,X);CHKERRQ(ierr);
    }
    for (i=0; i<x->nrecvranks; i++) {
      ierr = VecAssemblyRecv_MPI_Private(comm,tag,x->recvranks[i],x->recvhdr+i,x->recvreqs+4*i,X);CHKERRQ(ierr);
    }
    x->use_status = PETSC_TRUE; /* sizes may shrink, so message lengths must come from MPI_Status */
  } else { /* First time assembly */
    ierr = PetscCommBuildTwoSidedFReq(comm,3,MPIU_INT,x->nsendranks,x->sendranks,(PetscInt*)x->sendhdr,&x->nrecvranks,&x->recvranks,&x->recvhdr,4,&x->sendreqs,&x->recvreqs,VecAssemblySend_MPI_Private,VecAssemblyRecv_MPI_Private,X);CHKERRQ(ierr);
    x->use_status = PETSC_FALSE;
  }

  /* The first_assembly_done flag is only meaningful when x->assembly_subset is set.
     This line says when assembly_subset is set, then we mark that the first assembly is done.
   */
  x->first_assembly_done = x->assembly_subset;

  {
    PetscInt nstash,reallocs;
    ierr = VecStashGetInfo_Private(&X->stash,&nstash,&reallocs);CHKERRQ(ierr);
    ierr = PetscInfo2(X,"Stash has %D entries, uses %D mallocs.\n",nstash,reallocs);CHKERRQ(ierr);
    ierr = VecStashGetInfo_Private(&X->bstash,&nstash,&reallocs);CHKERRQ(ierr);
    ierr = PetscInfo2(X,"Block-Stash has %D entries, uses %D mallocs.\n",nstash,reallocs);CHKERRQ(ierr);
  }
  PetscFunctionReturn(0);
}
276 
277 static PetscErrorCode VecAssemblyEnd_MPI_BTS(Vec X)
278 {
279   Vec_MPI *x = (Vec_MPI*)X->data;
280   PetscInt bs = X->map->bs;
281   PetscMPIInt npending,*some_indices,r;
282   MPI_Status  *some_statuses;
283   PetscScalar *xarray;
284   PetscErrorCode ierr;
285   VecAssemblyFrame *frame;
286 
287   PetscFunctionBegin;
288   if (X->stash.donotstash) {
289     X->stash.insertmode = NOT_SET_VALUES;
290     X->bstash.insertmode = NOT_SET_VALUES;
291     PetscFunctionReturn(0);
292   }
293 
294   if (!x->segrecvframe) SETERRQ(PETSC_COMM_SELF,PETSC_ERR_PLIB,"Missing segrecvframe! Probably you forgot to call VecAssemblyBegin first");
295   ierr = VecGetArray(X,&xarray);CHKERRQ(ierr);
296   ierr = PetscSegBufferExtractInPlace(x->segrecvframe,&frame);CHKERRQ(ierr);
297   ierr = PetscMalloc2(4*x->nrecvranks,&some_indices,x->use_status?4*x->nrecvranks:0,&some_statuses);CHKERRQ(ierr);
298   for (r=0,npending=0; r<x->nrecvranks; r++) npending += frame[r].pendings + frame[r].pendingb;
299   while (npending>0) {
300     PetscMPIInt ndone=0,ii;
301     /* Filling MPI_Status fields requires some resources from the MPI library.  We skip it on the first assembly, or
302      * when VEC_SUBSET_OFF_PROC_ENTRIES has not been set, because we could exchange exact sizes in the initial
303      * rendezvous.  When the rendezvous is elided, however, we use MPI_Status to get actual message lengths, so that
304      * subsequent assembly can set a proper subset of the values. */
305     ierr = MPI_Waitsome(4*x->nrecvranks,x->recvreqs,&ndone,some_indices,x->use_status?some_statuses:MPI_STATUSES_IGNORE);CHKERRMPI(ierr);
306     for (ii=0; ii<ndone; ii++) {
307       PetscInt i = some_indices[ii]/4,j,k;
308       InsertMode imode = (InsertMode)x->recvhdr[i].insertmode;
309       PetscInt *recvint;
310       PetscScalar *recvscalar;
311       PetscBool intmsg = (PetscBool)(some_indices[ii]%2 == 0);
312       PetscBool blockmsg = (PetscBool)((some_indices[ii]%4)/2 == 1);
313       npending--;
314       if (!blockmsg) { /* Scalar stash */
315         PetscMPIInt count;
316         if (--frame[i].pendings > 0) continue;
317         if (x->use_status) {
318           ierr = MPI_Get_count(&some_statuses[ii],intmsg ? MPIU_INT : MPIU_SCALAR,&count);CHKERRMPI(ierr);
319         } else count = x->recvhdr[i].count;
320         for (j=0,recvint=frame[i].ints,recvscalar=frame[i].scalars; j<count; j++,recvint++) {
321           PetscInt loc = *recvint - X->map->rstart;
322           if (*recvint < X->map->rstart || X->map->rend <= *recvint) SETERRQ3(PETSC_COMM_SELF,PETSC_ERR_PLIB,"Received vector entry %D out of local range [%D,%D)]",*recvint,X->map->rstart,X->map->rend);
323           switch (imode) {
324           case ADD_VALUES:
325             xarray[loc] += *recvscalar++;
326             break;
327           case INSERT_VALUES:
328             xarray[loc] = *recvscalar++;
329             break;
330           default: SETERRQ1(PETSC_COMM_SELF,PETSC_ERR_SUP,"Insert mode not supported 0x%x",imode);
331           }
332         }
333       } else {                  /* Block stash */
334         PetscMPIInt count;
335         if (--frame[i].pendingb > 0) continue;
336         if (x->use_status) {
337           ierr = MPI_Get_count(&some_statuses[ii],intmsg ? MPIU_INT : MPIU_SCALAR,&count);CHKERRMPI(ierr);
338           if (!intmsg) count /= bs; /* Convert from number of scalars to number of blocks */
339         } else count = x->recvhdr[i].bcount;
340         for (j=0,recvint=frame[i].intb,recvscalar=frame[i].scalarb; j<count; j++,recvint++) {
341           PetscInt loc = (*recvint)*bs - X->map->rstart;
342           switch (imode) {
343           case ADD_VALUES:
344             for (k=loc; k<loc+bs; k++) xarray[k] += *recvscalar++;
345             break;
346           case INSERT_VALUES:
347             for (k=loc; k<loc+bs; k++) xarray[k] = *recvscalar++;
348             break;
349           default: SETERRQ1(PETSC_COMM_SELF,PETSC_ERR_SUP,"Insert mode not supported 0x%x",imode);
350           }
351         }
352       }
353     }
354   }
355   ierr = VecRestoreArray(X,&xarray);CHKERRQ(ierr);
356   ierr = MPI_Waitall(4*x->nsendranks,x->sendreqs,MPI_STATUSES_IGNORE);CHKERRMPI(ierr);
357   ierr = PetscFree2(some_indices,some_statuses);CHKERRQ(ierr);
358   if (x->assembly_subset) {
359     void *dummy;                /* reset segbuffers */
360     ierr = PetscSegBufferExtractInPlace(x->segrecvint,&dummy);CHKERRQ(ierr);
361     ierr = PetscSegBufferExtractInPlace(x->segrecvscalar,&dummy);CHKERRQ(ierr);
362   } else {
363     ierr = VecAssemblyReset_MPI(X);CHKERRQ(ierr);
364   }
365 
366   X->stash.insertmode = NOT_SET_VALUES;
367   X->bstash.insertmode = NOT_SET_VALUES;
368   ierr = VecStashScatterEnd_Private(&X->stash);CHKERRQ(ierr);
369   ierr = VecStashScatterEnd_Private(&X->bstash);CHKERRQ(ierr);
370   PetscFunctionReturn(0);
371 }
372 
/*
   VecAssemblyReset_MPI - Frees all communication state (rank lists, headers,
   requests, and segmented receive buffers) built by VecAssemblyBegin_MPI_BTS(),
   forcing the next assembly to rebuild the pattern from scratch.
   PetscFree()/PetscSegBufferDestroy() NULL their arguments, so repeated calls are safe.
*/
PetscErrorCode VecAssemblyReset_MPI(Vec X)
{
  Vec_MPI *x = (Vec_MPI*)X->data;
  PetscErrorCode ierr;

  PetscFunctionBegin;
  ierr = PetscFree(x->sendreqs);CHKERRQ(ierr);
  ierr = PetscFree(x->recvreqs);CHKERRQ(ierr);
  ierr = PetscFree(x->sendranks);CHKERRQ(ierr);
  ierr = PetscFree(x->recvranks);CHKERRQ(ierr);
  ierr = PetscFree(x->sendhdr);CHKERRQ(ierr);
  ierr = PetscFree(x->recvhdr);CHKERRQ(ierr);
  ierr = PetscFree(x->sendptrs);CHKERRQ(ierr);
  ierr = PetscSegBufferDestroy(&x->segrecvint);CHKERRQ(ierr);
  ierr = PetscSegBufferDestroy(&x->segrecvscalar);CHKERRQ(ierr);
  ierr = PetscSegBufferDestroy(&x->segrecvframe);CHKERRQ(ierr);
  PetscFunctionReturn(0);
}
391 
392 
393 static PetscErrorCode VecSetFromOptions_MPI(PetscOptionItems *PetscOptionsObject,Vec X)
394 {
395 #if !defined(PETSC_HAVE_MPIUNI)
396   PetscErrorCode ierr;
397   PetscBool      flg = PETSC_FALSE,set;
398 
399   PetscFunctionBegin;
400   ierr = PetscOptionsHead(PetscOptionsObject,"VecMPI Options");CHKERRQ(ierr);
401   ierr = PetscOptionsBool("-vec_assembly_legacy","Use MPI 1 version of assembly","",flg,&flg,&set);CHKERRQ(ierr);
402   if (set) {
403     X->ops->assemblybegin = flg ? VecAssemblyBegin_MPI : VecAssemblyBegin_MPI_BTS;
404     X->ops->assemblyend   = flg ? VecAssemblyEnd_MPI   : VecAssemblyEnd_MPI_BTS;
405   }
406   ierr = PetscOptionsTail();CHKERRQ(ierr);
407 #else
408   X->ops->assemblybegin =  VecAssemblyBegin_MPI;
409   X->ops->assemblyend   =  VecAssemblyEnd_MPI;
410 #endif
411   PetscFunctionReturn(0);
412 }
413 
414 
/* Virtual function table for VECMPI.  Entry ORDER is the contract with struct
   _VecOps: slots must not be reordered.  Reductions (dot, norm, max, ...) use
   the _MPI variants; purely local, embarrassingly parallel operations (scale,
   axpy, pointwise ops, ...) reuse the _Seq variants on the local array.
   Copied into each vector by VecCreate_MPI_Private(). */
static struct _VecOps DvOps = { VecDuplicate_MPI, /* 1 */
                                VecDuplicateVecs_Default,
                                VecDestroyVecs_Default,
                                VecDot_MPI,
                                VecMDot_MPI,
                                VecNorm_MPI,
                                VecTDot_MPI,
                                VecMTDot_MPI,
                                VecScale_Seq,
                                VecCopy_Seq, /* 10 */
                                VecSet_Seq,
                                VecSwap_Seq,
                                VecAXPY_Seq,
                                VecAXPBY_Seq,
                                VecMAXPY_Seq,
                                VecAYPX_Seq,
                                VecWAXPY_Seq,
                                VecAXPBYPCZ_Seq,
                                VecPointwiseMult_Seq,
                                VecPointwiseDivide_Seq,
                                VecSetValues_MPI, /* 20 */
                                VecAssemblyBegin_MPI_BTS,
                                VecAssemblyEnd_MPI_BTS,
                                NULL,
                                VecGetSize_MPI,
                                VecGetSize_Seq,
                                NULL,
                                VecMax_MPI,
                                VecMin_MPI,
                                VecSetRandom_Seq,
                                VecSetOption_MPI,
                                VecSetValuesBlocked_MPI,
                                VecDestroy_MPI,
                                VecView_MPI,
                                VecPlaceArray_MPI,
                                VecReplaceArray_Seq,
                                VecDot_Seq,
                                VecTDot_Seq,
                                VecNorm_Seq,
                                VecMDot_Seq,
                                VecMTDot_Seq,
                                VecLoad_Default,
                                VecReciprocal_Default,
                                VecConjugate_Seq,
                                NULL,
                                NULL,
                                VecResetArray_MPI,
                                VecSetFromOptions_MPI,/*set from options */
                                VecMaxPointwiseDivide_Seq,
                                VecPointwiseMax_Seq,
                                VecPointwiseMaxAbs_Seq,
                                VecPointwiseMin_Seq,
                                VecGetValues_MPI,
                                NULL,
                                NULL,
                                NULL,
                                NULL,
                                NULL,
                                NULL,
                                VecStrideGather_Default,
                                VecStrideScatter_Default,
                                NULL,
                                NULL,
                                NULL,
                                NULL,
                                NULL,
                                VecStrideSubSetGather_Default,
                                VecStrideSubSetScatter_Default,
                                NULL,
                                NULL,
                                NULL
};
487 
488 /*
489     VecCreate_MPI_Private - Basic create routine called by VecCreate_MPI() (i.e. VecCreateMPI()),
490     VecCreateMPIWithArray(), VecCreate_Shared() (i.e. VecCreateShared()), VecCreateGhost(),
491     VecDuplicate_MPI(), VecCreateGhostWithArray(), VecDuplicate_MPI(), and VecDuplicate_Shared()
492 
493     If alloc is true and array is NULL then this routine allocates the space, otherwise
494     no space is allocated.
495 */
PetscErrorCode VecCreate_MPI_Private(Vec v,PetscBool alloc,PetscInt nghost,const PetscScalar array[])
{
  Vec_MPI        *s;
  PetscErrorCode ierr;

  PetscFunctionBegin;
  ierr           = PetscNewLog(v,&s);CHKERRQ(ierr);
  v->data        = (void*)s;
  /* install the VECMPI function table */
  ierr           = PetscMemcpy(v->ops,&DvOps,sizeof(DvOps));CHKERRQ(ierr);
  s->nghost      = nghost;
  v->petscnative = PETSC_TRUE;
  if (array) v->offloadmask = PETSC_OFFLOAD_CPU;

  ierr = PetscLayoutSetUp(v->map);CHKERRQ(ierr);

  s->array           = (PetscScalar*)array;
  s->array_allocated = NULL;   /* non-NULL only when PETSc owns the storage */
  if (alloc && !array) {
    PetscInt n = v->map->n+nghost;
    /* PetscCalloc1 zero-initializes the local (plus ghost) storage */
    ierr               = PetscCalloc1(n,&s->array);CHKERRQ(ierr);
    ierr               = PetscLogObjectMemory((PetscObject)v,n*sizeof(PetscScalar));CHKERRQ(ierr);
    s->array_allocated = s->array;
  }

  /* By default parallel vectors do not have local representation */
  s->localrep    = NULL;
  s->localupdate = NULL;

  v->stash.insertmode = NOT_SET_VALUES;
  v->bstash.insertmode = NOT_SET_VALUES;
  /* create the stashes. The block-size for bstash is set later when
     VecSetValuesBlocked is called.
  */
  ierr = VecStashCreate_Private(PetscObjectComm((PetscObject)v),1,&v->stash);CHKERRQ(ierr);
  ierr = VecStashCreate_Private(PetscObjectComm((PetscObject)v),PetscAbs(v->map->bs),&v->bstash);CHKERRQ(ierr);

#if defined(PETSC_HAVE_MATLAB_ENGINE)
  ierr = PetscObjectComposeFunction((PetscObject)v,"PetscMatlabEnginePut_C",VecMatlabEnginePut_Default);CHKERRQ(ierr);
  ierr = PetscObjectComposeFunction((PetscObject)v,"PetscMatlabEngineGet_C",VecMatlabEngineGet_Default);CHKERRQ(ierr);
#endif
  ierr = PetscObjectChangeTypeName((PetscObject)v,VECMPI);CHKERRQ(ierr);
  PetscFunctionReturn(0);
}
539 
540 /*MC
541    VECMPI - VECMPI = "mpi" - The basic parallel vector
542 
543    Options Database Keys:
544 . -vec_type mpi - sets the vector type to VECMPI during a call to VecSetFromOptions()
545 
546   Level: beginner
547 
.seealso: VecCreate(), VecSetType(), VecSetFromOptions(), VecCreateMPIWithArray(), VECMPI, VecType, VecCreateMPI()
549 M*/
550 
551 PetscErrorCode VecCreate_MPI(Vec vv)
552 {
553   PetscErrorCode ierr;
554 
555   PetscFunctionBegin;
556   ierr = VecCreate_MPI_Private(vv,PETSC_TRUE,0,NULL);CHKERRQ(ierr);
557   PetscFunctionReturn(0);
558 }
559 
560 /*MC
561    VECSTANDARD = "standard" - A VECSEQ on one process and VECMPI on more than one process
562 
563    Options Database Keys:
564 . -vec_type standard - sets a vector type to standard on calls to VecSetFromOptions()
565 
566   Level: beginner
567 
568 .seealso: VecCreateSeq(), VecCreateMPI()
569 M*/
570 
571 PETSC_EXTERN PetscErrorCode VecCreate_Standard(Vec v)
572 {
573   PetscErrorCode ierr;
574   PetscMPIInt    size;
575 
576   PetscFunctionBegin;
577   ierr = MPI_Comm_size(PetscObjectComm((PetscObject)v),&size);CHKERRMPI(ierr);
578   if (size == 1) {
579     ierr = VecSetType(v,VECSEQ);CHKERRQ(ierr);
580   } else {
581     ierr = VecSetType(v,VECMPI);CHKERRQ(ierr);
582   }
583   PetscFunctionReturn(0);
584 }
585 
586 /*@C
587    VecCreateMPIWithArray - Creates a parallel, array-style vector,
588    where the user provides the array space to store the vector values.
589 
590    Collective
591 
592    Input Parameters:
593 +  comm  - the MPI communicator to use
594 .  bs    - block size, same meaning as VecSetBlockSize()
595 .  n     - local vector length, cannot be PETSC_DECIDE
596 .  N     - global vector length (or PETSC_DECIDE to have calculated)
597 -  array - the user provided array to store the vector values
598 
599    Output Parameter:
600 .  vv - the vector
601 
602    Notes:
603    Use VecDuplicate() or VecDuplicateVecs() to form additional vectors of the
604    same type as an existing vector.
605 
606    If the user-provided array is NULL, then VecPlaceArray() can be used
607    at a later stage to SET the array for storing the vector values.
608 
609    PETSc does NOT free the array when the vector is destroyed via VecDestroy().
610    The user should not free the array until the vector is destroyed.
611 
612    Level: intermediate
613 
614 .seealso: VecCreateSeqWithArray(), VecCreate(), VecDuplicate(), VecDuplicateVecs(), VecCreateGhost(),
615           VecCreateMPI(), VecCreateGhostWithArray(), VecPlaceArray()
616 
617 @*/
618 PetscErrorCode  VecCreateMPIWithArray(MPI_Comm comm,PetscInt bs,PetscInt n,PetscInt N,const PetscScalar array[],Vec *vv)
619 {
620   PetscErrorCode ierr;
621 
622   PetscFunctionBegin;
623   if (n == PETSC_DECIDE) SETERRQ(PETSC_COMM_SELF,PETSC_ERR_ARG_OUTOFRANGE,"Must set local size of vector");
624   ierr = PetscSplitOwnership(comm,&n,&N);CHKERRQ(ierr);
625   ierr = VecCreate(comm,vv);CHKERRQ(ierr);
626   ierr = VecSetSizes(*vv,n,N);CHKERRQ(ierr);
627   ierr = VecSetBlockSize(*vv,bs);CHKERRQ(ierr);
628   ierr = VecCreate_MPI_Private(*vv,PETSC_FALSE,0,array);CHKERRQ(ierr);
629   PetscFunctionReturn(0);
630 }
631 
632 /*@C
633    VecCreateGhostWithArray - Creates a parallel vector with ghost padding on each processor;
634    the caller allocates the array space.
635 
636    Collective
637 
638    Input Parameters:
639 +  comm - the MPI communicator to use
640 .  n - local vector length
641 .  N - global vector length (or PETSC_DECIDE to have calculated if n is given)
642 .  nghost - number of local ghost points
643 .  ghosts - global indices of ghost points (or NULL if not needed), these do not need to be in increasing order (sorted)
644 -  array - the space to store the vector values (as long as n + nghost)
645 
646    Output Parameter:
647 .  vv - the global vector representation (without ghost points as part of vector)
648 
649    Notes:
650    Use VecGhostGetLocalForm() to access the local, ghosted representation
651    of the vector.
652 
653    This also automatically sets the ISLocalToGlobalMapping() for this vector.
654 
655    Level: advanced
656 
657 
658 .seealso: VecCreate(), VecGhostGetLocalForm(), VecGhostRestoreLocalForm(),
659           VecCreateGhost(), VecCreateSeqWithArray(), VecCreateMPIWithArray(),
660           VecCreateGhostBlock(), VecCreateGhostBlockWithArray(), VecMPISetGhost()
661 
662 @*/
PetscErrorCode  VecCreateGhostWithArray(MPI_Comm comm,PetscInt n,PetscInt N,PetscInt nghost,const PetscInt ghosts[],const PetscScalar array[],Vec *vv)
{
  PetscErrorCode         ierr;
  Vec_MPI                *w;
  PetscScalar            *larray;
  IS                     from,to;
  ISLocalToGlobalMapping ltog;
  PetscInt               rstart,i,*indices;

  PetscFunctionBegin;
  *vv = NULL;

  if (n == PETSC_DECIDE)      SETERRQ(PETSC_COMM_SELF,PETSC_ERR_ARG_OUTOFRANGE,"Must set local size");
  if (nghost == PETSC_DECIDE) SETERRQ(PETSC_COMM_SELF,PETSC_ERR_ARG_OUTOFRANGE,"Must set local ghost size");
  if (nghost < 0)             SETERRQ(PETSC_COMM_SELF,PETSC_ERR_ARG_OUTOFRANGE,"Ghost length must be >= 0");
  ierr = PetscSplitOwnership(comm,&n,&N);CHKERRQ(ierr);
  /* Create global representation */
  ierr = VecCreate(comm,vv);CHKERRQ(ierr);
  ierr = VecSetSizes(*vv,n,N);CHKERRQ(ierr);
  ierr = VecCreate_MPI_Private(*vv,PETSC_TRUE,nghost,array);CHKERRQ(ierr);
  w    = (Vec_MPI*)(*vv)->data;
  /* Create local representation: a sequential vector of length n+nghost
     sharing the same storage as the global vector */
  ierr = VecGetArray(*vv,&larray);CHKERRQ(ierr);
  ierr = VecCreateSeqWithArray(PETSC_COMM_SELF,1,n+nghost,larray,&w->localrep);CHKERRQ(ierr);
  ierr = PetscLogObjectParent((PetscObject)*vv,(PetscObject)w->localrep);CHKERRQ(ierr);
  ierr = VecRestoreArray(*vv,&larray);CHKERRQ(ierr);

  /*
       Create scatter context for scattering (updating) ghost values
  */
  /* from: the ghosts' global indices in the parallel vector;
     to:   the trailing nghost slots [n, n+nghost) of the local representation */
  ierr = ISCreateGeneral(comm,nghost,ghosts,PETSC_COPY_VALUES,&from);CHKERRQ(ierr);
  ierr = ISCreateStride(PETSC_COMM_SELF,nghost,n,1,&to);CHKERRQ(ierr);
  ierr = VecScatterCreate(*vv,from,w->localrep,to,&w->localupdate);CHKERRQ(ierr);
  ierr = PetscLogObjectParent((PetscObject)*vv,(PetscObject)w->localupdate);CHKERRQ(ierr);
  ierr = ISDestroy(&to);CHKERRQ(ierr);
  ierr = ISDestroy(&from);CHKERRQ(ierr);

  /* set local to global mapping for ghosted vector */
  ierr = PetscMalloc1(n+nghost,&indices);CHKERRQ(ierr);
  ierr = VecGetOwnershipRange(*vv,&rstart,NULL);CHKERRQ(ierr);
  /* owned entries map to the contiguous range [rstart, rstart+n) ... */
  for (i=0; i<n; i++) {
    indices[i] = rstart + i;
  }
  /* ... followed by the caller-specified ghost indices */
  for (i=0; i<nghost; i++) {
    indices[n+i] = ghosts[i];
  }
  /* PETSC_OWN_POINTER: the mapping takes ownership of indices; do not free here */
  ierr = ISLocalToGlobalMappingCreate(comm,1,n+nghost,indices,PETSC_OWN_POINTER,&ltog);CHKERRQ(ierr);
  ierr = VecSetLocalToGlobalMapping(*vv,ltog);CHKERRQ(ierr);
  ierr = ISLocalToGlobalMappingDestroy(&ltog);CHKERRQ(ierr);
  PetscFunctionReturn(0);
}
714 
715 /*@
716    VecCreateGhost - Creates a parallel vector with ghost padding on each processor.
717 
718    Collective
719 
720    Input Parameters:
721 +  comm - the MPI communicator to use
722 .  n - local vector length
723 .  N - global vector length (or PETSC_DECIDE to have it calculated if n is given)
724 .  nghost - number of local ghost points
725 -  ghosts - global indices of ghost points, these do not need to be in increasing order (sorted)
726 
727    Output Parameter:
728 .  vv - the global vector representation (without ghost points as part of vector)
729 
730    Notes:
731    Use VecGhostGetLocalForm() to access the local, ghosted representation
732    of the vector.
733 
734    This also automatically sets the ISLocalToGlobalMapping() for this vector.
735 
736    Level: advanced
737 
738 .seealso: VecCreateSeq(), VecCreate(), VecDuplicate(), VecDuplicateVecs(), VecCreateMPI(),
739           VecGhostGetLocalForm(), VecGhostRestoreLocalForm(), VecGhostUpdateBegin(),
740           VecCreateGhostWithArray(), VecCreateMPIWithArray(), VecGhostUpdateEnd(),
741           VecCreateGhostBlock(), VecCreateGhostBlockWithArray(), VecMPISetGhost()
742 
743 @*/
PetscErrorCode  VecCreateGhost(MPI_Comm comm,PetscInt n,PetscInt N,PetscInt nghost,const PetscInt ghosts[],Vec *vv)
{
  PetscErrorCode ierr;

  PetscFunctionBegin;
  /* Thin wrapper: passing a NULL array asks VecCreateGhostWithArray() to
     allocate the storage itself, so the two routines share all setup logic */
  ierr = VecCreateGhostWithArray(comm,n,N,nghost,ghosts,NULL,vv);CHKERRQ(ierr);
  PetscFunctionReturn(0);
}
752 
753 /*@
754    VecMPISetGhost - Sets the ghost points for an MPI ghost vector
755 
756    Collective on Vec
757 
758    Input Parameters:
759 +  vv - the MPI vector
760 .  nghost - number of local ghost points
761 -  ghosts - global indices of ghost points, these do not need to be in increasing order (sorted)
762 
763 
764    Notes:
765    Use VecGhostGetLocalForm() to access the local, ghosted representation
766    of the vector.
767 
768    This also automatically sets the ISLocalToGlobalMapping() for this vector.
769 
770    You must call this AFTER you have set the type of the vector (with VecSetType()) and the size (with VecSetSizes()).
771 
772    Level: advanced
773 
774 .seealso: VecCreateSeq(), VecCreate(), VecDuplicate(), VecDuplicateVecs(), VecCreateMPI(),
775           VecGhostGetLocalForm(), VecGhostRestoreLocalForm(), VecGhostUpdateBegin(),
776           VecCreateGhostWithArray(), VecCreateMPIWithArray(), VecGhostUpdateEnd(),
777           VecCreateGhostBlock(), VecCreateGhostBlockWithArray()
778 
779 @*/
PetscErrorCode  VecMPISetGhost(Vec vv,PetscInt nghost,const PetscInt ghosts[])
{
  PetscErrorCode ierr;
  PetscBool      flg;

  PetscFunctionBegin;
  ierr = PetscObjectTypeCompare((PetscObject)vv,VECMPI,&flg);CHKERRQ(ierr);
  /* if already fully existent VECMPI then basically destroy it and rebuild with ghosting */
  if (flg) {
    PetscInt               n,N;
    Vec_MPI                *w;
    PetscScalar            *larray;
    IS                     from,to;
    ISLocalToGlobalMapping ltog;
    PetscInt               rstart,i,*indices;
    MPI_Comm               comm;

    /* Save the sizes before destroying the implementation data, then rebuild
       the vector in place with room for nghost extra (ghost) entries.
       NOTE(review): order matters here -- sizes must be re-set after the
       destroy and before VecCreate_MPI_Private() reallocates the array. */
    ierr = PetscObjectGetComm((PetscObject)vv,&comm);CHKERRQ(ierr);
    n    = vv->map->n;
    N    = vv->map->N;
    ierr = (*vv->ops->destroy)(vv);CHKERRQ(ierr);
    ierr = VecSetSizes(vv,n,N);CHKERRQ(ierr);
    ierr = VecCreate_MPI_Private(vv,PETSC_TRUE,nghost,NULL);CHKERRQ(ierr);
    w    = (Vec_MPI*)(vv)->data;
    /* Create local representation: a sequential vector aliasing the full
       n+nghost local storage (owned entries followed by the ghost slots) */
    ierr = VecGetArray(vv,&larray);CHKERRQ(ierr);
    ierr = VecCreateSeqWithArray(PETSC_COMM_SELF,1,n+nghost,larray,&w->localrep);CHKERRQ(ierr);
    ierr = PetscLogObjectParent((PetscObject)vv,(PetscObject)w->localrep);CHKERRQ(ierr);
    ierr = VecRestoreArray(vv,&larray);CHKERRQ(ierr);

    /*
     Create scatter context for scattering (updating) ghost values:
     global indices `ghosts` are gathered into local slots n..n+nghost-1
     */
    ierr = ISCreateGeneral(comm,nghost,ghosts,PETSC_COPY_VALUES,&from);CHKERRQ(ierr);
    ierr = ISCreateStride(PETSC_COMM_SELF,nghost,n,1,&to);CHKERRQ(ierr);
    ierr = VecScatterCreate(vv,from,w->localrep,to,&w->localupdate);CHKERRQ(ierr);
    ierr = PetscLogObjectParent((PetscObject)vv,(PetscObject)w->localupdate);CHKERRQ(ierr);
    ierr = ISDestroy(&to);CHKERRQ(ierr);
    ierr = ISDestroy(&from);CHKERRQ(ierr);

    /* set local to global mapping for ghosted vector: local index i < n maps
       to rstart+i (owned range), local index n+i maps to ghosts[i] */
    ierr = PetscMalloc1(n+nghost,&indices);CHKERRQ(ierr);
    ierr = VecGetOwnershipRange(vv,&rstart,NULL);CHKERRQ(ierr);

    for (i=0; i<n; i++)      indices[i]   = rstart + i;
    for (i=0; i<nghost; i++) indices[n+i] = ghosts[i];

    /* PETSC_OWN_POINTER: the mapping takes ownership of indices, no free here */
    ierr = ISLocalToGlobalMappingCreate(comm,1,n+nghost,indices,PETSC_OWN_POINTER,&ltog);CHKERRQ(ierr);
    ierr = VecSetLocalToGlobalMapping(vv,ltog);CHKERRQ(ierr);
    ierr = ISLocalToGlobalMappingDestroy(&ltog);CHKERRQ(ierr);
  } else if (vv->ops->create == VecCreate_MPI) SETERRQ(PetscObjectComm((PetscObject)vv),PETSC_ERR_ARG_WRONGSTATE,"Must set local or global size before setting ghosting");
  else if (!((PetscObject)vv)->type_name) SETERRQ(PetscObjectComm((PetscObject)vv),PETSC_ERR_ARG_WRONGSTATE,"Must set type to VECMPI before ghosting");
  PetscFunctionReturn(0);
}
834 
835 
836 /* ------------------------------------------------------------------------------------------*/
837 /*@C
838    VecCreateGhostBlockWithArray - Creates a parallel vector with ghost padding on each processor;
839    the caller allocates the array space. Indices in the ghost region are based on blocks.
840 
841    Collective
842 
843    Input Parameters:
844 +  comm - the MPI communicator to use
845 .  bs - block size
846 .  n - local vector length
847 .  N - global vector length (or PETSC_DECIDE to have calculated if n is given)
848 .  nghost - number of local ghost blocks
849 .  ghosts - global indices of ghost blocks (or NULL if not needed), counts are by block not by index, these do not need to be in increasing order (sorted)
850 -  array - the space to store the vector values (as long as n + nghost*bs)
851 
852    Output Parameter:
853 .  vv - the global vector representation (without ghost points as part of vector)
854 
855    Notes:
856    Use VecGhostGetLocalForm() to access the local, ghosted representation
857    of the vector.
858 
859    n is the local vector size (total local size not the number of blocks) while nghost
860    is the number of blocks in the ghost portion, i.e. the number of elements in the ghost
861    portion is bs*nghost
862 
863    Level: advanced
864 
865 
866 .seealso: VecCreate(), VecGhostGetLocalForm(), VecGhostRestoreLocalForm(),
867           VecCreateGhost(), VecCreateSeqWithArray(), VecCreateMPIWithArray(),
868           VecCreateGhostWithArray(), VecCreateGhostBlock()
869 
870 @*/
871 PetscErrorCode  VecCreateGhostBlockWithArray(MPI_Comm comm,PetscInt bs,PetscInt n,PetscInt N,PetscInt nghost,const PetscInt ghosts[],const PetscScalar array[],Vec *vv)
872 {
873   PetscErrorCode         ierr;
874   Vec_MPI                *w;
875   PetscScalar            *larray;
876   IS                     from,to;
877   ISLocalToGlobalMapping ltog;
878   PetscInt               rstart,i,nb,*indices;
879 
880   PetscFunctionBegin;
881   *vv = NULL;
882 
883   if (n == PETSC_DECIDE)      SETERRQ(PETSC_COMM_SELF,PETSC_ERR_ARG_OUTOFRANGE,"Must set local size");
884   if (nghost == PETSC_DECIDE) SETERRQ(PETSC_COMM_SELF,PETSC_ERR_ARG_OUTOFRANGE,"Must set local ghost size");
885   if (nghost < 0)             SETERRQ(PETSC_COMM_SELF,PETSC_ERR_ARG_OUTOFRANGE,"Ghost length must be >= 0");
886   if (n % bs)                 SETERRQ(PETSC_COMM_SELF,PETSC_ERR_ARG_INCOMP,"Local size must be a multiple of block size");
887   ierr = PetscSplitOwnership(comm,&n,&N);CHKERRQ(ierr);
888   /* Create global representation */
889   ierr = VecCreate(comm,vv);CHKERRQ(ierr);
890   ierr = VecSetSizes(*vv,n,N);CHKERRQ(ierr);
891   ierr = VecSetBlockSize(*vv,bs);CHKERRQ(ierr);
892   ierr = VecCreate_MPI_Private(*vv,PETSC_TRUE,nghost*bs,array);CHKERRQ(ierr);
893   w    = (Vec_MPI*)(*vv)->data;
894   /* Create local representation */
895   ierr = VecGetArray(*vv,&larray);CHKERRQ(ierr);
896   ierr = VecCreateSeqWithArray(PETSC_COMM_SELF,bs,n+bs*nghost,larray,&w->localrep);CHKERRQ(ierr);
897   ierr = PetscLogObjectParent((PetscObject)*vv,(PetscObject)w->localrep);CHKERRQ(ierr);
898   ierr = VecRestoreArray(*vv,&larray);CHKERRQ(ierr);
899 
900   /*
901        Create scatter context for scattering (updating) ghost values
902   */
903   ierr = ISCreateBlock(comm,bs,nghost,ghosts,PETSC_COPY_VALUES,&from);CHKERRQ(ierr);
904   ierr = ISCreateStride(PETSC_COMM_SELF,bs*nghost,n,1,&to);CHKERRQ(ierr);
905   ierr = VecScatterCreate(*vv,from,w->localrep,to,&w->localupdate);CHKERRQ(ierr);
906   ierr = PetscLogObjectParent((PetscObject)*vv,(PetscObject)w->localupdate);CHKERRQ(ierr);
907   ierr = ISDestroy(&to);CHKERRQ(ierr);
908   ierr = ISDestroy(&from);CHKERRQ(ierr);
909 
910   /* set local to global mapping for ghosted vector */
911   nb     = n/bs;
912   ierr   = PetscMalloc1(nb+nghost,&indices);CHKERRQ(ierr);
913   ierr   = VecGetOwnershipRange(*vv,&rstart,NULL);CHKERRQ(ierr);
914   rstart = rstart/bs;
915 
916   for (i=0; i<nb; i++)      indices[i]    = rstart + i;
917   for (i=0; i<nghost; i++)  indices[nb+i] = ghosts[i];
918 
919   ierr = ISLocalToGlobalMappingCreate(comm,bs,nb+nghost,indices,PETSC_OWN_POINTER,&ltog);CHKERRQ(ierr);
920   ierr = VecSetLocalToGlobalMapping(*vv,ltog);CHKERRQ(ierr);
921   ierr = ISLocalToGlobalMappingDestroy(&ltog);CHKERRQ(ierr);
922   PetscFunctionReturn(0);
923 }
924 
925 /*@
926    VecCreateGhostBlock - Creates a parallel vector with ghost padding on each processor.
927         The indexing of the ghost points is done with blocks.
928 
929    Collective
930 
931    Input Parameters:
932 +  comm - the MPI communicator to use
933 .  bs - the block size
934 .  n - local vector length
935 .  N - global vector length (or PETSC_DECIDE to have calculated if n is given)
936 .  nghost - number of local ghost blocks
937 -  ghosts - global indices of ghost blocks, counts are by block, not by individual index, these do not need to be in increasing order (sorted)
938 
939    Output Parameter:
940 .  vv - the global vector representation (without ghost points as part of vector)
941 
942    Notes:
943    Use VecGhostGetLocalForm() to access the local, ghosted representation
944    of the vector.
945 
946    n is the local vector size (total local size not the number of blocks) while nghost
947    is the number of blocks in the ghost portion, i.e. the number of elements in the ghost
948    portion is bs*nghost
949 
950    Level: advanced
951 
952 .seealso: VecCreateSeq(), VecCreate(), VecDuplicate(), VecDuplicateVecs(), VecCreateMPI(),
953           VecGhostGetLocalForm(), VecGhostRestoreLocalForm(),
954           VecCreateGhostWithArray(), VecCreateMPIWithArray(), VecCreateGhostBlockWithArray()
955 
956 @*/
PetscErrorCode  VecCreateGhostBlock(MPI_Comm comm,PetscInt bs,PetscInt n,PetscInt N,PetscInt nghost,const PetscInt ghosts[],Vec *vv)
{
  PetscErrorCode ierr;

  PetscFunctionBegin;
  /* Thin wrapper: passing a NULL array asks VecCreateGhostBlockWithArray()
     to allocate the storage itself, so the two routines share all setup logic */
  ierr = VecCreateGhostBlockWithArray(comm,bs,n,N,nghost,ghosts,NULL,vv);CHKERRQ(ierr);
  PetscFunctionReturn(0);
}
965