xref: /petsc/src/vec/vec/impls/mpi/pbvec.c (revision d21f2a476e879383c6c278d987e40b1dd508a024)
1 
2 /*
3    This file contains routines for parallel vector operations.
4  */
5 #include <../src/vec/vec/impls/mpi/pvecimpl.h>   /*I  "petscvec.h"   I*/
6 
7 #undef __FUNCT__
8 #define __FUNCT__ "VecDot_MPI"
9 static PetscErrorCode VecDot_MPI(Vec xin,Vec yin,PetscScalar *z)
10 {
11   PetscScalar    sum,work;
12   PetscErrorCode ierr;
13 
14   PetscFunctionBegin;
15   ierr = VecDot_Seq(xin,yin,&work);CHKERRQ(ierr);
16   ierr = MPI_Allreduce(&work,&sum,1,MPIU_SCALAR,MPIU_SUM,PetscObjectComm((PetscObject)xin));CHKERRQ(ierr);
17   *z   = sum;
18   PetscFunctionReturn(0);
19 }
20 
21 #undef __FUNCT__
22 #define __FUNCT__ "VecTDot_MPI"
23 static PetscErrorCode VecTDot_MPI(Vec xin,Vec yin,PetscScalar *z)
24 {
25   PetscScalar    sum,work;
26   PetscErrorCode ierr;
27 
28   PetscFunctionBegin;
29   ierr = VecTDot_Seq(xin,yin,&work);CHKERRQ(ierr);
30   ierr = MPI_Allreduce(&work,&sum,1,MPIU_SCALAR,MPIU_SUM,PetscObjectComm((PetscObject)xin));CHKERRQ(ierr);
31   *z   = sum;
32   PetscFunctionReturn(0);
33 }
34 
35 extern PetscErrorCode VecView_MPI_Draw(Vec,PetscViewer);
36 
37 #undef __FUNCT__
38 #define __FUNCT__ "VecPlaceArray_MPI"
39 static PetscErrorCode VecPlaceArray_MPI(Vec vin,const PetscScalar *a)
40 {
41   PetscErrorCode ierr;
42   Vec_MPI        *v = (Vec_MPI*)vin->data;
43 
44   PetscFunctionBegin;
45   if (v->unplacedarray) SETERRQ(PETSC_COMM_SELF,PETSC_ERR_ARG_WRONGSTATE,"VecPlaceArray() was already called on this vector, without a call to VecResetArray()");
46   v->unplacedarray = v->array;  /* save previous array so reset can bring it back */
47   v->array         = (PetscScalar*)a;
48   if (v->localrep) {
49     ierr = VecPlaceArray(v->localrep,a);CHKERRQ(ierr);
50   }
51   PetscFunctionReturn(0);
52 }
53 
54 extern PetscErrorCode VecGetValues_MPI(Vec,PetscInt,const PetscInt[],PetscScalar[]);
55 
56 #undef __FUNCT__
57 #define __FUNCT__ "VecDuplicate_MPI"
58 static PetscErrorCode VecDuplicate_MPI(Vec win,Vec *v)
59 {
60   PetscErrorCode ierr;
61   Vec_MPI        *vw,*w = (Vec_MPI*)win->data;
62   PetscScalar    *array;
63 
64   PetscFunctionBegin;
65   ierr = VecCreate(PetscObjectComm((PetscObject)win),v);CHKERRQ(ierr);
66   ierr = PetscLayoutReference(win->map,&(*v)->map);CHKERRQ(ierr);
67 
68   ierr = VecCreate_MPI_Private(*v,PETSC_TRUE,w->nghost,0);CHKERRQ(ierr);
69   vw   = (Vec_MPI*)(*v)->data;
70   ierr = PetscMemcpy((*v)->ops,win->ops,sizeof(struct _VecOps));CHKERRQ(ierr);
71 
72   /* save local representation of the parallel vector (and scatter) if it exists */
73   if (w->localrep) {
74     ierr = VecGetArray(*v,&array);CHKERRQ(ierr);
75     ierr = VecCreateSeqWithArray(PETSC_COMM_SELF,PetscAbs(win->map->bs),win->map->n+w->nghost,array,&vw->localrep);CHKERRQ(ierr);
76     ierr = PetscMemcpy(vw->localrep->ops,w->localrep->ops,sizeof(struct _VecOps));CHKERRQ(ierr);
77     ierr = VecRestoreArray(*v,&array);CHKERRQ(ierr);
78     ierr = PetscLogObjectParent((PetscObject)*v,(PetscObject)vw->localrep);CHKERRQ(ierr);
79 
80     vw->localupdate = w->localupdate;
81     if (vw->localupdate) {
82       ierr = PetscObjectReference((PetscObject)vw->localupdate);CHKERRQ(ierr);
83     }
84   }
85 
86   /* New vector should inherit stashing property of parent */
87   (*v)->stash.donotstash   = win->stash.donotstash;
88   (*v)->stash.ignorenegidx = win->stash.ignorenegidx;
89 
90   ierr = PetscObjectListDuplicate(((PetscObject)win)->olist,&((PetscObject)(*v))->olist);CHKERRQ(ierr);
91   ierr = PetscFunctionListDuplicate(((PetscObject)win)->qlist,&((PetscObject)(*v))->qlist);CHKERRQ(ierr);
92 
93   (*v)->map->bs   = PetscAbs(win->map->bs);
94   (*v)->bstash.bs = win->bstash.bs;
95   PetscFunctionReturn(0);
96 }
97 
98 extern PetscErrorCode VecSetOption_MPI(Vec,VecOption,PetscBool);
99 extern PetscErrorCode VecResetArray_MPI(Vec);
100 
101 #undef __FUNCT__
102 #define __FUNCT__ "VecAssemblySend_MPI_Private"
103 static PetscErrorCode VecAssemblySend_MPI_Private(MPI_Comm comm,const PetscMPIInt tag[],PetscMPIInt rankid,PetscMPIInt rank,void *sdata,MPI_Request req[],void *ctx)
104 {
105   Vec X = (Vec)ctx;
106   Vec_MPI *x = (Vec_MPI*)X->data;
107   VecAssemblyHeader *hdr = (VecAssemblyHeader*)sdata;
108   PetscInt bs = X->map->bs;
109   PetscErrorCode ierr;
110 
111   PetscFunctionBegin;
112   /* x->recvhdr only exists when we are reusing a communication network.  In that case some messages can be empty,
113    * but if we sent a message to this rank in a previous assembly, we must send one (possibly empty) again because the receiver is expecting it. */
114   if (hdr->count || (x->recvhdr && x->sendptrs[rankid].ints)) {
115     ierr = MPI_Isend(x->sendptrs[rankid].ints,hdr->count,MPIU_INT,rank,tag[0],comm,&req[0]);CHKERRQ(ierr);
116     ierr = MPI_Isend(x->sendptrs[rankid].scalars,hdr->count,MPIU_SCALAR,rank,tag[1],comm,&req[1]);CHKERRQ(ierr);
117   }
118   if (hdr->bcount || (x->recvhdr && x->sendptrs[rankid].intb)) {
119     ierr = MPI_Isend(x->sendptrs[rankid].intb,hdr->bcount,MPIU_INT,rank,tag[2],comm,&req[2]);CHKERRQ(ierr);
120     ierr = MPI_Isend(x->sendptrs[rankid].scalarb,hdr->bcount*bs,MPIU_SCALAR,rank,tag[3],comm,&req[3]);CHKERRQ(ierr);
121   }
122   PetscFunctionReturn(0);
123 }
124 
125 #undef __FUNCT__
126 #define __FUNCT__ "VecAssemblyRecv_MPI_Private"
127 static PetscErrorCode VecAssemblyRecv_MPI_Private(MPI_Comm comm,const PetscMPIInt tag[],PetscMPIInt rank,void *rdata,MPI_Request req[],void *ctx)
128 {
129   Vec X = (Vec)ctx;
130   Vec_MPI *x = (Vec_MPI*)X->data;
131   VecAssemblyHeader *hdr = (VecAssemblyHeader*)rdata;
132   PetscErrorCode ierr;
133   PetscInt bs = X->map->bs;
134   VecAssemblyFrame *frame;
135 
136   PetscFunctionBegin;
137   ierr = PetscSegBufferGet(x->segrecvframe,1,&frame);CHKERRQ(ierr);
138 
139   if (hdr->count) {
140     ierr = PetscSegBufferGet(x->segrecvint,hdr->count,&frame->ints);CHKERRQ(ierr);
141     ierr = MPI_Irecv(frame->ints,hdr->count,MPIU_INT,rank,tag[0],comm,&req[0]);CHKERRQ(ierr);
142     ierr = PetscSegBufferGet(x->segrecvscalar,hdr->count,&frame->scalars);CHKERRQ(ierr);
143     ierr = MPI_Irecv(frame->scalars,hdr->count,MPIU_SCALAR,rank,tag[1],comm,&req[1]);CHKERRQ(ierr);
144     frame->pendings = 2;
145   } else {
146     frame->ints = NULL;
147     frame->scalars = NULL;
148     frame->pendings = 0;
149   }
150 
151   if (hdr->bcount) {
152     ierr = PetscSegBufferGet(x->segrecvint,hdr->bcount,&frame->intb);CHKERRQ(ierr);
153     ierr = MPI_Irecv(frame->intb,hdr->bcount,MPIU_INT,rank,tag[2],comm,&req[2]);CHKERRQ(ierr);
154     ierr = PetscSegBufferGet(x->segrecvscalar,hdr->bcount*bs,&frame->scalarb);CHKERRQ(ierr);
155     ierr = MPI_Irecv(frame->scalarb,hdr->bcount*bs,MPIU_SCALAR,rank,tag[3],comm,&req[3]);CHKERRQ(ierr);
156     frame->pendingb = 2;
157   } else {
158     frame->intb = NULL;
159     frame->scalarb = NULL;
160     frame->pendingb = 0;
161   }
162   PetscFunctionReturn(0);
163 }
164 
165 #undef __FUNCT__
166 #define __FUNCT__ "VecAssemblyBegin_MPI_BTS"
167 static PetscErrorCode VecAssemblyBegin_MPI_BTS(Vec X)
168 {
169   Vec_MPI        *x = (Vec_MPI*)X->data;
170   PetscErrorCode ierr;
171   MPI_Comm       comm;
172   PetscInt       i,j,jb,bs;
173 
174   PetscFunctionBegin;
175   if (X->stash.donotstash) PetscFunctionReturn(0);
176 
177   ierr = PetscObjectGetComm((PetscObject)X,&comm);CHKERRQ(ierr);
178   ierr = VecGetBlockSize(X,&bs);CHKERRQ(ierr);
179 #if defined(PETSC_USE_DEBUG)
180   {
181     InsertMode addv;
182     ierr = MPI_Allreduce((PetscEnum*)&X->stash.insertmode,(PetscEnum*)&addv,1,MPIU_ENUM,MPI_BOR,comm);CHKERRQ(ierr);
183     if (addv == (ADD_VALUES|INSERT_VALUES)) SETERRQ(comm,PETSC_ERR_ARG_NOTSAMETYPE,"Some processors inserted values while others added");
184   }
185 #endif
186   X->bstash.insertmode = X->stash.insertmode; /* Block stash implicitly tracks InsertMode of scalar stash */
187 
188   ierr = VecStashSortCompress_Private(&X->stash);CHKERRQ(ierr);
189   ierr = VecStashSortCompress_Private(&X->bstash);CHKERRQ(ierr);
190 
191   if (!x->sendranks) {
192     PetscMPIInt nowners,bnowners,*owners,*bowners;
193     PetscInt ntmp;
194     ierr = VecStashGetOwnerList_Private(&X->stash,X->map,&nowners,&owners);CHKERRQ(ierr);
195     ierr = VecStashGetOwnerList_Private(&X->bstash,X->map,&bnowners,&bowners);CHKERRQ(ierr);
196     ierr = PetscMergeMPIIntArray(nowners,owners,bnowners,bowners,&ntmp,&x->sendranks);CHKERRQ(ierr);
197     x->nsendranks = ntmp;
198     ierr = PetscFree(owners);CHKERRQ(ierr);
199     ierr = PetscFree(bowners);CHKERRQ(ierr);
200     ierr = PetscMalloc1(x->nsendranks,&x->sendhdr);CHKERRQ(ierr);
201     ierr = PetscCalloc1(x->nsendranks,&x->sendptrs);CHKERRQ(ierr);
202   }
203   for (i=0,j=0,jb=0; i<x->nsendranks; i++) {
204     PetscMPIInt rank = x->sendranks[i];
205     x->sendhdr[i].insertmode = X->stash.insertmode;
206     /* Initialize pointers for non-empty stashes the first time around.  Subsequent assemblies with
207      * VEC_SUBSET_OFF_PROC_ENTRIES will leave the old pointers (dangling because the stash has been collected) when
208      * there is nothing new to send, so that size-zero messages get sent instead. */
209     x->sendhdr[i].count = 0;
210     if (X->stash.n) {
211       x->sendptrs[i].ints    = &X->stash.idx[j];
212       x->sendptrs[i].scalars = &X->stash.array[j];
213       for ( ; j<X->stash.n && X->stash.idx[j] < X->map->range[rank+1]; j++) x->sendhdr[i].count++;
214     }
215     x->sendhdr[i].bcount = 0;
216     if (X->bstash.n) {
217       x->sendptrs[i].intb    = &X->bstash.idx[jb];
218       x->sendptrs[i].scalarb = &X->bstash.array[jb*bs];
219       for ( ; jb<X->bstash.n && X->bstash.idx[jb]*bs < X->map->range[rank+1]; jb++) x->sendhdr[i].bcount++;
220     }
221   }
222 
223   if (!x->segrecvint) {ierr = PetscSegBufferCreate(sizeof(PetscInt),1000,&x->segrecvint);CHKERRQ(ierr);}
224   if (!x->segrecvscalar) {ierr = PetscSegBufferCreate(sizeof(PetscScalar),1000,&x->segrecvscalar);CHKERRQ(ierr);}
225   if (!x->segrecvframe) {ierr = PetscSegBufferCreate(sizeof(VecAssemblyFrame),50,&x->segrecvframe);CHKERRQ(ierr);}
226   if (x->recvhdr) {             /* VEC_SUBSET_OFF_PROC_ENTRIES and this is not the first assembly */
227     PetscMPIInt tag[4];
228     if (!x->assembly_subset) SETERRQ(comm,PETSC_ERR_PLIB,"Attempt to reuse rendezvous when not VEC_SUBSET_OFF_PROC_ENTRIES");
229     for (i=0; i<4; i++) {ierr = PetscCommGetNewTag(comm,&tag[i]);CHKERRQ(ierr);}
230     for (i=0; i<x->nsendranks; i++) {
231       ierr = VecAssemblySend_MPI_Private(comm,tag,i,x->sendranks[i],x->sendhdr+i,x->sendreqs+4*i,X);CHKERRQ(ierr);
232     }
233     for (i=0; i<x->nrecvranks; i++) {
234       ierr = VecAssemblyRecv_MPI_Private(comm,tag,x->recvranks[i],x->recvhdr+i,x->recvreqs+4*i,X);CHKERRQ(ierr);
235     }
236     x->use_status = PETSC_TRUE;
237   } else {                      /* First time */
238     ierr = PetscCommBuildTwoSidedFReq(comm,3,MPIU_INT,x->nsendranks,x->sendranks,(PetscInt*)x->sendhdr,&x->nrecvranks,&x->recvranks,&x->recvhdr,4,&x->sendreqs,&x->recvreqs,VecAssemblySend_MPI_Private,VecAssemblyRecv_MPI_Private,X);CHKERRQ(ierr);
239     x->use_status = PETSC_FALSE;
240   }
241 
242   {
243     PetscInt nstash,reallocs;
244     ierr = VecStashGetInfo_Private(&X->stash,&nstash,&reallocs);CHKERRQ(ierr);
245     ierr = PetscInfo2(X,"Stash has %D entries, uses %D mallocs.\n",nstash,reallocs);CHKERRQ(ierr);
246     ierr = VecStashGetInfo_Private(&X->bstash,&nstash,&reallocs);CHKERRQ(ierr);
247     ierr = PetscInfo2(X,"Block-Stash has %D entries, uses %D mallocs.\n",nstash,reallocs);CHKERRQ(ierr);
248   }
249   PetscFunctionReturn(0);
250 }
251 
252 #undef __FUNCT__
253 #define __FUNCT__ "VecAssemblyEnd_MPI_BTS"
254 static PetscErrorCode VecAssemblyEnd_MPI_BTS(Vec X)
255 {
256   Vec_MPI *x = (Vec_MPI*)X->data;
257   PetscInt bs = X->map->bs;
258   PetscMPIInt npending,*some_indices,r;
259   MPI_Status  *some_statuses;
260   PetscScalar *xarray;
261   PetscErrorCode ierr;
262   VecAssemblyFrame *frame;
263 
264   PetscFunctionBegin;
265   if (X->stash.donotstash) {
266     X->stash.insertmode = NOT_SET_VALUES;
267     X->bstash.insertmode = NOT_SET_VALUES;
268     PetscFunctionReturn(0);
269   }
270 
271   ierr = VecGetArray(X,&xarray);CHKERRQ(ierr);
272   ierr = PetscSegBufferExtractInPlace(x->segrecvframe,&frame);CHKERRQ(ierr);
273   ierr = PetscMalloc2(4*x->nrecvranks,&some_indices,x->use_status?4*x->nrecvranks:0,&some_statuses);CHKERRQ(ierr);
274   for (r=0,npending=0; r<x->nrecvranks; r++) npending += frame[r].pendings + frame[r].pendingb;
275   while (npending>0) {
276     PetscMPIInt ndone,ii;
277     /* Filling MPI_Status fields requires some resources from the MPI library.  We skip it on the first assembly, or
278      * when VEC_SUBSET_OFF_PROC_ENTRIES has not been set, because we could exchange exact sizes in the initial
279      * rendezvous.  When the rendezvous is elided, however, we use MPI_Status to get actual message lengths, so that
280      * subsequent assembly can set a proper subset of the values. */
281     ierr = MPI_Waitsome(4*x->nrecvranks,x->recvreqs,&ndone,some_indices,x->use_status?some_statuses:MPI_STATUSES_IGNORE);CHKERRQ(ierr);
282     for (ii=0; ii<ndone; ii++) {
283       PetscInt i = some_indices[ii]/4,j,k;
284       InsertMode imode = (InsertMode)x->recvhdr[i].insertmode;
285       PetscInt *recvint;
286       PetscScalar *recvscalar;
287       PetscBool intmsg = (PetscBool)(some_indices[ii]%2 == 0);
288       PetscBool blockmsg = (PetscBool)((some_indices[ii]%4)/2 == 1);
289       npending--;
290       if (!blockmsg) { /* Scalar stash */
291         PetscMPIInt count;
292         if (--frame[i].pendings > 0) continue;
293         if (x->use_status) {
294           ierr = MPI_Get_count(&some_statuses[ii],intmsg ? MPIU_INT : MPIU_SCALAR,&count);CHKERRQ(ierr);
295         } else count = x->recvhdr[i].count;
296         for (j=0,recvint=frame[i].ints,recvscalar=frame[i].scalars; j<count; j++,recvint++) {
297           PetscInt loc = *recvint - X->map->rstart;
298           if (*recvint < X->map->rstart || X->map->rend <= *recvint) SETERRQ3(PETSC_COMM_SELF,PETSC_ERR_PLIB,"Received vector entry %D out of local range [%D,%D)",*recvint,X->map->rstart,X->map->rend);
299           switch (imode) {
300           case ADD_VALUES:
301             xarray[loc] += *recvscalar++;
302             break;
303           case INSERT_VALUES:
304             xarray[loc] = *recvscalar++;
305             break;
306           default: SETERRQ1(PETSC_COMM_SELF,PETSC_ERR_SUP,"Insert mode not supported 0x%x",imode);
307           }
308         }
309       } else {                  /* Block stash */
310         PetscMPIInt count;
311         if (--frame[i].pendingb > 0) continue;
312         if (x->use_status) {
313           ierr = MPI_Get_count(&some_statuses[ii],intmsg ? MPIU_INT : MPIU_SCALAR,&count);CHKERRQ(ierr);
314           if (!intmsg) count /= bs; /* Convert from number of scalars to number of blocks */
315         } else count = x->recvhdr[i].bcount;
316         for (j=0,recvint=frame[i].intb,recvscalar=frame[i].scalarb; j<count; j++,recvint++) {
317           PetscInt loc = (*recvint)*bs - X->map->rstart;
318           switch (imode) {
319           case ADD_VALUES:
320             for (k=loc; k<loc+bs; k++) xarray[k] += *recvscalar++;
321             break;
322           case INSERT_VALUES:
323             for (k=loc; k<loc+bs; k++) xarray[k] = *recvscalar++;
324             break;
325           default: SETERRQ1(PETSC_COMM_SELF,PETSC_ERR_SUP,"Insert mode not supported 0x%x",imode);
326           }
327         }
328       }
329     }
330   }
331   ierr = VecRestoreArray(X,&xarray);CHKERRQ(ierr);
332   ierr = MPI_Waitall(4*x->nsendranks,x->sendreqs,MPI_STATUSES_IGNORE);CHKERRQ(ierr);
333   ierr = PetscFree2(some_indices,some_statuses);CHKERRQ(ierr);
334   if (x->assembly_subset) {
335     void *dummy;                /* reset segbuffers */
336     ierr = PetscSegBufferExtractInPlace(x->segrecvint,&dummy);CHKERRQ(ierr);
337     ierr = PetscSegBufferExtractInPlace(x->segrecvscalar,&dummy);CHKERRQ(ierr);
338   } else {
339     ierr = VecAssemblyReset_MPI(X);CHKERRQ(ierr);
340   }
341 
342   X->stash.insertmode = NOT_SET_VALUES;
343   X->bstash.insertmode = NOT_SET_VALUES;
344   ierr = VecStashScatterEnd_Private(&X->stash);CHKERRQ(ierr);
345   ierr = VecStashScatterEnd_Private(&X->bstash);CHKERRQ(ierr);
346   PetscFunctionReturn(0);
347 }
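/*
   Caller-side sketch of the assembly pattern that VecAssemblyBegin_MPI_BTS() and VecAssemblyEnd_MPI_BTS()
   implement (illustrative only; x is assumed to be an existing VECMPI vector and the index/value below
   are made-up placeholders):

      PetscInt       row = 42;      global index, possibly owned by another process
      PetscScalar    val = 1.0;
      PetscErrorCode ierr;

      ierr = VecSetValues(x,1,&row,&val,ADD_VALUES);CHKERRQ(ierr);   off-process entries go into the stash
      ierr = VecAssemblyBegin(x);CHKERRQ(ierr);                      posts the sends/receives set up above
      ierr = VecAssemblyEnd(x);CHKERRQ(ierr);                        completes them and applies the stashed values
*/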
348 
349 #undef __FUNCT__
350 #define __FUNCT__ "VecAssemblyReset_MPI"
351 PetscErrorCode VecAssemblyReset_MPI(Vec X)
352 {
353   Vec_MPI *x = (Vec_MPI*)X->data;
354   PetscErrorCode ierr;
355 
356   PetscFunctionBegin;
357   ierr = PetscFree(x->sendreqs);CHKERRQ(ierr);
358   ierr = PetscFree(x->recvreqs);CHKERRQ(ierr);
359   ierr = PetscFree(x->sendranks);CHKERRQ(ierr);
360   ierr = PetscFree(x->recvranks);CHKERRQ(ierr);
361   ierr = PetscFree(x->sendhdr);CHKERRQ(ierr);
362   ierr = PetscFree(x->recvhdr);CHKERRQ(ierr);
363   ierr = PetscFree(x->sendptrs);CHKERRQ(ierr);
364   ierr = PetscSegBufferDestroy(&x->segrecvint);CHKERRQ(ierr);
365   ierr = PetscSegBufferDestroy(&x->segrecvscalar);CHKERRQ(ierr);
366   ierr = PetscSegBufferDestroy(&x->segrecvframe);CHKERRQ(ierr);
367   PetscFunctionReturn(0);
368 }
369 
370 
371 #undef __FUNCT__
372 #define __FUNCT__ "VecSetFromOptions_MPI"
373 static PetscErrorCode VecSetFromOptions_MPI(PetscOptions *PetscOptionsObject,Vec X)
374 {
375   PetscErrorCode ierr;
376   PetscBool      flg = PETSC_FALSE,set;
377 
378   PetscFunctionBegin;
379   ierr = PetscOptionsBool("-vec_assembly_bts","Use BuildTwoSided version of assembly","",flg,&flg,&set);CHKERRQ(ierr);
380   if (set) {
381     X->ops->assemblybegin = flg ? VecAssemblyBegin_MPI_BTS : VecAssemblyBegin_MPI;
382     X->ops->assemblyend   = flg ? VecAssemblyEnd_MPI_BTS   : VecAssemblyEnd_MPI;
383   }
384   PetscFunctionReturn(0);
385 }
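/*
   Sketch of selecting this assembly path at run time (illustrative; assumes x is a VECMPI vector).
   The option name comes from VecSetFromOptions_MPI() above; VEC_SUBSET_OFF_PROC_ENTRIES is the
   VecSetOption() flag, referred to in the comments above, that allows the rendezvous to be reused
   by later assemblies:

      run with:  ./myapp -vec_assembly_bts

      ierr = VecSetFromOptions(x);CHKERRQ(ierr);
      ierr = VecSetOption(x,VEC_SUBSET_OFF_PROC_ENTRIES,PETSC_TRUE);CHKERRQ(ierr);
*/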
386 
387 static struct _VecOps DvOps = { VecDuplicate_MPI, /* 1 */
388                                 VecDuplicateVecs_Default,
389                                 VecDestroyVecs_Default,
390                                 VecDot_MPI,
391                                 VecMDot_MPI,
392                                 VecNorm_MPI,
393                                 VecTDot_MPI,
394                                 VecMTDot_MPI,
395                                 VecScale_Seq,
396                                 VecCopy_Seq, /* 10 */
397                                 VecSet_Seq,
398                                 VecSwap_Seq,
399                                 VecAXPY_Seq,
400                                 VecAXPBY_Seq,
401                                 VecMAXPY_Seq,
402                                 VecAYPX_Seq,
403                                 VecWAXPY_Seq,
404                                 VecAXPBYPCZ_Seq,
405                                 VecPointwiseMult_Seq,
406                                 VecPointwiseDivide_Seq,
407                                 VecSetValues_MPI, /* 20 */
408                                 VecAssemblyBegin_MPI_BTS,
409                                 VecAssemblyEnd_MPI_BTS,
410                                 0,
411                                 VecGetSize_MPI,
412                                 VecGetSize_Seq,
413                                 0,
414                                 VecMax_MPI,
415                                 VecMin_MPI,
416                                 VecSetRandom_Seq,
417                                 VecSetOption_MPI,
418                                 VecSetValuesBlocked_MPI,
419                                 VecDestroy_MPI,
420                                 VecView_MPI,
421                                 VecPlaceArray_MPI,
422                                 VecReplaceArray_Seq,
423                                 VecDot_Seq,
424                                 VecTDot_Seq,
425                                 VecNorm_Seq,
426                                 VecMDot_Seq,
427                                 VecMTDot_Seq,
428                                 VecLoad_Default,
429                                 VecReciprocal_Default,
430                                 VecConjugate_Seq,
431                                 0,
432                                 0,
433                                 VecResetArray_MPI,
434                                 VecSetFromOptions_MPI,
435                                 VecMaxPointwiseDivide_Seq,
436                                 VecPointwiseMax_Seq,
437                                 VecPointwiseMaxAbs_Seq,
438                                 VecPointwiseMin_Seq,
439                                 VecGetValues_MPI,
440                                 0,
441                                 0,
442                                 0,
443                                 0,
444                                 0,
445                                 0,
446                                 VecStrideGather_Default,
447                                 VecStrideScatter_Default,
448                                 0,
449                                 0,
450                                 0,
451                                 0,
452                                 0,
453                                 VecStrideSubSetGather_Default,
454                                 VecStrideSubSetScatter_Default,
455                                 0,
456                                 0
457 };
458 
459 #undef __FUNCT__
460 #define __FUNCT__ "VecCreate_MPI_Private"
461 /*
462     VecCreate_MPI_Private - Basic create routine called by VecCreate_MPI() (i.e. VecCreateMPI()),
463     VecCreateMPIWithArray(), VecCreate_Shared() (i.e. VecCreateShared()), VecCreateGhost(),
464     VecDuplicate_MPI(), VecCreateGhostWithArray(), and VecDuplicate_Shared()
465 
466     If alloc is true and array is NULL then this routine allocates the space, otherwise
467     no space is allocated.
468 */
469 PetscErrorCode VecCreate_MPI_Private(Vec v,PetscBool alloc,PetscInt nghost,const PetscScalar array[])
470 {
471   Vec_MPI        *s;
472   PetscErrorCode ierr;
473 
474   PetscFunctionBegin;
475   ierr           = PetscNewLog(v,&s);CHKERRQ(ierr);
476   v->data        = (void*)s;
477   ierr           = PetscMemcpy(v->ops,&DvOps,sizeof(DvOps));CHKERRQ(ierr);
478   s->nghost      = nghost;
479   v->petscnative = PETSC_TRUE;
480 
481   ierr = PetscLayoutSetUp(v->map);CHKERRQ(ierr);
482 
483   s->array           = (PetscScalar*)array;
484   s->array_allocated = 0;
485   if (alloc && !array) {
486     PetscInt n = v->map->n+nghost;
487     ierr               = PetscMalloc1(n,&s->array);CHKERRQ(ierr);
488     ierr               = PetscLogObjectMemory((PetscObject)v,n*sizeof(PetscScalar));CHKERRQ(ierr);
489     ierr               = PetscMemzero(s->array,n*sizeof(PetscScalar));CHKERRQ(ierr);
490     s->array_allocated = s->array;
491   }
492 
493   /* By default parallel vectors do not have local representation */
494   s->localrep    = 0;
495   s->localupdate = 0;
496 
497   v->stash.insertmode = NOT_SET_VALUES;
498   v->bstash.insertmode = NOT_SET_VALUES;
499   /* create the stashes. The block-size for bstash is set later when
500      VecSetValuesBlocked is called.
501   */
502   ierr = VecStashCreate_Private(PetscObjectComm((PetscObject)v),1,&v->stash);CHKERRQ(ierr);
503   ierr = VecStashCreate_Private(PetscObjectComm((PetscObject)v),PetscAbs(v->map->bs),&v->bstash);CHKERRQ(ierr);
504 
505 #if defined(PETSC_HAVE_MATLAB_ENGINE)
506   ierr = PetscObjectComposeFunction((PetscObject)v,"PetscMatlabEnginePut_C",VecMatlabEnginePut_Default);CHKERRQ(ierr);
507   ierr = PetscObjectComposeFunction((PetscObject)v,"PetscMatlabEngineGet_C",VecMatlabEngineGet_Default);CHKERRQ(ierr);
508 #endif
509   ierr = PetscObjectChangeTypeName((PetscObject)v,VECMPI);CHKERRQ(ierr);
510   PetscFunctionReturn(0);
511 }
512 
513 /*MC
514    VECMPI - VECMPI = "mpi" - The basic parallel vector
515 
516    Options Database Keys:
517 . -vec_type mpi - sets the vector type to VECMPI during a call to VecSetFromOptions()
518 
519   Level: beginner
520 
521 .seealso: VecCreate(), VecSetType(), VecSetFromOptions(), VecCreateMPIWithArray(), VECMPI, VecType, VecCreateMPI()
522 M*/
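/*
   Sketch of obtaining a VECMPI through the options database (illustrative; the global length is a placeholder):

      Vec            x;
      PetscErrorCode ierr;

      ierr = VecCreate(PETSC_COMM_WORLD,&x);CHKERRQ(ierr);
      ierr = VecSetSizes(x,PETSC_DECIDE,100);CHKERRQ(ierr);
      ierr = VecSetFromOptions(x);CHKERRQ(ierr);          -vec_type mpi selects this implementation
      ierr = VecDestroy(&x);CHKERRQ(ierr);
*/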
523 
524 #undef __FUNCT__
525 #define __FUNCT__ "VecCreate_MPI"
526 PETSC_EXTERN PetscErrorCode VecCreate_MPI(Vec vv)
527 {
528   PetscErrorCode ierr;
529 
530   PetscFunctionBegin;
531   ierr = VecCreate_MPI_Private(vv,PETSC_TRUE,0,0);CHKERRQ(ierr);
532   PetscFunctionReturn(0);
533 }
534 
535 /*MC
536    VECSTANDARD = "standard" - A VECSEQ on one process and VECMPI on more than one process
537 
538    Options Database Keys:
539 . -vec_type standard - sets the vector type to VECSTANDARD during a call to VecSetFromOptions()
540 
541   Level: beginner
542 
543 .seealso: VecCreateSeq(), VecCreateMPI()
544 M*/
545 
546 #undef __FUNCT__
547 #define __FUNCT__ "VecCreate_Standard"
548 PETSC_EXTERN PetscErrorCode VecCreate_Standard(Vec v)
549 {
550   PetscErrorCode ierr;
551   PetscMPIInt    size;
552 
553   PetscFunctionBegin;
554   ierr = MPI_Comm_size(PetscObjectComm((PetscObject)v),&size);CHKERRQ(ierr);
555   if (size == 1) {
556     ierr = VecSetType(v,VECSEQ);CHKERRQ(ierr);
557   } else {
558     ierr = VecSetType(v,VECMPI);CHKERRQ(ierr);
559   }
560   PetscFunctionReturn(0);
561 }
562 
563 #undef __FUNCT__
564 #define __FUNCT__ "VecCreateMPIWithArray"
565 /*@C
566    VecCreateMPIWithArray - Creates a parallel, array-style vector,
567    where the user provides the array space to store the vector values.
568 
569    Collective on MPI_Comm
570 
571    Input Parameters:
572 +  comm  - the MPI communicator to use
573 .  bs    - block size, same meaning as VecSetBlockSize()
574 .  n     - local vector length, cannot be PETSC_DECIDE
575 .  N     - global vector length (or PETSC_DECIDE to have it calculated)
576 -  array - the user provided array to store the vector values
577 
578    Output Parameter:
579 .  vv - the vector
580 
581    Notes:
582    Use VecDuplicate() or VecDuplicateVecs() to form additional vectors of the
583    same type as an existing vector.
584 
585    If the user-provided array is NULL, then VecPlaceArray() can be used
586    at a later stage to SET the array for storing the vector values.
587 
588    PETSc does NOT free the array when the vector is destroyed via VecDestroy().
589    The user should not free the array until the vector is destroyed.
590 
591    Level: intermediate
592 
593    Concepts: vectors^creating with array
594 
595 .seealso: VecCreateSeqWithArray(), VecCreate(), VecDuplicate(), VecDuplicateVecs(), VecCreateGhost(),
596           VecCreateMPI(), VecCreateGhostWithArray(), VecPlaceArray()
597 
598 @*/
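/*
   Illustrative sketch of VecCreateMPIWithArray() (the block size, local length, and values are placeholders;
   as noted above, the array is not freed by VecDestroy() and must outlive the vector):

      PetscInt       i,n = 5;
      PetscScalar    array[5];
      Vec            x;
      PetscErrorCode ierr;

      for (i=0; i<n; i++) array[i] = (PetscScalar)i;
      ierr = VecCreateMPIWithArray(PETSC_COMM_WORLD,1,n,PETSC_DECIDE,array,&x);CHKERRQ(ierr);
      ierr = VecDestroy(&x);CHKERRQ(ierr);
*/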
599 PetscErrorCode  VecCreateMPIWithArray(MPI_Comm comm,PetscInt bs,PetscInt n,PetscInt N,const PetscScalar array[],Vec *vv)
600 {
601   PetscErrorCode ierr;
602 
603   PetscFunctionBegin;
604   if (n == PETSC_DECIDE) SETERRQ(PETSC_COMM_SELF,PETSC_ERR_ARG_OUTOFRANGE,"Must set local size of vector");
605   ierr = PetscSplitOwnership(comm,&n,&N);CHKERRQ(ierr);
606   ierr = VecCreate(comm,vv);CHKERRQ(ierr);
607   ierr = VecSetSizes(*vv,n,N);CHKERRQ(ierr);
608   ierr = VecSetBlockSize(*vv,bs);CHKERRQ(ierr);
609   ierr = VecCreate_MPI_Private(*vv,PETSC_FALSE,0,array);CHKERRQ(ierr);
610   PetscFunctionReturn(0);
611 }
612 
613 #undef __FUNCT__
614 #define __FUNCT__ "VecCreateGhostWithArray"
615 /*@C
616    VecCreateGhostWithArray - Creates a parallel vector with ghost padding on each processor;
617    the caller allocates the array space.
618 
619    Collective on MPI_Comm
620 
621    Input Parameters:
622 +  comm - the MPI communicator to use
623 .  n - local vector length
624 .  N - global vector length (or PETSC_DECIDE to have it calculated if n is given)
625 .  nghost - number of local ghost points
626 .  ghosts - global indices of ghost points (or NULL if not needed), these do not need to be in increasing order (sorted)
627 -  array - the space to store the vector values (of length at least n + nghost)
628 
629    Output Parameter:
630 .  vv - the global vector representation (without ghost points as part of vector)
631 
632    Notes:
633    Use VecGhostGetLocalForm() to access the local, ghosted representation
634    of the vector.
635 
636    This also automatically sets the ISLocalToGlobalMapping() for this vector.
637 
638    Level: advanced
639 
640    Concepts: vectors^creating with array
641    Concepts: vectors^ghosted
642 
643 .seealso: VecCreate(), VecGhostGetLocalForm(), VecGhostRestoreLocalForm(),
644           VecCreateGhost(), VecCreateSeqWithArray(), VecCreateMPIWithArray(),
645           VecCreateGhostBlock(), VecCreateGhostBlockWithArray(), VecMPISetGhost()
646 
647 @*/
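/*
   Illustrative sketch of VecCreateGhostWithArray() (sizes and ghost indices are made-up placeholders; in a
   real code the ghost indices name entries owned by other processes and the array has length n + nghost):

      PetscInt       n = 4,nghost = 2,ghosts[2] = {0,9};
      PetscScalar    array[6];
      Vec            x,lx;
      PetscErrorCode ierr;

      ierr = VecCreateGhostWithArray(PETSC_COMM_WORLD,n,PETSC_DECIDE,nghost,ghosts,array,&x);CHKERRQ(ierr);
      ierr = VecGhostGetLocalForm(x,&lx);CHKERRQ(ierr);        sequential form of length n + nghost
      ierr = VecGhostRestoreLocalForm(x,&lx);CHKERRQ(ierr);
      ierr = VecDestroy(&x);CHKERRQ(ierr);
*/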
648 PetscErrorCode  VecCreateGhostWithArray(MPI_Comm comm,PetscInt n,PetscInt N,PetscInt nghost,const PetscInt ghosts[],const PetscScalar array[],Vec *vv)
649 {
650   PetscErrorCode         ierr;
651   Vec_MPI                *w;
652   PetscScalar            *larray;
653   IS                     from,to;
654   ISLocalToGlobalMapping ltog;
655   PetscInt               rstart,i,*indices;
656 
657   PetscFunctionBegin;
658   *vv = 0;
659 
660   if (n == PETSC_DECIDE)      SETERRQ(PETSC_COMM_SELF,PETSC_ERR_ARG_OUTOFRANGE,"Must set local size");
661   if (nghost == PETSC_DECIDE) SETERRQ(PETSC_COMM_SELF,PETSC_ERR_ARG_OUTOFRANGE,"Must set local ghost size");
662   if (nghost < 0)             SETERRQ(PETSC_COMM_SELF,PETSC_ERR_ARG_OUTOFRANGE,"Ghost length must be >= 0");
663   ierr = PetscSplitOwnership(comm,&n,&N);CHKERRQ(ierr);
664   /* Create global representation */
665   ierr = VecCreate(comm,vv);CHKERRQ(ierr);
666   ierr = VecSetSizes(*vv,n,N);CHKERRQ(ierr);
667   ierr = VecCreate_MPI_Private(*vv,PETSC_TRUE,nghost,array);CHKERRQ(ierr);
668   w    = (Vec_MPI*)(*vv)->data;
669   /* Create local representation */
670   ierr = VecGetArray(*vv,&larray);CHKERRQ(ierr);
671   ierr = VecCreateSeqWithArray(PETSC_COMM_SELF,1,n+nghost,larray,&w->localrep);CHKERRQ(ierr);
672   ierr = PetscLogObjectParent((PetscObject)*vv,(PetscObject)w->localrep);CHKERRQ(ierr);
673   ierr = VecRestoreArray(*vv,&larray);CHKERRQ(ierr);
674 
675   /*
676        Create scatter context for scattering (updating) ghost values
677   */
678   ierr = ISCreateGeneral(comm,nghost,ghosts,PETSC_COPY_VALUES,&from);CHKERRQ(ierr);
679   ierr = ISCreateStride(PETSC_COMM_SELF,nghost,n,1,&to);CHKERRQ(ierr);
680   ierr = VecScatterCreate(*vv,from,w->localrep,to,&w->localupdate);CHKERRQ(ierr);
681   ierr = PetscLogObjectParent((PetscObject)*vv,(PetscObject)w->localupdate);CHKERRQ(ierr);
682   ierr = ISDestroy(&to);CHKERRQ(ierr);
683   ierr = ISDestroy(&from);CHKERRQ(ierr);
684 
685   /* set local to global mapping for ghosted vector */
686   ierr = PetscMalloc1(n+nghost,&indices);CHKERRQ(ierr);
687   ierr = VecGetOwnershipRange(*vv,&rstart,NULL);CHKERRQ(ierr);
688   for (i=0; i<n; i++) {
689     indices[i] = rstart + i;
690   }
691   for (i=0; i<nghost; i++) {
692     indices[n+i] = ghosts[i];
693   }
694   ierr = ISLocalToGlobalMappingCreate(comm,1,n+nghost,indices,PETSC_OWN_POINTER,&ltog);CHKERRQ(ierr);
695   ierr = VecSetLocalToGlobalMapping(*vv,ltog);CHKERRQ(ierr);
696   ierr = ISLocalToGlobalMappingDestroy(&ltog);CHKERRQ(ierr);
697   PetscFunctionReturn(0);
698 }
699 
700 #undef __FUNCT__
701 #define __FUNCT__ "VecCreateGhost"
702 /*@
703    VecCreateGhost - Creates a parallel vector with ghost padding on each processor.
704 
705    Collective on MPI_Comm
706 
707    Input Parameters:
708 +  comm - the MPI communicator to use
709 .  n - local vector length
710 .  N - global vector length (or PETSC_DECIDE to have it calculated if n is given)
711 .  nghost - number of local ghost points
712 -  ghosts - global indices of ghost points, these do not need to be in increasing order (sorted)
713 
714    Output Parameter:
715 .  vv - the global vector representation (without ghost points as part of vector)
716 
717    Notes:
718    Use VecGhostGetLocalForm() to access the local, ghosted representation
719    of the vector.
720 
721    This also automatically sets the ISLocalToGlobalMapping() for this vector.
722 
723    Level: advanced
724 
725    Concepts: vectors^ghosted
726 
727 .seealso: VecCreateSeq(), VecCreate(), VecDuplicate(), VecDuplicateVecs(), VecCreateMPI(),
728           VecGhostGetLocalForm(), VecGhostRestoreLocalForm(), VecGhostUpdateBegin(),
729           VecCreateGhostWithArray(), VecCreateMPIWithArray(), VecGhostUpdateEnd(),
730           VecCreateGhostBlock(), VecCreateGhostBlockWithArray(), VecMPISetGhost()
731 
732 @*/
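/*
   Illustrative sketch of the usual VecCreateGhost() update cycle (local size and ghost indices are
   placeholders; in a real code the ghosts are global indices of entries owned by other processes):

      PetscInt       n = 4,nghost = 2,ghosts[2] = {0,9};
      Vec            x;
      PetscErrorCode ierr;

      ierr = VecCreateGhost(PETSC_COMM_WORLD,n,PETSC_DECIDE,nghost,ghosts,&x);CHKERRQ(ierr);
      ierr = VecSet(x,1.0);CHKERRQ(ierr);
      ierr = VecGhostUpdateBegin(x,INSERT_VALUES,SCATTER_FORWARD);CHKERRQ(ierr);    owners -> ghost copies
      ierr = VecGhostUpdateEnd(x,INSERT_VALUES,SCATTER_FORWARD);CHKERRQ(ierr);
      ierr = VecDestroy(&x);CHKERRQ(ierr);
*/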
733 PetscErrorCode  VecCreateGhost(MPI_Comm comm,PetscInt n,PetscInt N,PetscInt nghost,const PetscInt ghosts[],Vec *vv)
734 {
735   PetscErrorCode ierr;
736 
737   PetscFunctionBegin;
738   ierr = VecCreateGhostWithArray(comm,n,N,nghost,ghosts,0,vv);CHKERRQ(ierr);
739   PetscFunctionReturn(0);
740 }
741 
742 #undef __FUNCT__
743 #define __FUNCT__ "VecMPISetGhost"
744 /*@
745    VecMPISetGhost - Sets the ghost points for an MPI ghost vector
746 
747    Collective on Vec
748 
749    Input Parameters:
750 +  vv - the MPI vector
751 .  nghost - number of local ghost points
752 -  ghosts - global indices of ghost points, these do not need to be in increasing order (sorted)
753 
754 
755    Notes:
756    Use VecGhostGetLocalForm() to access the local, ghosted representation
757    of the vector.
758 
759    This also automatically sets the ISLocalToGlobalMapping() for this vector.
760 
761    You must call this AFTER you have set the type of the vector (with VecSetType()) and the size (with VecSetSizes()).
762 
763    Level: advanced
764 
765    Concepts: vectors^ghosted
766 
767 .seealso: VecCreateSeq(), VecCreate(), VecDuplicate(), VecDuplicateVecs(), VecCreateMPI(),
768           VecGhostGetLocalForm(), VecGhostRestoreLocalForm(), VecGhostUpdateBegin(),
769           VecCreateGhostWithArray(), VecCreateMPIWithArray(), VecGhostUpdateEnd(),
770           VecCreateGhostBlock(), VecCreateGhostBlockWithArray()
771 
772 @*/
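/*
   Illustrative sketch of VecMPISetGhost() (sizes and ghost indices are placeholders; per the note above,
   the type and sizes are set before the ghosting):

      PetscInt       nghost = 2,ghosts[2] = {0,9};
      Vec            x;
      PetscErrorCode ierr;

      ierr = VecCreate(PETSC_COMM_WORLD,&x);CHKERRQ(ierr);
      ierr = VecSetSizes(x,4,PETSC_DECIDE);CHKERRQ(ierr);
      ierr = VecSetType(x,VECMPI);CHKERRQ(ierr);
      ierr = VecMPISetGhost(x,nghost,ghosts);CHKERRQ(ierr);
      ierr = VecDestroy(&x);CHKERRQ(ierr);
*/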
773 PetscErrorCode  VecMPISetGhost(Vec vv,PetscInt nghost,const PetscInt ghosts[])
774 {
775   PetscErrorCode ierr;
776   PetscBool      flg;
777 
778   PetscFunctionBegin;
779   ierr = PetscObjectTypeCompare((PetscObject)vv,VECMPI,&flg);CHKERRQ(ierr);
780   /* if already a fully existent VECMPI then destroy it and rebuild with ghosting */
781   if (flg) {
782     PetscInt               n,N;
783     Vec_MPI                *w;
784     PetscScalar            *larray;
785     IS                     from,to;
786     ISLocalToGlobalMapping ltog;
787     PetscInt               rstart,i,*indices;
788     MPI_Comm               comm;
789 
790     ierr = PetscObjectGetComm((PetscObject)vv,&comm);CHKERRQ(ierr);
791     n    = vv->map->n;
792     N    = vv->map->N;
793     ierr = (*vv->ops->destroy)(vv);CHKERRQ(ierr);
794     ierr = VecSetSizes(vv,n,N);CHKERRQ(ierr);
795     ierr = VecCreate_MPI_Private(vv,PETSC_TRUE,nghost,NULL);CHKERRQ(ierr);
796     w    = (Vec_MPI*)(vv)->data;
797     /* Create local representation */
798     ierr = VecGetArray(vv,&larray);CHKERRQ(ierr);
799     ierr = VecCreateSeqWithArray(PETSC_COMM_SELF,1,n+nghost,larray,&w->localrep);CHKERRQ(ierr);
800     ierr = PetscLogObjectParent((PetscObject)vv,(PetscObject)w->localrep);CHKERRQ(ierr);
801     ierr = VecRestoreArray(vv,&larray);CHKERRQ(ierr);
802 
803     /*
804      Create scatter context for scattering (updating) ghost values
805      */
806     ierr = ISCreateGeneral(comm,nghost,ghosts,PETSC_COPY_VALUES,&from);CHKERRQ(ierr);
807     ierr = ISCreateStride(PETSC_COMM_SELF,nghost,n,1,&to);CHKERRQ(ierr);
808     ierr = VecScatterCreate(vv,from,w->localrep,to,&w->localupdate);CHKERRQ(ierr);
809     ierr = PetscLogObjectParent((PetscObject)vv,(PetscObject)w->localupdate);CHKERRQ(ierr);
810     ierr = ISDestroy(&to);CHKERRQ(ierr);
811     ierr = ISDestroy(&from);CHKERRQ(ierr);
812 
813     /* set local to global mapping for ghosted vector */
814     ierr = PetscMalloc1(n+nghost,&indices);CHKERRQ(ierr);
815     ierr = VecGetOwnershipRange(vv,&rstart,NULL);CHKERRQ(ierr);
816 
817     for (i=0; i<n; i++)      indices[i]   = rstart + i;
818     for (i=0; i<nghost; i++) indices[n+i] = ghosts[i];
819 
820     ierr = ISLocalToGlobalMappingCreate(comm,1,n+nghost,indices,PETSC_OWN_POINTER,&ltog);CHKERRQ(ierr);
821     ierr = VecSetLocalToGlobalMapping(vv,ltog);CHKERRQ(ierr);
822     ierr = ISLocalToGlobalMappingDestroy(&ltog);CHKERRQ(ierr);
823   } else if (vv->ops->create == VecCreate_MPI) SETERRQ(PetscObjectComm((PetscObject)vv),PETSC_ERR_ARG_WRONGSTATE,"Must set local or global size before setting ghosting");
824   else if (!((PetscObject)vv)->type_name) SETERRQ(PetscObjectComm((PetscObject)vv),PETSC_ERR_ARG_WRONGSTATE,"Must set type to VECMPI before ghosting");
825   PetscFunctionReturn(0);
826 }
827 
828 
829 /* ------------------------------------------------------------------------------------------*/
830 #undef __FUNCT__
831 #define __FUNCT__ "VecCreateGhostBlockWithArray"
832 /*@C
833    VecCreateGhostBlockWithArray - Creates a parallel vector with ghost padding on each processor;
834    the caller allocates the array space. Indices in the ghost region are based on blocks.
835 
836    Collective on MPI_Comm
837 
838    Input Parameters:
839 +  comm - the MPI communicator to use
840 .  bs - block size
841 .  n - local vector length
842 .  N - global vector length (or PETSC_DECIDE to have it calculated if n is given)
843 .  nghost - number of local ghost blocks
844 .  ghosts - global indices of ghost blocks (or NULL if not needed), counts are by block not by index, these do not need to be in increasing order (sorted)
845 -  array - the space to store the vector values (of length at least n + nghost*bs)
846 
847    Output Parameter:
848 .  vv - the global vector representation (without ghost points as part of vector)
849 
850    Notes:
851    Use VecGhostGetLocalForm() to access the local, ghosted representation
852    of the vector.
853 
854    n is the local vector size (total local size not the number of blocks) while nghost
855    is the number of blocks in the ghost portion, i.e. the number of elements in the ghost
856    portion is bs*nghost
857 
858    Level: advanced
859 
860    Concepts: vectors^creating ghosted
861    Concepts: vectors^creating with array
862 
863 .seealso: VecCreate(), VecGhostGetLocalForm(), VecGhostRestoreLocalForm(),
864           VecCreateGhost(), VecCreateSeqWithArray(), VecCreateMPIWithArray(),
865           VecCreateGhostWithArray(), VecCreateGhostBlock()
866 
867 @*/
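/*
   Illustrative sketch of VecCreateGhostBlockWithArray() (block size, lengths, and indices are placeholders;
   the ghost index counts blocks, not scalar entries, and the array has length n + nghost*bs):

      PetscInt       bs = 2,n = 6,nghost = 1,ghosts[1] = {0};
      PetscScalar    array[8];
      Vec            x;
      PetscErrorCode ierr;

      ierr = VecCreateGhostBlockWithArray(PETSC_COMM_WORLD,bs,n,PETSC_DECIDE,nghost,ghosts,array,&x);CHKERRQ(ierr);
      ierr = VecDestroy(&x);CHKERRQ(ierr);
*/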
868 PetscErrorCode  VecCreateGhostBlockWithArray(MPI_Comm comm,PetscInt bs,PetscInt n,PetscInt N,PetscInt nghost,const PetscInt ghosts[],const PetscScalar array[],Vec *vv)
869 {
870   PetscErrorCode         ierr;
871   Vec_MPI                *w;
872   PetscScalar            *larray;
873   IS                     from,to;
874   ISLocalToGlobalMapping ltog;
875   PetscInt               rstart,i,nb,*indices;
876 
877   PetscFunctionBegin;
878   *vv = 0;
879 
880   if (n == PETSC_DECIDE)      SETERRQ(PETSC_COMM_SELF,PETSC_ERR_ARG_OUTOFRANGE,"Must set local size");
881   if (nghost == PETSC_DECIDE) SETERRQ(PETSC_COMM_SELF,PETSC_ERR_ARG_OUTOFRANGE,"Must set local ghost size");
882   if (nghost < 0)             SETERRQ(PETSC_COMM_SELF,PETSC_ERR_ARG_OUTOFRANGE,"Ghost length must be >= 0");
883   if (n % bs)                 SETERRQ(PETSC_COMM_SELF,PETSC_ERR_ARG_INCOMP,"Local size must be a multiple of block size");
884   ierr = PetscSplitOwnership(comm,&n,&N);CHKERRQ(ierr);
885   /* Create global representation */
886   ierr = VecCreate(comm,vv);CHKERRQ(ierr);
887   ierr = VecSetSizes(*vv,n,N);CHKERRQ(ierr);
888   ierr = VecSetBlockSize(*vv,bs);CHKERRQ(ierr);
889   ierr = VecCreate_MPI_Private(*vv,PETSC_TRUE,nghost*bs,array);CHKERRQ(ierr);
890   w    = (Vec_MPI*)(*vv)->data;
891   /* Create local representation */
892   ierr = VecGetArray(*vv,&larray);CHKERRQ(ierr);
893   ierr = VecCreateSeqWithArray(PETSC_COMM_SELF,bs,n+bs*nghost,larray,&w->localrep);CHKERRQ(ierr);
894   ierr = PetscLogObjectParent((PetscObject)*vv,(PetscObject)w->localrep);CHKERRQ(ierr);
895   ierr = VecRestoreArray(*vv,&larray);CHKERRQ(ierr);
896 
897   /*
898        Create scatter context for scattering (updating) ghost values
899   */
900   ierr = ISCreateBlock(comm,bs,nghost,ghosts,PETSC_COPY_VALUES,&from);CHKERRQ(ierr);
901   ierr = ISCreateStride(PETSC_COMM_SELF,bs*nghost,n,1,&to);CHKERRQ(ierr);
902   ierr = VecScatterCreate(*vv,from,w->localrep,to,&w->localupdate);CHKERRQ(ierr);
903   ierr = PetscLogObjectParent((PetscObject)*vv,(PetscObject)w->localupdate);CHKERRQ(ierr);
904   ierr = ISDestroy(&to);CHKERRQ(ierr);
905   ierr = ISDestroy(&from);CHKERRQ(ierr);
906 
907   /* set local to global mapping for ghosted vector */
908   nb   = n/bs;
909   ierr = PetscMalloc1(nb+nghost,&indices);CHKERRQ(ierr);
910   ierr = VecGetOwnershipRange(*vv,&rstart,NULL);CHKERRQ(ierr);
911 
912   for (i=0; i<nb; i++)      indices[i]    = rstart + i;
913   for (i=0; i<nghost; i++)  indices[nb+i] = ghosts[i];
914 
915   ierr = ISLocalToGlobalMappingCreate(comm,bs,nb+nghost,indices,PETSC_OWN_POINTER,&ltog);CHKERRQ(ierr);
916   ierr = VecSetLocalToGlobalMapping(*vv,ltog);CHKERRQ(ierr);
917   ierr = ISLocalToGlobalMappingDestroy(&ltog);CHKERRQ(ierr);
918   PetscFunctionReturn(0);
919 }
920 
921 #undef __FUNCT__
922 #define __FUNCT__ "VecCreateGhostBlock"
923 /*@
924    VecCreateGhostBlock - Creates a parallel vector with ghost padding on each processor.
925         The indexing of the ghost points is done by blocks.
926 
927    Collective on MPI_Comm
928 
929    Input Parameters:
930 +  comm - the MPI communicator to use
931 .  bs - the block size
932 .  n - local vector length
933 .  N - global vector length (or PETSC_DECIDE to have it calculated if n is given)
934 .  nghost - number of local ghost blocks
935 -  ghosts - global indices of ghost blocks, counts are by block, not by individual index, these do not need to be in increasing order (sorted)
936 
937    Output Parameter:
938 .  vv - the global vector representation (without ghost points as part of vector)
939 
940    Notes:
941    Use VecGhostGetLocalForm() to access the local, ghosted representation
942    of the vector.
943 
944    n is the local vector size (total local size not the number of blocks) while nghost
945    is the number of blocks in the ghost portion, i.e. the number of elements in the ghost
946    portion is bs*nghost
947 
948    Level: advanced
949 
950    Concepts: vectors^ghosted
951 
952 .seealso: VecCreateSeq(), VecCreate(), VecDuplicate(), VecDuplicateVecs(), VecCreateMPI(),
953           VecGhostGetLocalForm(), VecGhostRestoreLocalForm(),
954           VecCreateGhostWithArray(), VecCreateMPIWithArray(), VecCreateGhostBlockWithArray()
955 
956 @*/
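/*
   Illustrative sketch of VecCreateGhostBlock() (placeholders; n must be a multiple of bs and the ghost
   indices are block indices):

      PetscInt       bs = 2,n = 6,nghost = 1,ghosts[1] = {0};
      Vec            x;
      PetscErrorCode ierr;

      ierr = VecCreateGhostBlock(PETSC_COMM_WORLD,bs,n,PETSC_DECIDE,nghost,ghosts,&x);CHKERRQ(ierr);
      ierr = VecGhostUpdateBegin(x,INSERT_VALUES,SCATTER_FORWARD);CHKERRQ(ierr);
      ierr = VecGhostUpdateEnd(x,INSERT_VALUES,SCATTER_FORWARD);CHKERRQ(ierr);
      ierr = VecDestroy(&x);CHKERRQ(ierr);
*/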
957 PetscErrorCode  VecCreateGhostBlock(MPI_Comm comm,PetscInt bs,PetscInt n,PetscInt N,PetscInt nghost,const PetscInt ghosts[],Vec *vv)
958 {
959   PetscErrorCode ierr;
960 
961   PetscFunctionBegin;
962   ierr = VecCreateGhostBlockWithArray(comm,bs,n,N,nghost,ghosts,0,vv);CHKERRQ(ierr);
963   PetscFunctionReturn(0);
964 }
965