
/*
   This file contains routines for Parallel vector operations.
 */
#include <../src/vec/vec/impls/mpi/pvecimpl.h>   /*I  "petscvec.h"   I*/

#undef __FUNCT__
#define __FUNCT__ "VecDot_MPI"
static PetscErrorCode VecDot_MPI(Vec xin,Vec yin,PetscScalar *z)
{
  PetscScalar    sum,work;
  PetscErrorCode ierr;

  PetscFunctionBegin;
  ierr = VecDot_Seq(xin,yin,&work);CHKERRQ(ierr);
  ierr = MPI_Allreduce(&work,&sum,1,MPIU_SCALAR,MPIU_SUM,PetscObjectComm((PetscObject)xin));CHKERRQ(ierr);
  *z   = sum;
  PetscFunctionReturn(0);
}

#undef __FUNCT__
#define __FUNCT__ "VecTDot_MPI"
static PetscErrorCode VecTDot_MPI(Vec xin,Vec yin,PetscScalar *z)
{
  PetscScalar    sum,work;
  PetscErrorCode ierr;

  PetscFunctionBegin;
  ierr = VecTDot_Seq(xin,yin,&work);CHKERRQ(ierr);
  ierr = MPI_Allreduce(&work,&sum,1,MPIU_SCALAR,MPIU_SUM,PetscObjectComm((PetscObject)xin));CHKERRQ(ierr);
  *z   = sum;
  PetscFunctionReturn(0);
}

extern PetscErrorCode VecView_MPI_Draw(Vec,PetscViewer);

#undef __FUNCT__
#define __FUNCT__ "VecPlaceArray_MPI"
static PetscErrorCode VecPlaceArray_MPI(Vec vin,const PetscScalar *a)
{
  PetscErrorCode ierr;
  Vec_MPI        *v = (Vec_MPI*)vin->data;

  PetscFunctionBegin;
  if (v->unplacedarray) SETERRQ(PETSC_COMM_SELF,PETSC_ERR_ARG_WRONGSTATE,"VecPlaceArray() was already called on this vector, without a call to VecResetArray()");
  v->unplacedarray = v->array;  /* save previous array so reset can bring it back */
  v->array         = (PetscScalar*)a;
  if (v->localrep) {
    ierr = VecPlaceArray(v->localrep,a);CHKERRQ(ierr);
  }
  PetscFunctionReturn(0);
}

extern PetscErrorCode VecGetValues_MPI(Vec,PetscInt,const PetscInt[],PetscScalar[]);

#undef __FUNCT__
#define __FUNCT__ "VecDuplicate_MPI"
static PetscErrorCode VecDuplicate_MPI(Vec win,Vec *v)
{
  PetscErrorCode ierr;
  Vec_MPI        *vw,*w = (Vec_MPI*)win->data;
  PetscScalar    *array;

  PetscFunctionBegin;
  ierr = VecCreate(PetscObjectComm((PetscObject)win),v);CHKERRQ(ierr);
  ierr = PetscLayoutReference(win->map,&(*v)->map);CHKERRQ(ierr);

  ierr = VecCreate_MPI_Private(*v,PETSC_TRUE,w->nghost,0);CHKERRQ(ierr);
  vw   = (Vec_MPI*)(*v)->data;
  ierr = PetscMemcpy((*v)->ops,win->ops,sizeof(struct _VecOps));CHKERRQ(ierr);

  /* save local representation of the parallel vector (and scatter) if it exists */
  if (w->localrep) {
    ierr = VecGetArray(*v,&array);CHKERRQ(ierr);
    ierr = VecCreateSeqWithArray(PETSC_COMM_SELF,PetscAbs(win->map->bs),win->map->n+w->nghost,array,&vw->localrep);CHKERRQ(ierr);
    ierr = PetscMemcpy(vw->localrep->ops,w->localrep->ops,sizeof(struct _VecOps));CHKERRQ(ierr);
    ierr = VecRestoreArray(*v,&array);CHKERRQ(ierr);
    ierr = PetscLogObjectParent((PetscObject)*v,(PetscObject)vw->localrep);CHKERRQ(ierr);

    vw->localupdate = w->localupdate;
    if (vw->localupdate) {
      ierr = PetscObjectReference((PetscObject)vw->localupdate);CHKERRQ(ierr);
    }
  }

  /* New vector should inherit stashing property of parent */
  (*v)->stash.donotstash   = win->stash.donotstash;
  (*v)->stash.ignorenegidx = win->stash.ignorenegidx;

  ierr = PetscObjectListDuplicate(((PetscObject)win)->olist,&((PetscObject)(*v))->olist);CHKERRQ(ierr);
  ierr = PetscFunctionListDuplicate(((PetscObject)win)->qlist,&((PetscObject)(*v))->qlist);CHKERRQ(ierr);

  (*v)->map->bs   = PetscAbs(win->map->bs);
  (*v)->bstash.bs = win->bstash.bs;
  PetscFunctionReturn(0);
}

extern PetscErrorCode VecSetOption_MPI(Vec,VecOption,PetscBool);
extern PetscErrorCode VecResetArray_MPI(Vec);

#undef __FUNCT__
#define __FUNCT__ "VecAssemblySend_MPI_Private"
static PetscErrorCode VecAssemblySend_MPI_Private(MPI_Comm comm,const PetscMPIInt tag[],PetscMPIInt rankid,PetscMPIInt rank,void *sdata,MPI_Request req[],void *ctx)
{
  Vec X = (Vec)ctx;
  Vec_MPI *x = (Vec_MPI*)X->data;
  VecAssemblyHeader *hdr = (VecAssemblyHeader*)sdata;
  PetscInt bs = X->map->bs;
  PetscErrorCode ierr;

  PetscFunctionBegin;
  /* x->recvhdr only exists when we are reusing a communication network.  In that case, some messages can be empty, but
   * we have to send them this time if we sent them before because the receiver is expecting them. */
  if (hdr->count || (x->recvhdr && x->sendptrs[rankid].ints)) {
    ierr = MPI_Isend(x->sendptrs[rankid].ints,hdr->count,MPIU_INT,rank,tag[0],comm,&req[0]);CHKERRQ(ierr);
    ierr = MPI_Isend(x->sendptrs[rankid].scalars,hdr->count,MPIU_SCALAR,rank,tag[1],comm,&req[1]);CHKERRQ(ierr);
  }
  if (hdr->bcount || (x->recvhdr && x->sendptrs[rankid].intb)) {
    ierr = MPI_Isend(x->sendptrs[rankid].intb,hdr->bcount,MPIU_INT,rank,tag[2],comm,&req[2]);CHKERRQ(ierr);
    ierr = MPI_Isend(x->sendptrs[rankid].scalarb,hdr->bcount*bs,MPIU_SCALAR,rank,tag[3],comm,&req[3]);CHKERRQ(ierr);
  }
  PetscFunctionReturn(0);
}

#undef __FUNCT__
#define __FUNCT__ "VecAssemblyRecv_MPI_Private"
static PetscErrorCode VecAssemblyRecv_MPI_Private(MPI_Comm comm,const PetscMPIInt tag[],PetscMPIInt rank,void *rdata,MPI_Request req[],void *ctx)
{
  Vec X = (Vec)ctx;
  Vec_MPI *x = (Vec_MPI*)X->data;
  VecAssemblyHeader *hdr = (VecAssemblyHeader*)rdata;
  PetscErrorCode ierr;
  PetscInt bs = X->map->bs;
  VecAssemblyFrame *frame;

  PetscFunctionBegin;
  ierr = PetscSegBufferGet(x->segrecvframe,1,&frame);CHKERRQ(ierr);

  if (hdr->count) {
    ierr = PetscSegBufferGet(x->segrecvint,hdr->count,&frame->ints);CHKERRQ(ierr);
    ierr = MPI_Irecv(frame->ints,hdr->count,MPIU_INT,rank,tag[0],comm,&req[0]);CHKERRQ(ierr);
    ierr = PetscSegBufferGet(x->segrecvscalar,hdr->count,&frame->scalars);CHKERRQ(ierr);
    ierr = MPI_Irecv(frame->scalars,hdr->count,MPIU_SCALAR,rank,tag[1],comm,&req[1]);CHKERRQ(ierr);
    frame->pendings = 2;
  } else {
    frame->ints = NULL;
    frame->scalars = NULL;
    frame->pendings = 0;
  }

  if (hdr->bcount) {
    ierr = PetscSegBufferGet(x->segrecvint,hdr->bcount,&frame->intb);CHKERRQ(ierr);
    ierr = MPI_Irecv(frame->intb,hdr->bcount,MPIU_INT,rank,tag[2],comm,&req[2]);CHKERRQ(ierr);
    ierr = PetscSegBufferGet(x->segrecvscalar,hdr->bcount*bs,&frame->scalarb);CHKERRQ(ierr);
    ierr = MPI_Irecv(frame->scalarb,hdr->bcount*bs,MPIU_SCALAR,rank,tag[3],comm,&req[3]);CHKERRQ(ierr);
    frame->pendingb = 2;
  } else {
    frame->intb = NULL;
    frame->scalarb = NULL;
    frame->pendingb = 0;
  }
  PetscFunctionReturn(0);
}

#undef __FUNCT__
#define __FUNCT__ "VecAssemblyBegin_MPI_BTS"
static PetscErrorCode VecAssemblyBegin_MPI_BTS(Vec X)
{
  Vec_MPI        *x = (Vec_MPI*)X->data;
  PetscErrorCode ierr;
  MPI_Comm       comm;
  PetscInt       i,j,jb,bs;

  PetscFunctionBegin;
  if (X->stash.donotstash) PetscFunctionReturn(0);

  ierr = PetscObjectGetComm((PetscObject)X,&comm);CHKERRQ(ierr);
  ierr = VecGetBlockSize(X,&bs);CHKERRQ(ierr);
#if defined(PETSC_USE_DEBUG)
  {
    InsertMode addv;
    ierr = MPI_Allreduce((PetscEnum*)&X->stash.insertmode,(PetscEnum*)&addv,1,MPIU_ENUM,MPI_BOR,comm);CHKERRQ(ierr);
    if (addv == (ADD_VALUES|INSERT_VALUES)) SETERRQ(comm,PETSC_ERR_ARG_NOTSAMETYPE,"Some processors inserted values while others added");
  }
#endif

  ierr = VecStashSortCompress_Private(&X->stash);CHKERRQ(ierr);
  ierr = VecStashSortCompress_Private(&X->bstash);CHKERRQ(ierr);

  if (!x->sendranks) {
    PetscMPIInt nowners,bnowners,*owners,*bowners;
    PetscInt ntmp;
    ierr = VecStashGetOwnerList_Private(&X->stash,X->map,&nowners,&owners);CHKERRQ(ierr);
    ierr = VecStashGetOwnerList_Private(&X->bstash,X->map,&bnowners,&bowners);CHKERRQ(ierr);
    ierr = PetscMergeMPIIntArray(nowners,owners,bnowners,bowners,&ntmp,&x->sendranks);CHKERRQ(ierr);
    x->nsendranks = ntmp;
    ierr = PetscFree(owners);CHKERRQ(ierr);
    ierr = PetscFree(bowners);CHKERRQ(ierr);
    ierr = PetscMalloc1(x->nsendranks,&x->sendhdr);CHKERRQ(ierr);
    ierr = PetscCalloc1(x->nsendranks,&x->sendptrs);CHKERRQ(ierr);
  }
  for (i=0,j=0,jb=0; i<x->nsendranks; i++) {
    PetscMPIInt rank = x->sendranks[i];
    x->sendhdr[i].insertmode = X->stash.insertmode;
    /* Initialize pointers for non-empty stashes the first time around.  Subsequent assemblies with
     * VEC_SUBSET_OFF_PROC_ENTRIES will leave the old pointers (dangling because the stash has been collected) when
     * there is nothing new to send, so that size-zero messages get sent instead. */
    x->sendhdr[i].count = 0;
    if (X->stash.n) {
      x->sendptrs[i].ints    = &X->stash.idx[j];
      x->sendptrs[i].scalars = &X->stash.array[j];
      for ( ; j<X->stash.n && X->stash.idx[j] < X->map->range[rank+1]; j++) x->sendhdr[i].count++;
    }
    x->sendhdr[i].bcount = 0;
    if (X->bstash.n) {
      x->sendptrs[i].intb    = &X->bstash.idx[jb];
      x->sendptrs[i].scalarb = &X->bstash.array[jb*bs];
      for ( ; jb<X->bstash.n && X->bstash.idx[jb]*bs < X->map->range[rank+1]; jb++) x->sendhdr[i].bcount++;
    }
  }

  if (!x->segrecvint) {ierr = PetscSegBufferCreate(sizeof(PetscInt),1000,&x->segrecvint);CHKERRQ(ierr);}
  if (!x->segrecvscalar) {ierr = PetscSegBufferCreate(sizeof(PetscScalar),1000,&x->segrecvscalar);CHKERRQ(ierr);}
  if (!x->segrecvframe) {ierr = PetscSegBufferCreate(sizeof(VecAssemblyFrame),50,&x->segrecvframe);CHKERRQ(ierr);}
  if (x->recvhdr) {             /* VEC_SUBSET_OFF_PROC_ENTRIES and this is not the first assembly */
    PetscMPIInt tag[4];
    if (!x->assembly_subset) SETERRQ(comm,PETSC_ERR_PLIB,"Attempt to reuse rendezvous when not VEC_SUBSET_OFF_PROC_ENTRIES");
    for (i=0; i<4; i++) {ierr = PetscCommGetNewTag(comm,&tag[i]);CHKERRQ(ierr);}
    for (i=0; i<x->nsendranks; i++) {
      ierr = VecAssemblySend_MPI_Private(comm,tag,i,x->sendranks[i],x->sendhdr+i,x->sendreqs+4*i,X);CHKERRQ(ierr);
    }
    for (i=0; i<x->nrecvranks; i++) {
      ierr = VecAssemblyRecv_MPI_Private(comm,tag,x->recvranks[i],x->recvhdr+i,x->recvreqs+4*i,X);CHKERRQ(ierr);
    }
    x->use_status = PETSC_TRUE;
  } else {                      /* First time */
    ierr = PetscCommBuildTwoSidedFReq(comm,3,MPIU_INT,x->nsendranks,x->sendranks,(PetscInt*)x->sendhdr,&x->nrecvranks,&x->recvranks,&x->recvhdr,4,&x->sendreqs,&x->recvreqs,VecAssemblySend_MPI_Private,VecAssemblyRecv_MPI_Private,X);CHKERRQ(ierr);
    x->use_status = PETSC_FALSE;
  }

  {
    PetscInt nstash,reallocs;
    ierr = VecStashGetInfo_Private(&X->stash,&nstash,&reallocs);CHKERRQ(ierr);
    ierr = PetscInfo2(X,"Stash has %D entries, uses %D mallocs.\n",nstash,reallocs);CHKERRQ(ierr);
    ierr = VecStashGetInfo_Private(&X->bstash,&nstash,&reallocs);CHKERRQ(ierr);
    ierr = PetscInfo2(X,"Block-Stash has %D entries, uses %D mallocs.\n",nstash,reallocs);CHKERRQ(ierr);
  }
  PetscFunctionReturn(0);
}

#undef __FUNCT__
#define __FUNCT__ "VecAssemblyEnd_MPI_BTS"
static PetscErrorCode VecAssemblyEnd_MPI_BTS(Vec X)
{
  Vec_MPI *x = (Vec_MPI*)X->data;
  PetscInt bs = X->map->bs;
  PetscMPIInt npending,*some_indices,r;
  MPI_Status  *some_statuses;
  PetscScalar *xarray;
  PetscErrorCode ierr;
  VecAssemblyFrame *frame;

  PetscFunctionBegin;
  if (X->stash.donotstash) PetscFunctionReturn(0);

  ierr = VecGetArray(X,&xarray);CHKERRQ(ierr);
  ierr = PetscSegBufferExtractInPlace(x->segrecvframe,&frame);CHKERRQ(ierr);
  ierr = PetscMalloc2(4*x->nrecvranks,&some_indices,x->use_status?4*x->nrecvranks:0,&some_statuses);CHKERRQ(ierr);
  for (r=0,npending=0; r<x->nrecvranks; r++) npending += frame[r].pendings + frame[r].pendingb;
  while (npending>0) {
    PetscMPIInt ndone,ii;
    /* Filling MPI_Status fields requires some resources from the MPI library.  We skip it on the first assembly, or
     * when VEC_SUBSET_OFF_PROC_ENTRIES has not been set, because we could exchange exact sizes in the initial
     * rendezvous.  When the rendezvous is elided, however, we use MPI_Status to get actual message lengths, so that
     * subsequent assembly can set a proper subset of the values. */
    ierr = MPI_Waitsome(4*x->nrecvranks,x->recvreqs,&ndone,some_indices,x->use_status?some_statuses:MPI_STATUSES_IGNORE);CHKERRQ(ierr);
    for (ii=0; ii<ndone; ii++) {
      PetscInt i = some_indices[ii]/4,j,k;
      InsertMode imode = x->recvhdr[i].insertmode;
      PetscInt *recvint;
      PetscScalar *recvscalar;
      PetscBool intmsg = (PetscBool)(some_indices[ii]%2 == 0);
      PetscBool blockmsg = (PetscBool)((some_indices[ii]%4)/2 == 1);
      npending--;
      if (!blockmsg) { /* Scalar stash */
        PetscMPIInt count;
        if (--frame[i].pendings > 0) continue;
        if (x->use_status) {
          ierr = MPI_Get_count(&some_statuses[ii],intmsg ? MPIU_INT : MPIU_SCALAR,&count);CHKERRQ(ierr);
        } else count = x->recvhdr[i].count;
        for (j=0,recvint=frame[i].ints,recvscalar=frame[i].scalars; j<count; j++,recvint++) {
          PetscInt loc = *recvint - X->map->rstart;
          if (*recvint < X->map->rstart || X->map->rend <= *recvint) SETERRQ3(PETSC_COMM_SELF,PETSC_ERR_PLIB,"Received vector entry %D out of local range [%D,%D)",*recvint,X->map->rstart,X->map->rend);
          switch (imode) {
          case ADD_VALUES:
            xarray[loc] += *recvscalar++;
            break;
          case INSERT_VALUES:
            xarray[loc] = *recvscalar++;
            break;
          default: SETERRQ1(PETSC_COMM_SELF,PETSC_ERR_SUP,"Insert mode not supported 0x%x",imode);
          }
        }
      } else {                  /* Block stash */
        PetscMPIInt count;
        if (--frame[i].pendingb > 0) continue;
        if (x->use_status) {
          ierr = MPI_Get_count(&some_statuses[ii],intmsg ? MPIU_INT : MPIU_SCALAR,&count);CHKERRQ(ierr);
          if (!intmsg) count /= bs; /* Convert from number of scalars to number of blocks */
        } else count = x->recvhdr[i].bcount;
        for (j=0,recvint=frame[i].intb,recvscalar=frame[i].scalarb; j<count; j++,recvint++) {
          PetscInt loc = (*recvint)*bs - X->map->rstart;
          switch (imode) {
          case ADD_VALUES:
            for (k=loc; k<loc+bs; k++) xarray[k] += *recvscalar++;
            break;
          case INSERT_VALUES:
            for (k=loc; k<loc+bs; k++) xarray[k] = *recvscalar++;
            break;
          default: SETERRQ1(PETSC_COMM_SELF,PETSC_ERR_SUP,"Insert mode not supported 0x%x",imode);
          }
        }
      }
    }
  }
  ierr = VecRestoreArray(X,&xarray);CHKERRQ(ierr);
  ierr = MPI_Waitall(4*x->nsendranks,x->sendreqs,MPI_STATUSES_IGNORE);CHKERRQ(ierr);
  ierr = PetscFree2(some_indices,some_statuses);CHKERRQ(ierr);
  if (x->assembly_subset) {
    void *dummy;                /* reset segbuffers */
    ierr = PetscSegBufferExtractInPlace(x->segrecvint,&dummy);CHKERRQ(ierr);
    ierr = PetscSegBufferExtractInPlace(x->segrecvscalar,&dummy);CHKERRQ(ierr);
  } else {
    ierr = VecAssemblyReset_MPI(X);CHKERRQ(ierr);
  }

  X->stash.insertmode = NOT_SET_VALUES;
  ierr = VecStashScatterEnd_Private(&X->stash);CHKERRQ(ierr);
  ierr = VecStashScatterEnd_Private(&X->bstash);CHKERRQ(ierr);
  PetscFunctionReturn(0);
}

#undef __FUNCT__
#define __FUNCT__ "VecAssemblyReset_MPI"
PetscErrorCode VecAssemblyReset_MPI(Vec X)
{
  Vec_MPI *x = (Vec_MPI*)X->data;
  PetscErrorCode ierr;

  PetscFunctionBegin;
  ierr = PetscFree(x->sendreqs);CHKERRQ(ierr);
  ierr = PetscFree(x->recvreqs);CHKERRQ(ierr);
  ierr = PetscFree(x->sendranks);CHKERRQ(ierr);
  ierr = PetscFree(x->recvranks);CHKERRQ(ierr);
  ierr = PetscFree(x->sendhdr);CHKERRQ(ierr);
  ierr = PetscFree(x->recvhdr);CHKERRQ(ierr);
  ierr = PetscFree(x->sendptrs);CHKERRQ(ierr);
  ierr = PetscSegBufferDestroy(&x->segrecvint);CHKERRQ(ierr);
  ierr = PetscSegBufferDestroy(&x->segrecvscalar);CHKERRQ(ierr);
  ierr = PetscSegBufferDestroy(&x->segrecvframe);CHKERRQ(ierr);
  PetscFunctionReturn(0);
}


#undef __FUNCT__
#define __FUNCT__ "VecSetFromOptions_MPI"
static PetscErrorCode VecSetFromOptions_MPI(Vec X)
{
  PetscErrorCode ierr;
  PetscBool      flg = PETSC_FALSE,set;

  PetscFunctionBegin;
  ierr = PetscOptionsBool("-vec_assembly_bts","Use BuildTwoSided version of assembly","",flg,&flg,&set);CHKERRQ(ierr);
  if (set) {
    X->ops->assemblybegin = flg ? VecAssemblyBegin_MPI_BTS : VecAssemblyBegin_MPI;
    X->ops->assemblyend   = flg ? VecAssemblyEnd_MPI_BTS   : VecAssemblyEnd_MPI;
  }
  PetscFunctionReturn(0);
}

static struct _VecOps DvOps = { VecDuplicate_MPI, /* 1 */
                                VecDuplicateVecs_Default,
                                VecDestroyVecs_Default,
                                VecDot_MPI,
                                VecMDot_MPI,
                                VecNorm_MPI,
                                VecTDot_MPI,
                                VecMTDot_MPI,
                                VecScale_Seq,
                                VecCopy_Seq, /* 10 */
                                VecSet_Seq,
                                VecSwap_Seq,
                                VecAXPY_Seq,
                                VecAXPBY_Seq,
                                VecMAXPY_Seq,
                                VecAYPX_Seq,
                                VecWAXPY_Seq,
                                VecAXPBYPCZ_Seq,
                                VecPointwiseMult_Seq,
                                VecPointwiseDivide_Seq,
                                VecSetValues_MPI, /* 20 */
                                VecAssemblyBegin_MPI_BTS,
                                VecAssemblyEnd_MPI_BTS,
                                0,
                                VecGetSize_MPI,
                                VecGetSize_Seq,
                                0,
                                VecMax_MPI,
                                VecMin_MPI,
                                VecSetRandom_Seq,
                                VecSetOption_MPI,
                                VecSetValuesBlocked_MPI,
                                VecDestroy_MPI,
                                VecView_MPI,
                                VecPlaceArray_MPI,
                                VecReplaceArray_Seq,
                                VecDot_Seq,
                                VecTDot_Seq,
                                VecNorm_Seq,
                                VecMDot_Seq,
                                VecMTDot_Seq,
                                VecLoad_Default,
                                VecReciprocal_Default,
                                VecConjugate_Seq,
                                0,
                                0,
                                VecResetArray_MPI,
                                VecSetFromOptions_MPI,
                                VecMaxPointwiseDivide_Seq,
                                VecPointwiseMax_Seq,
                                VecPointwiseMaxAbs_Seq,
                                VecPointwiseMin_Seq,
                                VecGetValues_MPI,
                                0,
                                0,
                                0,
                                0,
                                0,
                                0,
                                VecStrideGather_Default,
                                VecStrideScatter_Default,
                                0,
                                0,
                                0,
                                0,
                                0,
                                VecStrideSubSetGather_Default,
                                VecStrideSubSetScatter_Default,
                                0,
                                0
};

#undef __FUNCT__
#define __FUNCT__ "VecCreate_MPI_Private"
/*
    VecCreate_MPI_Private - Basic create routine called by VecCreate_MPI() (i.e. VecCreateMPI()),
    VecCreateMPIWithArray(), VecCreate_Shared() (i.e. VecCreateShared()), VecCreateGhost(),
    VecDuplicate_MPI(), VecCreateGhostWithArray(), and VecDuplicate_Shared()

    If alloc is true and array is NULL then this routine allocates the space, otherwise
    no space is allocated.
*/
PetscErrorCode VecCreate_MPI_Private(Vec v,PetscBool alloc,PetscInt nghost,const PetscScalar array[])
{
  Vec_MPI        *s;
  PetscErrorCode ierr;

  PetscFunctionBegin;
  ierr           = PetscNewLog(v,&s);CHKERRQ(ierr);
  v->data        = (void*)s;
  ierr           = PetscMemcpy(v->ops,&DvOps,sizeof(DvOps));CHKERRQ(ierr);
  s->nghost      = nghost;
  v->petscnative = PETSC_TRUE;

  ierr = PetscLayoutSetUp(v->map);CHKERRQ(ierr);

  s->array           = (PetscScalar*)array;
  s->array_allocated = 0;
  if (alloc && !array) {
    PetscInt n = v->map->n+nghost;
    ierr               = PetscMalloc1(n,&s->array);CHKERRQ(ierr);
    ierr               = PetscLogObjectMemory((PetscObject)v,n*sizeof(PetscScalar));CHKERRQ(ierr);
    ierr               = PetscMemzero(s->array,v->map->n*sizeof(PetscScalar));CHKERRQ(ierr);
    s->array_allocated = s->array;
  }

  /* By default parallel vectors do not have local representation */
  s->localrep    = 0;
  s->localupdate = 0;

  v->stash.insertmode = NOT_SET_VALUES;
  /* create the stashes. The block-size for bstash is set later when
     VecSetValuesBlocked is called.
  */
  ierr = VecStashCreate_Private(PetscObjectComm((PetscObject)v),1,&v->stash);CHKERRQ(ierr);
  ierr = VecStashCreate_Private(PetscObjectComm((PetscObject)v),PetscAbs(v->map->bs),&v->bstash);CHKERRQ(ierr);

#if defined(PETSC_HAVE_MATLAB_ENGINE)
  ierr = PetscObjectComposeFunction((PetscObject)v,"PetscMatlabEnginePut_C",VecMatlabEnginePut_Default);CHKERRQ(ierr);
  ierr = PetscObjectComposeFunction((PetscObject)v,"PetscMatlabEngineGet_C",VecMatlabEngineGet_Default);CHKERRQ(ierr);
#endif
  ierr = PetscObjectChangeTypeName((PetscObject)v,VECMPI);CHKERRQ(ierr);
  PetscFunctionReturn(0);
}

/*MC
   VECMPI - VECMPI = "mpi" - The basic parallel vector

   Options Database Keys:
. -vec_type mpi - sets the vector type to VECMPI during a call to VecSetFromOptions()

  Level: beginner

.seealso: VecCreate(), VecSetType(), VecSetFromOptions(), VecCreateMPIWithArray(), VECMPI, VecType, VecCreateMPI()
M*/
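
/*
   A brief usage sketch (illustration only, not part of the original source): creating a vector
   and selecting VECMPI explicitly.  It assumes a standard PETSc program with `ierr` declared and
   PETSC_COMM_WORLD available; the global size of 100 is an arbitrary illustrative value.

     Vec v;
     ierr = VecCreate(PETSC_COMM_WORLD,&v);CHKERRQ(ierr);
     ierr = VecSetSizes(v,PETSC_DECIDE,100);CHKERRQ(ierr);
     ierr = VecSetType(v,VECMPI);CHKERRQ(ierr);   // or run with -vec_type mpi and call VecSetFromOptions(v)
     ierr = VecSet(v,1.0);CHKERRQ(ierr);
     ierr = VecDestroy(&v);CHKERRQ(ierr);
*/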

#undef __FUNCT__
#define __FUNCT__ "VecCreate_MPI"
PETSC_EXTERN PetscErrorCode VecCreate_MPI(Vec vv)
{
  PetscErrorCode ierr;

  PetscFunctionBegin;
  ierr = VecCreate_MPI_Private(vv,PETSC_TRUE,0,0);CHKERRQ(ierr);
  PetscFunctionReturn(0);
}

/*MC
   VECSTANDARD = "standard" - A VECSEQ on one process and VECMPI on more than one process

   Options Database Keys:
. -vec_type standard - sets the vector type to VECSTANDARD during a call to VecSetFromOptions()

  Level: beginner

.seealso: VecCreateSeq(), VecCreateMPI()
M*/
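
/*
   A usage sketch (illustration only): letting the options database pick the type, so that
   -vec_type standard (which dispatches to VECSEQ or VECMPI by communicator size) or any other
   registered type can be selected at run time.  Assumes `ierr` and PETSC_COMM_WORLD as in a
   typical PETSc main().

     Vec v;
     ierr = VecCreate(PETSC_COMM_WORLD,&v);CHKERRQ(ierr);
     ierr = VecSetSizes(v,PETSC_DECIDE,100);CHKERRQ(ierr);
     ierr = VecSetFromOptions(v);CHKERRQ(ierr);   // honors -vec_type standard, mpi, seq, ...
     ierr = VecDestroy(&v);CHKERRQ(ierr);
*/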

#undef __FUNCT__
#define __FUNCT__ "VecCreate_Standard"
PETSC_EXTERN PetscErrorCode VecCreate_Standard(Vec v)
{
  PetscErrorCode ierr;
  PetscMPIInt    size;

  PetscFunctionBegin;
  ierr = MPI_Comm_size(PetscObjectComm((PetscObject)v),&size);CHKERRQ(ierr);
  if (size == 1) {
    ierr = VecSetType(v,VECSEQ);CHKERRQ(ierr);
  } else {
    ierr = VecSetType(v,VECMPI);CHKERRQ(ierr);
  }
  PetscFunctionReturn(0);
}

#undef __FUNCT__
#define __FUNCT__ "VecCreateMPIWithArray"
/*@C
   VecCreateMPIWithArray - Creates a parallel, array-style vector,
   where the user provides the array space to store the vector values.

   Collective on MPI_Comm

   Input Parameters:
+  comm  - the MPI communicator to use
.  bs    - block size, same meaning as VecSetBlockSize()
.  n     - local vector length, cannot be PETSC_DECIDE
567 .  n     - local vector length, cannot be PETSC_DECIDE
-  array - the user provided array to store the vector values

   Output Parameter:
.  vv - the vector

   Notes:
   Use VecDuplicate() or VecDuplicateVecs() to form additional vectors of the
   same type as an existing vector.

   If the user-provided array is NULL, then VecPlaceArray() can be used
   at a later stage to SET the array for storing the vector values.

   PETSc does NOT free the array when the vector is destroyed via VecDestroy().
   The user should not free the array until the vector is destroyed.

   Level: intermediate

   Concepts: vectors^creating with array

.seealso: VecCreateSeqWithArray(), VecCreate(), VecDuplicate(), VecDuplicateVecs(), VecCreateGhost(),
          VecCreateMPI(), VecCreateGhostWithArray(), VecPlaceArray()


@*/
PetscErrorCode  VecCreateMPIWithArray(MPI_Comm comm,PetscInt bs,PetscInt n,PetscInt N,const PetscScalar array[],Vec *vv)
{
  PetscErrorCode ierr;

  PetscFunctionBegin;
  if (n == PETSC_DECIDE) SETERRQ(PETSC_COMM_SELF,PETSC_ERR_ARG_OUTOFRANGE,"Must set local size of vector");
  ierr = PetscSplitOwnership(comm,&n,&N);CHKERRQ(ierr);
  ierr = VecCreate(comm,vv);CHKERRQ(ierr);
  ierr = VecSetSizes(*vv,n,N);CHKERRQ(ierr);
  ierr = VecSetBlockSize(*vv,bs);CHKERRQ(ierr);
  ierr = VecCreate_MPI_Private(*vv,PETSC_FALSE,0,array);CHKERRQ(ierr);
  PetscFunctionReturn(0);
}
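
/*
   A usage sketch (illustration only): each rank provides storage for its 4 local entries
   (the sizes and block size of 1 are illustrative).  As noted above, PETSc does not free the
   array, so `vals` must outlive the vector.  Assumes `ierr` and PETSC_COMM_WORLD.

     PetscScalar vals[4] = {0.0,0.0,0.0,0.0};
     Vec         v;
     ierr = VecCreateMPIWithArray(PETSC_COMM_WORLD,1,4,PETSC_DECIDE,vals,&v);CHKERRQ(ierr);
     ierr = VecSet(v,2.0);CHKERRQ(ierr);          // writes directly into vals[]
     ierr = VecDestroy(&v);CHKERRQ(ierr);         // vals[] is not freed by PETSc
*/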

#undef __FUNCT__
#define __FUNCT__ "VecCreateGhostWithArray"
/*@C
   VecCreateGhostWithArray - Creates a parallel vector with ghost padding on each processor;
   the caller allocates the array space.

   Collective on MPI_Comm

   Input Parameters:
+  comm - the MPI communicator to use
.  n - local vector length
.  N - global vector length (or PETSC_DECIDE to have it calculated if n is given)
.  nghost - number of local ghost points
.  ghosts - global indices of ghost points (or NULL if not needed), these do not need to be in increasing order (sorted)
-  array - the space to store the vector values (as long as n + nghost)

   Output Parameter:
.  vv - the global vector representation (without ghost points as part of vector)

   Notes:
   Use VecGhostGetLocalForm() to access the local, ghosted representation
   of the vector.

   This also automatically sets the ISLocalToGlobalMapping() for this vector.

   Level: advanced

   Concepts: vectors^creating with array
   Concepts: vectors^ghosted

.seealso: VecCreate(), VecGhostGetLocalForm(), VecGhostRestoreLocalForm(),
          VecCreateGhost(), VecCreateSeqWithArray(), VecCreateMPIWithArray(),
          VecCreateGhostBlock(), VecCreateGhostBlockWithArray(), VecMPISetGhost()

@*/
PetscErrorCode  VecCreateGhostWithArray(MPI_Comm comm,PetscInt n,PetscInt N,PetscInt nghost,const PetscInt ghosts[],const PetscScalar array[],Vec *vv)
{
  PetscErrorCode         ierr;
  Vec_MPI                *w;
  PetscScalar            *larray;
  IS                     from,to;
  ISLocalToGlobalMapping ltog;
  PetscInt               rstart,i,*indices;

  PetscFunctionBegin;
  *vv = 0;

  if (n == PETSC_DECIDE)      SETERRQ(PETSC_COMM_SELF,PETSC_ERR_ARG_OUTOFRANGE,"Must set local size");
  if (nghost == PETSC_DECIDE) SETERRQ(PETSC_COMM_SELF,PETSC_ERR_ARG_OUTOFRANGE,"Must set local ghost size");
  if (nghost < 0)             SETERRQ(PETSC_COMM_SELF,PETSC_ERR_ARG_OUTOFRANGE,"Ghost length must be >= 0");
  ierr = PetscSplitOwnership(comm,&n,&N);CHKERRQ(ierr);
  /* Create global representation */
  ierr = VecCreate(comm,vv);CHKERRQ(ierr);
  ierr = VecSetSizes(*vv,n,N);CHKERRQ(ierr);
  ierr = VecCreate_MPI_Private(*vv,PETSC_TRUE,nghost,array);CHKERRQ(ierr);
  w    = (Vec_MPI*)(*vv)->data;
  /* Create local representation */
  ierr = VecGetArray(*vv,&larray);CHKERRQ(ierr);
  ierr = VecCreateSeqWithArray(PETSC_COMM_SELF,1,n+nghost,larray,&w->localrep);CHKERRQ(ierr);
  ierr = PetscLogObjectParent((PetscObject)*vv,(PetscObject)w->localrep);CHKERRQ(ierr);
  ierr = VecRestoreArray(*vv,&larray);CHKERRQ(ierr);

  /*
       Create scatter context for scattering (updating) ghost values
  */
  ierr = ISCreateGeneral(comm,nghost,ghosts,PETSC_COPY_VALUES,&from);CHKERRQ(ierr);
  ierr = ISCreateStride(PETSC_COMM_SELF,nghost,n,1,&to);CHKERRQ(ierr);
  ierr = VecScatterCreate(*vv,from,w->localrep,to,&w->localupdate);CHKERRQ(ierr);
  ierr = PetscLogObjectParent((PetscObject)*vv,(PetscObject)w->localupdate);CHKERRQ(ierr);
  ierr = ISDestroy(&to);CHKERRQ(ierr);
  ierr = ISDestroy(&from);CHKERRQ(ierr);

  /* set local to global mapping for ghosted vector */
  ierr = PetscMalloc1((n+nghost),&indices);CHKERRQ(ierr);
  ierr = VecGetOwnershipRange(*vv,&rstart,NULL);CHKERRQ(ierr);
  for (i=0; i<n; i++) {
    indices[i] = rstart + i;
  }
  for (i=0; i<nghost; i++) {
    indices[n+i] = ghosts[i];
  }
  ierr = ISLocalToGlobalMappingCreate(comm,1,n+nghost,indices,PETSC_OWN_POINTER,&ltog);CHKERRQ(ierr);
  ierr = VecSetLocalToGlobalMapping(*vv,ltog);CHKERRQ(ierr);
  ierr = ISLocalToGlobalMappingDestroy(&ltog);CHKERRQ(ierr);
  PetscFunctionReturn(0);
}
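
/*
   A usage sketch (illustration only): 5 owned entries plus 2 ghost entries per rank, stored in a
   caller-allocated array of length n+nghost.  The ghost indices are hypothetical global indices
   owned by other ranks; assumes `ierr` and PETSC_COMM_WORLD.

     PetscInt    ghosts[2] = {0,9};
     PetscScalar work[7];                          // n + nghost = 5 + 2 entries
     Vec         v;
     ierr = VecCreateGhostWithArray(PETSC_COMM_WORLD,5,PETSC_DECIDE,2,ghosts,work,&v);CHKERRQ(ierr);
     ierr = VecGhostUpdateBegin(v,INSERT_VALUES,SCATTER_FORWARD);CHKERRQ(ierr);
     ierr = VecGhostUpdateEnd(v,INSERT_VALUES,SCATTER_FORWARD);CHKERRQ(ierr);
     ierr = VecDestroy(&v);CHKERRQ(ierr);
*/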

#undef __FUNCT__
#define __FUNCT__ "VecCreateGhost"
/*@
   VecCreateGhost - Creates a parallel vector with ghost padding on each processor.

   Collective on MPI_Comm

   Input Parameters:
+  comm - the MPI communicator to use
.  n - local vector length
.  N - global vector length (or PETSC_DECIDE to have it calculated if n is given)
.  nghost - number of local ghost points
-  ghosts - global indices of ghost points, these do not need to be in increasing order (sorted)

   Output Parameter:
.  vv - the global vector representation (without ghost points as part of vector)

   Notes:
   Use VecGhostGetLocalForm() to access the local, ghosted representation
   of the vector.

   This also automatically sets the ISLocalToGlobalMapping() for this vector.

   Level: advanced

   Concepts: vectors^ghosted

.seealso: VecCreateSeq(), VecCreate(), VecDuplicate(), VecDuplicateVecs(), VecCreateMPI(),
          VecGhostGetLocalForm(), VecGhostRestoreLocalForm(), VecGhostUpdateBegin(),
          VecCreateGhostWithArray(), VecCreateMPIWithArray(), VecGhostUpdateEnd(),
          VecCreateGhostBlock(), VecCreateGhostBlockWithArray(), VecMPISetGhost()

@*/
PetscErrorCode  VecCreateGhost(MPI_Comm comm,PetscInt n,PetscInt N,PetscInt nghost,const PetscInt ghosts[],Vec *vv)
{
  PetscErrorCode ierr;

  PetscFunctionBegin;
  ierr = VecCreateGhostWithArray(comm,n,N,nghost,ghosts,0,vv);CHKERRQ(ierr);
  PetscFunctionReturn(0);
}
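
/*
   A usage sketch (illustration only), with hypothetical ghost indices: the local form obtained
   with VecGhostGetLocalForm() is a sequential vector of length n+nghost whose first n entries
   are the owned values and whose last nghost entries are the ghosts.  Assumes `comm` and `ierr`.

     Vec      gv,lv;
     PetscInt ghosts[2] = {0,7};
     ierr = VecCreateGhost(comm,4,PETSC_DECIDE,2,ghosts,&gv);CHKERRQ(ierr);
     ierr = VecGhostUpdateBegin(gv,INSERT_VALUES,SCATTER_FORWARD);CHKERRQ(ierr);
     ierr = VecGhostUpdateEnd(gv,INSERT_VALUES,SCATTER_FORWARD);CHKERRQ(ierr);
     ierr = VecGhostGetLocalForm(gv,&lv);CHKERRQ(ierr);
     ierr = VecGhostRestoreLocalForm(gv,&lv);CHKERRQ(ierr);
     ierr = VecDestroy(&gv);CHKERRQ(ierr);
*/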

#undef __FUNCT__
#define __FUNCT__ "VecMPISetGhost"
/*@
   VecMPISetGhost - Sets the ghost points for an MPI ghost vector

   Collective on Vec

   Input Parameters:
+  vv - the MPI vector
.  nghost - number of local ghost points
-  ghosts - global indices of ghost points, these do not need to be in increasing order (sorted)


   Notes:
   Use VecGhostGetLocalForm() to access the local, ghosted representation
   of the vector.

   This also automatically sets the ISLocalToGlobalMapping() for this vector.

   You must call this AFTER you have set the type of the vector (with VecSetType()) and the size (with VecSetSizes()).

   Level: advanced

   Concepts: vectors^ghosted

.seealso: VecCreateSeq(), VecCreate(), VecDuplicate(), VecDuplicateVecs(), VecCreateMPI(),
          VecGhostGetLocalForm(), VecGhostRestoreLocalForm(), VecGhostUpdateBegin(),
          VecCreateGhostWithArray(), VecCreateMPIWithArray(), VecGhostUpdateEnd(),
          VecCreateGhostBlock(), VecCreateGhostBlockWithArray()

@*/
PetscErrorCode  VecMPISetGhost(Vec vv,PetscInt nghost,const PetscInt ghosts[])
{
  PetscErrorCode ierr;
  PetscBool      flg;

  PetscFunctionBegin;
  ierr = PetscObjectTypeCompare((PetscObject)vv,VECMPI,&flg);CHKERRQ(ierr);
  /* if already a fully existent VECMPI then basically destroy it and rebuild with ghosting */
  if (flg) {
    PetscInt               n,N;
    Vec_MPI                *w;
    PetscScalar            *larray;
    IS                     from,to;
    ISLocalToGlobalMapping ltog;
    PetscInt               rstart,i,*indices;
    MPI_Comm               comm;

    ierr = PetscObjectGetComm((PetscObject)vv,&comm);CHKERRQ(ierr);
    n    = vv->map->n;
    N    = vv->map->N;
    ierr = (*vv->ops->destroy)(vv);CHKERRQ(ierr);
    ierr = VecSetSizes(vv,n,N);CHKERRQ(ierr);
    ierr = VecCreate_MPI_Private(vv,PETSC_TRUE,nghost,NULL);CHKERRQ(ierr);
    w    = (Vec_MPI*)(vv)->data;
    /* Create local representation */
    ierr = VecGetArray(vv,&larray);CHKERRQ(ierr);
    ierr = VecCreateSeqWithArray(PETSC_COMM_SELF,1,n+nghost,larray,&w->localrep);CHKERRQ(ierr);
    ierr = PetscLogObjectParent((PetscObject)vv,(PetscObject)w->localrep);CHKERRQ(ierr);
    ierr = VecRestoreArray(vv,&larray);CHKERRQ(ierr);

    /*
     Create scatter context for scattering (updating) ghost values
     */
    ierr = ISCreateGeneral(comm,nghost,ghosts,PETSC_COPY_VALUES,&from);CHKERRQ(ierr);
    ierr = ISCreateStride(PETSC_COMM_SELF,nghost,n,1,&to);CHKERRQ(ierr);
    ierr = VecScatterCreate(vv,from,w->localrep,to,&w->localupdate);CHKERRQ(ierr);
    ierr = PetscLogObjectParent((PetscObject)vv,(PetscObject)w->localupdate);CHKERRQ(ierr);
    ierr = ISDestroy(&to);CHKERRQ(ierr);
    ierr = ISDestroy(&from);CHKERRQ(ierr);

    /* set local to global mapping for ghosted vector */
    ierr = PetscMalloc1((n+nghost),&indices);CHKERRQ(ierr);
    ierr = VecGetOwnershipRange(vv,&rstart,NULL);CHKERRQ(ierr);

    for (i=0; i<n; i++)      indices[i]   = rstart + i;
    for (i=0; i<nghost; i++) indices[n+i] = ghosts[i];

    ierr = ISLocalToGlobalMappingCreate(comm,1,n+nghost,indices,PETSC_OWN_POINTER,&ltog);CHKERRQ(ierr);
    ierr = VecSetLocalToGlobalMapping(vv,ltog);CHKERRQ(ierr);
    ierr = ISLocalToGlobalMappingDestroy(&ltog);CHKERRQ(ierr);
  } else if (vv->ops->create == VecCreate_MPI) SETERRQ(PetscObjectComm((PetscObject)vv),PETSC_ERR_ARG_WRONGSTATE,"Must set local or global size before setting ghosting");
  else if (!((PetscObject)vv)->type_name) SETERRQ(PetscObjectComm((PetscObject)vv),PETSC_ERR_ARG_WRONGSTATE,"Must set type to VECMPI before ghosting");
  PetscFunctionReturn(0);
}
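
/*
   A usage sketch (illustration only): the sizes and type are set before VecMPISetGhost(), as the
   note above requires; the ghost indices are hypothetical.  Assumes `comm` and `ierr`.

     Vec      v;
     PetscInt ghosts[2] = {3,11};
     ierr = VecCreate(comm,&v);CHKERRQ(ierr);
     ierr = VecSetSizes(v,6,PETSC_DECIDE);CHKERRQ(ierr);
     ierr = VecSetType(v,VECMPI);CHKERRQ(ierr);
     ierr = VecMPISetGhost(v,2,ghosts);CHKERRQ(ierr);
     ierr = VecDestroy(&v);CHKERRQ(ierr);
*/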


/* ------------------------------------------------------------------------------------------*/
#undef __FUNCT__
#define __FUNCT__ "VecCreateGhostBlockWithArray"
/*@C
   VecCreateGhostBlockWithArray - Creates a parallel vector with ghost padding on each processor;
   the caller allocates the array space. Indices in the ghost region are based on blocks.

   Collective on MPI_Comm

   Input Parameters:
+  comm - the MPI communicator to use
.  bs - block size
.  n - local vector length
.  N - global vector length (or PETSC_DECIDE to have it calculated if n is given)
.  nghost - number of local ghost blocks
.  ghosts - global indices of ghost blocks (or NULL if not needed), counts are by block not by index, these do not need to be in increasing order (sorted)
-  array - the space to store the vector values (as long as n + nghost*bs)

   Output Parameter:
.  vv - the global vector representation (without ghost points as part of vector)

   Notes:
   Use VecGhostGetLocalForm() to access the local, ghosted representation
   of the vector.

   n is the local vector size (total local size not the number of blocks) while nghost
   is the number of blocks in the ghost portion, i.e. the number of elements in the ghost
   portion is bs*nghost

   Level: advanced

   Concepts: vectors^creating ghosted
   Concepts: vectors^creating with array

.seealso: VecCreate(), VecGhostGetLocalForm(), VecGhostRestoreLocalForm(),
          VecCreateGhost(), VecCreateSeqWithArray(), VecCreateMPIWithArray(),
          VecCreateGhostWithArray(), VecCreateGhostBlock()

@*/
PetscErrorCode  VecCreateGhostBlockWithArray(MPI_Comm comm,PetscInt bs,PetscInt n,PetscInt N,PetscInt nghost,const PetscInt ghosts[],const PetscScalar array[],Vec *vv)
{
  PetscErrorCode         ierr;
  Vec_MPI                *w;
  PetscScalar            *larray;
  IS                     from,to;
  ISLocalToGlobalMapping ltog;
  PetscInt               rstart,i,nb,*indices;

  PetscFunctionBegin;
  *vv = 0;

  if (n == PETSC_DECIDE)      SETERRQ(PETSC_COMM_SELF,PETSC_ERR_ARG_OUTOFRANGE,"Must set local size");
  if (nghost == PETSC_DECIDE) SETERRQ(PETSC_COMM_SELF,PETSC_ERR_ARG_OUTOFRANGE,"Must set local ghost size");
  if (nghost < 0)             SETERRQ(PETSC_COMM_SELF,PETSC_ERR_ARG_OUTOFRANGE,"Ghost length must be >= 0");
  if (n % bs)                 SETERRQ(PETSC_COMM_SELF,PETSC_ERR_ARG_INCOMP,"Local size must be a multiple of block size");
  ierr = PetscSplitOwnership(comm,&n,&N);CHKERRQ(ierr);
  /* Create global representation */
  ierr = VecCreate(comm,vv);CHKERRQ(ierr);
  ierr = VecSetSizes(*vv,n,N);CHKERRQ(ierr);
  ierr = VecSetBlockSize(*vv,bs);CHKERRQ(ierr);
  ierr = VecCreate_MPI_Private(*vv,PETSC_TRUE,nghost*bs,array);CHKERRQ(ierr);
  w    = (Vec_MPI*)(*vv)->data;
  /* Create local representation */
  ierr = VecGetArray(*vv,&larray);CHKERRQ(ierr);
  ierr = VecCreateSeqWithArray(PETSC_COMM_SELF,bs,n+bs*nghost,larray,&w->localrep);CHKERRQ(ierr);
  ierr = PetscLogObjectParent((PetscObject)*vv,(PetscObject)w->localrep);CHKERRQ(ierr);
  ierr = VecRestoreArray(*vv,&larray);CHKERRQ(ierr);

  /*
       Create scatter context for scattering (updating) ghost values
  */
  ierr = ISCreateBlock(comm,bs,nghost,ghosts,PETSC_COPY_VALUES,&from);CHKERRQ(ierr);
  ierr = ISCreateStride(PETSC_COMM_SELF,bs*nghost,n,1,&to);CHKERRQ(ierr);
  ierr = VecScatterCreate(*vv,from,w->localrep,to,&w->localupdate);CHKERRQ(ierr);
  ierr = PetscLogObjectParent((PetscObject)*vv,(PetscObject)w->localupdate);CHKERRQ(ierr);
  ierr = ISDestroy(&to);CHKERRQ(ierr);
  ierr = ISDestroy(&from);CHKERRQ(ierr);

  /* set local to global mapping for ghosted vector */
  nb   = n/bs;
  ierr = PetscMalloc1((nb+nghost),&indices);CHKERRQ(ierr);
  ierr = VecGetOwnershipRange(*vv,&rstart,NULL);CHKERRQ(ierr);

  for (i=0; i<nb; i++)      indices[i]    = rstart + i;
  for (i=0; i<nghost; i++)  indices[nb+i] = ghosts[i];

  ierr = ISLocalToGlobalMappingCreate(comm,bs,nb+nghost,indices,PETSC_OWN_POINTER,&ltog);CHKERRQ(ierr);
  ierr = VecSetLocalToGlobalMapping(*vv,ltog);CHKERRQ(ierr);
  ierr = ISLocalToGlobalMappingDestroy(&ltog);CHKERRQ(ierr);
  PetscFunctionReturn(0);
}
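
/*
   A usage sketch (illustration only): with bs = 2 the local length n = 6 must be a multiple of bs,
   and the caller-allocated array holds n + bs*nghost scalars.  The ghost block index is
   hypothetical; assumes `ierr` and PETSC_COMM_WORLD.

     PetscInt    ghosts[1] = {4};                 // ghost entries are addressed by block index
     PetscScalar work[8];                          // n + bs*nghost = 6 + 2*1 scalars
     Vec         v;
     ierr = VecCreateGhostBlockWithArray(PETSC_COMM_WORLD,2,6,PETSC_DECIDE,1,ghosts,work,&v);CHKERRQ(ierr);
     ierr = VecDestroy(&v);CHKERRQ(ierr);
*/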

#undef __FUNCT__
#define __FUNCT__ "VecCreateGhostBlock"
/*@
   VecCreateGhostBlock - Creates a parallel vector with ghost padding on each processor.
        The indexing of the ghost points is done with blocks.

   Collective on MPI_Comm

   Input Parameters:
+  comm - the MPI communicator to use
.  bs - the block size
.  n - local vector length
.  N - global vector length (or PETSC_DECIDE to have it calculated if n is given)
.  nghost - number of local ghost blocks
-  ghosts - global indices of ghost blocks, counts are by block, not by individual index, these do not need to be in increasing order (sorted)

   Output Parameter:
.  vv - the global vector representation (without ghost points as part of vector)

   Notes:
   Use VecGhostGetLocalForm() to access the local, ghosted representation
   of the vector.

   n is the local vector size (total local size not the number of blocks) while nghost
   is the number of blocks in the ghost portion, i.e. the number of elements in the ghost
   portion is bs*nghost

   Level: advanced

   Concepts: vectors^ghosted

.seealso: VecCreateSeq(), VecCreate(), VecDuplicate(), VecDuplicateVecs(), VecCreateMPI(),
          VecGhostGetLocalForm(), VecGhostRestoreLocalForm(),
          VecCreateGhostWithArray(), VecCreateMPIWithArray(), VecCreateGhostBlockWithArray()

@*/
PetscErrorCode  VecCreateGhostBlock(MPI_Comm comm,PetscInt bs,PetscInt n,PetscInt N,PetscInt nghost,const PetscInt ghosts[],Vec *vv)
{
  PetscErrorCode ierr;

  PetscFunctionBegin;
  ierr = VecCreateGhostBlockWithArray(comm,bs,n,N,nghost,ghosts,0,vv);CHKERRQ(ierr);
  PetscFunctionReturn(0);
}
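
/*
   A usage sketch (illustration only): same arguments as VecCreateGhostBlockWithArray() above but
   PETSc allocates the storage.  The reverse/add update pattern accumulates ghost contributions
   back into the owning ranks.  Block size, sizes, and ghost block indices are hypothetical;
   assumes `comm` and `ierr`.

     Vec      v;
     PetscInt ghosts[2] = {1,5};                  // indexed by block
     ierr = VecCreateGhostBlock(comm,3,9,PETSC_DECIDE,2,ghosts,&v);CHKERRQ(ierr);
     ierr = VecGhostUpdateBegin(v,ADD_VALUES,SCATTER_REVERSE);CHKERRQ(ierr);
     ierr = VecGhostUpdateEnd(v,ADD_VALUES,SCATTER_REVERSE);CHKERRQ(ierr);
     ierr = VecDestroy(&v);CHKERRQ(ierr);
*/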
958