
/*
   This file contains routines for parallel vector operations.
 */
#include <petscsys.h>
#include <../src/vec/vec/impls/mpi/pvecimpl.h>   /*I  "petscvec.h"   I*/

PetscErrorCode VecDot_MPI(Vec xin,Vec yin,PetscScalar *z)
{
  PetscScalar    sum,work;
  PetscErrorCode ierr;

  PetscFunctionBegin;
  ierr = VecDot_Seq(xin,yin,&work);CHKERRQ(ierr);
  ierr = MPIU_Allreduce(&work,&sum,1,MPIU_SCALAR,MPIU_SUM,PetscObjectComm((PetscObject)xin));CHKERRQ(ierr);
  *z   = sum;
  PetscFunctionReturn(0);
}

PetscErrorCode VecTDot_MPI(Vec xin,Vec yin,PetscScalar *z)
{
  PetscScalar    sum,work;
  PetscErrorCode ierr;

  PetscFunctionBegin;
  ierr = VecTDot_Seq(xin,yin,&work);CHKERRQ(ierr);
  ierr = MPIU_Allreduce(&work,&sum,1,MPIU_SCALAR,MPIU_SUM,PetscObjectComm((PetscObject)xin));CHKERRQ(ierr);
  *z   = sum;
  PetscFunctionReturn(0);
}

extern PetscErrorCode VecView_MPI_Draw(Vec,PetscViewer);

static PetscErrorCode VecPlaceArray_MPI(Vec vin,const PetscScalar *a)
{
  PetscErrorCode ierr;
  Vec_MPI        *v = (Vec_MPI*)vin->data;

  PetscFunctionBegin;
  if (v->unplacedarray) SETERRQ(PETSC_COMM_SELF,PETSC_ERR_ARG_WRONGSTATE,"VecPlaceArray() was already called on this vector, without a call to VecResetArray()");
  v->unplacedarray = v->array;  /* save previous array so reset can bring it back */
  v->array         = (PetscScalar*)a;
  if (v->localrep) {
    ierr = VecPlaceArray(v->localrep,a);CHKERRQ(ierr);
  }
  PetscFunctionReturn(0);
}

static PetscErrorCode VecDuplicate_MPI(Vec win,Vec *v)
{
  PetscErrorCode ierr;
  Vec_MPI        *vw,*w = (Vec_MPI*)win->data;
  PetscScalar    *array;

  PetscFunctionBegin;
  ierr = VecCreate(PetscObjectComm((PetscObject)win),v);CHKERRQ(ierr);
  ierr = PetscLayoutReference(win->map,&(*v)->map);CHKERRQ(ierr);

  ierr = VecCreate_MPI_Private(*v,PETSC_TRUE,w->nghost,0);CHKERRQ(ierr);
  vw   = (Vec_MPI*)(*v)->data;
  ierr = PetscMemcpy((*v)->ops,win->ops,sizeof(struct _VecOps));CHKERRQ(ierr);

  /* save local representation of the parallel vector (and scatter) if it exists */
  if (w->localrep) {
    ierr = VecGetArray(*v,&array);CHKERRQ(ierr);
    ierr = VecCreateSeqWithArray(PETSC_COMM_SELF,PetscAbs(win->map->bs),win->map->n+w->nghost,array,&vw->localrep);CHKERRQ(ierr);
    ierr = PetscMemcpy(vw->localrep->ops,w->localrep->ops,sizeof(struct _VecOps));CHKERRQ(ierr);
    ierr = VecRestoreArray(*v,&array);CHKERRQ(ierr);
    ierr = PetscLogObjectParent((PetscObject)*v,(PetscObject)vw->localrep);CHKERRQ(ierr);

    vw->localupdate = w->localupdate;
    if (vw->localupdate) {
      ierr = PetscObjectReference((PetscObject)vw->localupdate);CHKERRQ(ierr);
    }
  }

  /* New vector should inherit stashing property of parent */
  (*v)->stash.donotstash   = win->stash.donotstash;
  (*v)->stash.ignorenegidx = win->stash.ignorenegidx;

  ierr = PetscObjectListDuplicate(((PetscObject)win)->olist,&((PetscObject)(*v))->olist);CHKERRQ(ierr);
  ierr = PetscFunctionListDuplicate(((PetscObject)win)->qlist,&((PetscObject)(*v))->qlist);CHKERRQ(ierr);

  (*v)->map->bs   = PetscAbs(win->map->bs);
  (*v)->bstash.bs = win->bstash.bs;
  PetscFunctionReturn(0);
}


static PetscErrorCode VecSetOption_MPI(Vec V,VecOption op,PetscBool flag)
{
  Vec_MPI        *v = (Vec_MPI*)V->data;
  PetscErrorCode ierr;

  PetscFunctionBegin;
  switch (op) {
  case VEC_IGNORE_OFF_PROC_ENTRIES: V->stash.donotstash = flag;
    break;
  case VEC_IGNORE_NEGATIVE_INDICES: V->stash.ignorenegidx = flag;
    break;
  case VEC_SUBSET_OFF_PROC_ENTRIES:
    v->assembly_subset = flag; /* See the same logic in MatAssembly wrt MAT_SUBSET_OFF_PROC_ENTRIES */
    if (!v->assembly_subset) { /* User indicates "do not reuse the communication pattern" */
      ierr = VecAssemblyReset_MPI(V);CHKERRQ(ierr); /* Reset existing pattern to free memory */
      v->first_assembly_done = PETSC_FALSE; /* Mark that the first assembly has not been done */
    }
    break;
  }

  PetscFunctionReturn(0);
}
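
/* Usage sketch (illustrative, not part of the original source): an application that
   repeatedly assembles the same (or a shrinking) set of off-process entries can ask
   PETSc to cache and reuse the communication pattern managed above. The vector v and
   the values m, idx, vals, nsteps are assumptions for the example.

     ierr = VecSetOption(v,VEC_SUBSET_OFF_PROC_ENTRIES,PETSC_TRUE);CHKERRQ(ierr);
     for (step=0; step<nsteps; step++) {
       ierr = VecSetValues(v,m,idx,vals,ADD_VALUES);CHKERRQ(ierr);
       ierr = VecAssemblyBegin(v);CHKERRQ(ierr);  // after the first pass the cached pattern is reused
       ierr = VecAssemblyEnd(v);CHKERRQ(ierr);
     }
*/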


static PetscErrorCode VecResetArray_MPI(Vec vin)
{
  Vec_MPI        *v = (Vec_MPI*)vin->data;
  PetscErrorCode ierr;

  PetscFunctionBegin;
  v->array         = v->unplacedarray;
  v->unplacedarray = 0;
  if (v->localrep) {
    ierr = VecResetArray(v->localrep);CHKERRQ(ierr);
  }
  PetscFunctionReturn(0);
}

static PetscErrorCode VecAssemblySend_MPI_Private(MPI_Comm comm,const PetscMPIInt tag[],PetscMPIInt rankid,PetscMPIInt rank,void *sdata,MPI_Request req[],void *ctx)
{
  Vec X = (Vec)ctx;
  Vec_MPI *x = (Vec_MPI*)X->data;
  VecAssemblyHeader *hdr = (VecAssemblyHeader*)sdata;
  PetscInt bs = X->map->bs;
  PetscErrorCode ierr;

  PetscFunctionBegin;
  /* x->first_assembly_done indicates we are reusing a communication network. In that case, some
     messages can be empty, but we have to send them this time if we sent them before because the
     receiver is expecting them.
   */
  if (hdr->count || (x->first_assembly_done && x->sendptrs[rankid].ints)) {
    ierr = MPI_Isend(x->sendptrs[rankid].ints,hdr->count,MPIU_INT,rank,tag[0],comm,&req[0]);CHKERRQ(ierr);
    ierr = MPI_Isend(x->sendptrs[rankid].scalars,hdr->count,MPIU_SCALAR,rank,tag[1],comm,&req[1]);CHKERRQ(ierr);
  }
  if (hdr->bcount || (x->first_assembly_done && x->sendptrs[rankid].intb)) {
    ierr = MPI_Isend(x->sendptrs[rankid].intb,hdr->bcount,MPIU_INT,rank,tag[2],comm,&req[2]);CHKERRQ(ierr);
    ierr = MPI_Isend(x->sendptrs[rankid].scalarb,hdr->bcount*bs,MPIU_SCALAR,rank,tag[3],comm,&req[3]);CHKERRQ(ierr);
  }
  PetscFunctionReturn(0);
}

static PetscErrorCode VecAssemblyRecv_MPI_Private(MPI_Comm comm,const PetscMPIInt tag[],PetscMPIInt rank,void *rdata,MPI_Request req[],void *ctx)
{
  Vec X = (Vec)ctx;
  Vec_MPI *x = (Vec_MPI*)X->data;
  VecAssemblyHeader *hdr = (VecAssemblyHeader*)rdata;
  PetscErrorCode ierr;
  PetscInt bs = X->map->bs;
  VecAssemblyFrame *frame;

  PetscFunctionBegin;
  ierr = PetscSegBufferGet(x->segrecvframe,1,&frame);CHKERRQ(ierr);

  if (hdr->count) {
    ierr = PetscSegBufferGet(x->segrecvint,hdr->count,&frame->ints);CHKERRQ(ierr);
    ierr = MPI_Irecv(frame->ints,hdr->count,MPIU_INT,rank,tag[0],comm,&req[0]);CHKERRQ(ierr);
    ierr = PetscSegBufferGet(x->segrecvscalar,hdr->count,&frame->scalars);CHKERRQ(ierr);
    ierr = MPI_Irecv(frame->scalars,hdr->count,MPIU_SCALAR,rank,tag[1],comm,&req[1]);CHKERRQ(ierr);
    frame->pendings = 2;
  } else {
    frame->ints = NULL;
    frame->scalars = NULL;
    frame->pendings = 0;
  }

  if (hdr->bcount) {
    ierr = PetscSegBufferGet(x->segrecvint,hdr->bcount,&frame->intb);CHKERRQ(ierr);
    ierr = MPI_Irecv(frame->intb,hdr->bcount,MPIU_INT,rank,tag[2],comm,&req[2]);CHKERRQ(ierr);
    ierr = PetscSegBufferGet(x->segrecvscalar,hdr->bcount*bs,&frame->scalarb);CHKERRQ(ierr);
    ierr = MPI_Irecv(frame->scalarb,hdr->bcount*bs,MPIU_SCALAR,rank,tag[3],comm,&req[3]);CHKERRQ(ierr);
    frame->pendingb = 2;
  } else {
    frame->intb = NULL;
    frame->scalarb = NULL;
    frame->pendingb = 0;
  }
  PetscFunctionReturn(0);
}

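/* Summary of the assembly flow below (descriptive comment, not from the original source):
   VecAssemblyBegin_MPI_BTS sorts and compresses the scalar and block stashes, determines
   the destination ranks, and posts nonblocking sends/receives using the two callbacks
   above. On the first assembly the receivers are discovered with
   PetscCommBuildTwoSidedFReq(); when VEC_SUBSET_OFF_PROC_ENTRIES is set, later assemblies
   skip that rendezvous and reuse the cached ranks, headers, and requests. */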
static PetscErrorCode VecAssemblyBegin_MPI_BTS(Vec X)
{
  Vec_MPI        *x = (Vec_MPI*)X->data;
  PetscErrorCode ierr;
  MPI_Comm       comm;
  PetscInt       i,j,jb,bs;

  PetscFunctionBegin;
  if (X->stash.donotstash) PetscFunctionReturn(0);

  ierr = PetscObjectGetComm((PetscObject)X,&comm);CHKERRQ(ierr);
  ierr = VecGetBlockSize(X,&bs);CHKERRQ(ierr);
#if defined(PETSC_USE_DEBUG)
  {
    InsertMode addv;
    ierr = MPIU_Allreduce((PetscEnum*)&X->stash.insertmode,(PetscEnum*)&addv,1,MPIU_ENUM,MPI_BOR,comm);CHKERRQ(ierr);
    if (addv == (ADD_VALUES|INSERT_VALUES)) SETERRQ(comm,PETSC_ERR_ARG_NOTSAMETYPE,"Some processors inserted values while others added");
  }
#endif
  X->bstash.insertmode = X->stash.insertmode; /* Block stash implicitly tracks InsertMode of scalar stash */

  ierr = VecStashSortCompress_Private(&X->stash);CHKERRQ(ierr);
  ierr = VecStashSortCompress_Private(&X->bstash);CHKERRQ(ierr);

  if (!x->sendranks) {
    PetscMPIInt nowners,bnowners,*owners,*bowners;
    PetscInt ntmp;
    ierr = VecStashGetOwnerList_Private(&X->stash,X->map,&nowners,&owners);CHKERRQ(ierr);
    ierr = VecStashGetOwnerList_Private(&X->bstash,X->map,&bnowners,&bowners);CHKERRQ(ierr);
    ierr = PetscMergeMPIIntArray(nowners,owners,bnowners,bowners,&ntmp,&x->sendranks);CHKERRQ(ierr);
    x->nsendranks = ntmp;
    ierr = PetscFree(owners);CHKERRQ(ierr);
    ierr = PetscFree(bowners);CHKERRQ(ierr);
    ierr = PetscMalloc1(x->nsendranks,&x->sendhdr);CHKERRQ(ierr);
    ierr = PetscCalloc1(x->nsendranks,&x->sendptrs);CHKERRQ(ierr);
  }
  for (i=0,j=0,jb=0; i<x->nsendranks; i++) {
    PetscMPIInt rank = x->sendranks[i];
    x->sendhdr[i].insertmode = X->stash.insertmode;
    /* Initialize pointers for non-empty stashes the first time around.  Subsequent assemblies with
     * VEC_SUBSET_OFF_PROC_ENTRIES will leave the old pointers (dangling because the stash has been collected) when
     * there is nothing new to send, so that size-zero messages get sent instead. */
    x->sendhdr[i].count = 0;
    if (X->stash.n) {
      x->sendptrs[i].ints    = &X->stash.idx[j];
      x->sendptrs[i].scalars = &X->stash.array[j];
      for ( ; j<X->stash.n && X->stash.idx[j] < X->map->range[rank+1]; j++) x->sendhdr[i].count++;
    }
    x->sendhdr[i].bcount = 0;
    if (X->bstash.n) {
      x->sendptrs[i].intb    = &X->bstash.idx[jb];
      x->sendptrs[i].scalarb = &X->bstash.array[jb*bs];
      for ( ; jb<X->bstash.n && X->bstash.idx[jb]*bs < X->map->range[rank+1]; jb++) x->sendhdr[i].bcount++;
    }
  }

  if (!x->segrecvint) {ierr = PetscSegBufferCreate(sizeof(PetscInt),1000,&x->segrecvint);CHKERRQ(ierr);}
  if (!x->segrecvscalar) {ierr = PetscSegBufferCreate(sizeof(PetscScalar),1000,&x->segrecvscalar);CHKERRQ(ierr);}
  if (!x->segrecvframe) {ierr = PetscSegBufferCreate(sizeof(VecAssemblyFrame),50,&x->segrecvframe);CHKERRQ(ierr);}
  if (x->first_assembly_done) { /* this is not the first assembly */
    PetscMPIInt tag[4];
    for (i=0; i<4; i++) {ierr = PetscCommGetNewTag(comm,&tag[i]);CHKERRQ(ierr);}
    for (i=0; i<x->nsendranks; i++) {
      ierr = VecAssemblySend_MPI_Private(comm,tag,i,x->sendranks[i],x->sendhdr+i,x->sendreqs+4*i,X);CHKERRQ(ierr);
    }
    for (i=0; i<x->nrecvranks; i++) {
      ierr = VecAssemblyRecv_MPI_Private(comm,tag,x->recvranks[i],x->recvhdr+i,x->recvreqs+4*i,X);CHKERRQ(ierr);
    }
    x->use_status = PETSC_TRUE;
  } else { /* First time assembly */
    ierr = PetscCommBuildTwoSidedFReq(comm,3,MPIU_INT,x->nsendranks,x->sendranks,(PetscInt*)x->sendhdr,&x->nrecvranks,&x->recvranks,&x->recvhdr,4,&x->sendreqs,&x->recvreqs,VecAssemblySend_MPI_Private,VecAssemblyRecv_MPI_Private,X);CHKERRQ(ierr);
    x->use_status = PETSC_FALSE;
  }

  /* The first_assembly_done flag is only meaningful when x->assembly_subset is set.
     When assembly_subset is set, the line below marks the first assembly as done.
   */
  x->first_assembly_done = x->assembly_subset;

  {
    PetscInt nstash,reallocs;
    ierr = VecStashGetInfo_Private(&X->stash,&nstash,&reallocs);CHKERRQ(ierr);
    ierr = PetscInfo2(X,"Stash has %D entries, uses %D mallocs.\n",nstash,reallocs);CHKERRQ(ierr);
    ierr = VecStashGetInfo_Private(&X->bstash,&nstash,&reallocs);CHKERRQ(ierr);
    ierr = PetscInfo2(X,"Block-Stash has %D entries, uses %D mallocs.\n",nstash,reallocs);CHKERRQ(ierr);
  }
  PetscFunctionReturn(0);
}

static PetscErrorCode VecAssemblyEnd_MPI_BTS(Vec X)
{
  Vec_MPI *x = (Vec_MPI*)X->data;
  PetscInt bs = X->map->bs;
  PetscMPIInt npending,*some_indices,r;
  MPI_Status  *some_statuses;
  PetscScalar *xarray;
  PetscErrorCode ierr;
  VecAssemblyFrame *frame;

  PetscFunctionBegin;
  if (X->stash.donotstash) {
    X->stash.insertmode = NOT_SET_VALUES;
    X->bstash.insertmode = NOT_SET_VALUES;
    PetscFunctionReturn(0);
  }

  if (!x->segrecvframe) SETERRQ(PETSC_COMM_SELF,PETSC_ERR_PLIB,"Missing segrecvframe! Probably you forgot to call VecAssemblyBegin first");
  ierr = VecGetArray(X,&xarray);CHKERRQ(ierr);
  ierr = PetscSegBufferExtractInPlace(x->segrecvframe,&frame);CHKERRQ(ierr);
  ierr = PetscMalloc2(4*x->nrecvranks,&some_indices,x->use_status?4*x->nrecvranks:0,&some_statuses);CHKERRQ(ierr);
  for (r=0,npending=0; r<x->nrecvranks; r++) npending += frame[r].pendings + frame[r].pendingb;
  while (npending>0) {
    PetscMPIInt ndone=0,ii;
    /* Filling MPI_Status fields requires some resources from the MPI library.  We skip it on the first assembly, or
     * when VEC_SUBSET_OFF_PROC_ENTRIES has not been set, because we could exchange exact sizes in the initial
     * rendezvous.  When the rendezvous is elided, however, we use MPI_Status to get actual message lengths, so that
     * subsequent assembly can set a proper subset of the values. */
    ierr = MPI_Waitsome(4*x->nrecvranks,x->recvreqs,&ndone,some_indices,x->use_status?some_statuses:MPI_STATUSES_IGNORE);CHKERRQ(ierr);
    for (ii=0; ii<ndone; ii++) {
      PetscInt i = some_indices[ii]/4,j,k;
      InsertMode imode = (InsertMode)x->recvhdr[i].insertmode;
      PetscInt *recvint;
      PetscScalar *recvscalar;
      PetscBool intmsg = (PetscBool)(some_indices[ii]%2 == 0);
      PetscBool blockmsg = (PetscBool)((some_indices[ii]%4)/2 == 1);
      npending--;
      if (!blockmsg) { /* Scalar stash */
        PetscMPIInt count;
        if (--frame[i].pendings > 0) continue;
        if (x->use_status) {
          ierr = MPI_Get_count(&some_statuses[ii],intmsg ? MPIU_INT : MPIU_SCALAR,&count);CHKERRQ(ierr);
        } else count = x->recvhdr[i].count;
        for (j=0,recvint=frame[i].ints,recvscalar=frame[i].scalars; j<count; j++,recvint++) {
          PetscInt loc = *recvint - X->map->rstart;
          if (*recvint < X->map->rstart || X->map->rend <= *recvint) SETERRQ3(PETSC_COMM_SELF,PETSC_ERR_PLIB,"Received vector entry %D out of local range [%D,%D)",*recvint,X->map->rstart,X->map->rend);
          switch (imode) {
          case ADD_VALUES:
            xarray[loc] += *recvscalar++;
            break;
          case INSERT_VALUES:
            xarray[loc] = *recvscalar++;
            break;
          default: SETERRQ1(PETSC_COMM_SELF,PETSC_ERR_SUP,"Insert mode not supported 0x%x",imode);
          }
        }
      } else {                  /* Block stash */
        PetscMPIInt count;
        if (--frame[i].pendingb > 0) continue;
        if (x->use_status) {
          ierr = MPI_Get_count(&some_statuses[ii],intmsg ? MPIU_INT : MPIU_SCALAR,&count);CHKERRQ(ierr);
          if (!intmsg) count /= bs; /* Convert from number of scalars to number of blocks */
        } else count = x->recvhdr[i].bcount;
        for (j=0,recvint=frame[i].intb,recvscalar=frame[i].scalarb; j<count; j++,recvint++) {
          PetscInt loc = (*recvint)*bs - X->map->rstart;
          switch (imode) {
          case ADD_VALUES:
            for (k=loc; k<loc+bs; k++) xarray[k] += *recvscalar++;
            break;
          case INSERT_VALUES:
            for (k=loc; k<loc+bs; k++) xarray[k] = *recvscalar++;
            break;
          default: SETERRQ1(PETSC_COMM_SELF,PETSC_ERR_SUP,"Insert mode not supported 0x%x",imode);
          }
        }
      }
    }
  }
  ierr = VecRestoreArray(X,&xarray);CHKERRQ(ierr);
  ierr = MPI_Waitall(4*x->nsendranks,x->sendreqs,MPI_STATUSES_IGNORE);CHKERRQ(ierr);
  ierr = PetscFree2(some_indices,some_statuses);CHKERRQ(ierr);
  if (x->assembly_subset) {
    void *dummy;                /* reset segbuffers */
    ierr = PetscSegBufferExtractInPlace(x->segrecvint,&dummy);CHKERRQ(ierr);
    ierr = PetscSegBufferExtractInPlace(x->segrecvscalar,&dummy);CHKERRQ(ierr);
  } else {
    ierr = VecAssemblyReset_MPI(X);CHKERRQ(ierr);
  }

  X->stash.insertmode = NOT_SET_VALUES;
  X->bstash.insertmode = NOT_SET_VALUES;
  ierr = VecStashScatterEnd_Private(&X->stash);CHKERRQ(ierr);
  ierr = VecStashScatterEnd_Private(&X->bstash);CHKERRQ(ierr);
  PetscFunctionReturn(0);
}

PetscErrorCode VecAssemblyReset_MPI(Vec X)
{
  Vec_MPI *x = (Vec_MPI*)X->data;
  PetscErrorCode ierr;

  PetscFunctionBegin;
  ierr = PetscFree(x->sendreqs);CHKERRQ(ierr);
  ierr = PetscFree(x->recvreqs);CHKERRQ(ierr);
  ierr = PetscFree(x->sendranks);CHKERRQ(ierr);
  ierr = PetscFree(x->recvranks);CHKERRQ(ierr);
  ierr = PetscFree(x->sendhdr);CHKERRQ(ierr);
  ierr = PetscFree(x->recvhdr);CHKERRQ(ierr);
  ierr = PetscFree(x->sendptrs);CHKERRQ(ierr);
  ierr = PetscSegBufferDestroy(&x->segrecvint);CHKERRQ(ierr);
  ierr = PetscSegBufferDestroy(&x->segrecvscalar);CHKERRQ(ierr);
  ierr = PetscSegBufferDestroy(&x->segrecvframe);CHKERRQ(ierr);
  PetscFunctionReturn(0);
}


static PetscErrorCode VecSetFromOptions_MPI(PetscOptionItems *PetscOptionsObject,Vec X)
{
#if !defined(PETSC_HAVE_MPIUNI)
  PetscErrorCode ierr;
  PetscBool      flg = PETSC_FALSE,set;

  PetscFunctionBegin;
  ierr = PetscOptionsHead(PetscOptionsObject,"VecMPI Options");CHKERRQ(ierr);
  ierr = PetscOptionsBool("-vec_assembly_legacy","Use MPI 1 version of assembly","",flg,&flg,&set);CHKERRQ(ierr);
  if (set) {
    X->ops->assemblybegin = flg ? VecAssemblyBegin_MPI : VecAssemblyBegin_MPI_BTS;
    X->ops->assemblyend   = flg ? VecAssemblyEnd_MPI   : VecAssemblyEnd_MPI_BTS;
  }
  ierr = PetscOptionsTail();CHKERRQ(ierr);
#else
  X->ops->assemblybegin =  VecAssemblyBegin_MPI;
  X->ops->assemblyend   =  VecAssemblyEnd_MPI;
#endif
  PetscFunctionReturn(0);
}
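
/* Example (assumed command line, for illustration): the option registered above lets
   users fall back to the older stash/scatter-based assembly at run time, e.g.

     mpiexec -n 4 ./app -vec_assembly_legacy
*/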


static struct _VecOps DvOps = { VecDuplicate_MPI, /* 1 */
                                VecDuplicateVecs_Default,
                                VecDestroyVecs_Default,
                                VecDot_MPI,
                                VecMDot_MPI,
                                VecNorm_MPI,
                                VecTDot_MPI,
                                VecMTDot_MPI,
                                VecScale_Seq,
                                VecCopy_Seq, /* 10 */
                                VecSet_Seq,
                                VecSwap_Seq,
                                VecAXPY_Seq,
                                VecAXPBY_Seq,
                                VecMAXPY_Seq,
                                VecAYPX_Seq,
                                VecWAXPY_Seq,
                                VecAXPBYPCZ_Seq,
                                VecPointwiseMult_Seq,
                                VecPointwiseDivide_Seq,
                                VecSetValues_MPI, /* 20 */
                                VecAssemblyBegin_MPI_BTS,
                                VecAssemblyEnd_MPI_BTS,
                                0,
                                VecGetSize_MPI,
                                VecGetSize_Seq,
                                0,
                                VecMax_MPI,
                                VecMin_MPI,
                                VecSetRandom_Seq,
                                VecSetOption_MPI,
                                VecSetValuesBlocked_MPI,
                                VecDestroy_MPI,
                                VecView_MPI,
                                VecPlaceArray_MPI,
                                VecReplaceArray_Seq,
                                VecDot_Seq,
                                VecTDot_Seq,
                                VecNorm_Seq,
                                VecMDot_Seq,
                                VecMTDot_Seq,
                                VecLoad_Default,
                                VecReciprocal_Default,
                                VecConjugate_Seq,
                                0,
                                0,
                                VecResetArray_MPI,
                                VecSetFromOptions_MPI, /* set from options */
                                VecMaxPointwiseDivide_Seq,
                                VecPointwiseMax_Seq,
                                VecPointwiseMaxAbs_Seq,
                                VecPointwiseMin_Seq,
                                VecGetValues_MPI,
                                0,
                                0,
                                0,
                                0,
                                0,
                                0,
                                VecStrideGather_Default,
                                VecStrideScatter_Default,
                                0,
                                0,
                                0,
                                0,
                                0,
                                VecStrideSubSetGather_Default,
                                VecStrideSubSetScatter_Default,
                                0,
                                0
};

/*
    VecCreate_MPI_Private - Basic create routine called by VecCreate_MPI() (i.e. VecCreateMPI()),
    VecCreateMPIWithArray(), VecCreate_Shared() (i.e. VecCreateShared()), VecCreateGhost(),
    VecCreateGhostWithArray(), VecDuplicate_MPI(), and VecDuplicate_Shared()

    If alloc is true and array is NULL then this routine allocates the space, otherwise
    no space is allocated.
*/
PetscErrorCode VecCreate_MPI_Private(Vec v,PetscBool alloc,PetscInt nghost,const PetscScalar array[])
{
  Vec_MPI        *s;
  PetscErrorCode ierr;

  PetscFunctionBegin;
  ierr           = PetscNewLog(v,&s);CHKERRQ(ierr);
  v->data        = (void*)s;
  ierr           = PetscMemcpy(v->ops,&DvOps,sizeof(DvOps));CHKERRQ(ierr);
  s->nghost      = nghost;
  v->petscnative = PETSC_TRUE;

  ierr = PetscLayoutSetUp(v->map);CHKERRQ(ierr);

  s->array           = (PetscScalar*)array;
  s->array_allocated = 0;
  if (alloc && !array) {
    PetscInt n = v->map->n+nghost;
    ierr               = PetscCalloc1(n,&s->array);CHKERRQ(ierr);
    ierr               = PetscLogObjectMemory((PetscObject)v,n*sizeof(PetscScalar));CHKERRQ(ierr);
    s->array_allocated = s->array;
  }

  /* By default parallel vectors do not have local representation */
  s->localrep    = 0;
  s->localupdate = 0;

  v->stash.insertmode = NOT_SET_VALUES;
  v->bstash.insertmode = NOT_SET_VALUES;
  /* create the stashes. The block-size for bstash is set later when
     VecSetValuesBlocked is called.
  */
  ierr = VecStashCreate_Private(PetscObjectComm((PetscObject)v),1,&v->stash);CHKERRQ(ierr);
  ierr = VecStashCreate_Private(PetscObjectComm((PetscObject)v),PetscAbs(v->map->bs),&v->bstash);CHKERRQ(ierr);

#if defined(PETSC_HAVE_MATLAB_ENGINE)
  ierr = PetscObjectComposeFunction((PetscObject)v,"PetscMatlabEnginePut_C",VecMatlabEnginePut_Default);CHKERRQ(ierr);
  ierr = PetscObjectComposeFunction((PetscObject)v,"PetscMatlabEngineGet_C",VecMatlabEngineGet_Default);CHKERRQ(ierr);
#endif
  ierr = PetscObjectChangeTypeName((PetscObject)v,VECMPI);CHKERRQ(ierr);
  PetscFunctionReturn(0);
}

/*MC
   VECMPI - VECMPI = "mpi" - The basic parallel vector

   Options Database Keys:
. -vec_type mpi - sets the vector type to VECMPI during a call to VecSetFromOptions()

  Level: beginner

.seealso: VecCreate(), VecSetType(), VecSetFromOptions(), VecCreateMPIWithArray(), VecType, VecCreateMPI()
M*/

PETSC_EXTERN PetscErrorCode VecCreate_MPI(Vec vv)
{
  PetscErrorCode ierr;

  PetscFunctionBegin;
  ierr = VecCreate_MPI_Private(vv,PETSC_TRUE,0,0);CHKERRQ(ierr);
  PetscFunctionReturn(0);
}
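
/* Usage sketch (illustrative): the usual way to obtain a VECMPI is through the generic
   creation interface; the communicator and sizes below are assumptions for the example.

     Vec v;
     ierr = VecCreate(PETSC_COMM_WORLD,&v);CHKERRQ(ierr);
     ierr = VecSetSizes(v,PETSC_DECIDE,100);CHKERRQ(ierr);
     ierr = VecSetType(v,VECMPI);CHKERRQ(ierr);  // or VecSetFromOptions(v) with -vec_type mpi
*/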

/*MC
   VECSTANDARD = "standard" - A VECSEQ on one process and VECMPI on more than one process

   Options Database Keys:
. -vec_type standard - sets a vector type to standard on calls to VecSetFromOptions()

  Level: beginner

.seealso: VecCreateSeq(), VecCreateMPI()
M*/

PETSC_EXTERN PetscErrorCode VecCreate_Standard(Vec v)
{
  PetscErrorCode ierr;
  PetscMPIInt    size;

  PetscFunctionBegin;
  ierr = MPI_Comm_size(PetscObjectComm((PetscObject)v),&size);CHKERRQ(ierr);
  if (size == 1) {
    ierr = VecSetType(v,VECSEQ);CHKERRQ(ierr);
  } else {
    ierr = VecSetType(v,VECMPI);CHKERRQ(ierr);
  }
  PetscFunctionReturn(0);
}

/*@C
   VecCreateMPIWithArray - Creates a parallel, array-style vector,
   where the user provides the array space to store the vector values.

   Collective

   Input Parameters:
+  comm  - the MPI communicator to use
.  bs    - block size, same meaning as VecSetBlockSize()
.  n     - local vector length, cannot be PETSC_DECIDE
.  N     - global vector length (or PETSC_DECIDE to have it calculated)
-  array - the user-provided array to store the vector values

   Output Parameter:
.  vv - the vector

   Notes:
   Use VecDuplicate() or VecDuplicateVecs() to form additional vectors of the
   same type as an existing vector.

   If the user-provided array is NULL, then VecPlaceArray() can be used
   at a later stage to SET the array for storing the vector values.

   PETSc does NOT free the array when the vector is destroyed via VecDestroy().
   The user should not free the array until the vector is destroyed.

   Level: intermediate

.seealso: VecCreateSeqWithArray(), VecCreate(), VecDuplicate(), VecDuplicateVecs(), VecCreateGhost(),
          VecCreateMPI(), VecCreateGhostWithArray(), VecPlaceArray()

@*/
PetscErrorCode  VecCreateMPIWithArray(MPI_Comm comm,PetscInt bs,PetscInt n,PetscInt N,const PetscScalar array[],Vec *vv)
{
  PetscErrorCode ierr;

  PetscFunctionBegin;
  if (n == PETSC_DECIDE) SETERRQ(PETSC_COMM_SELF,PETSC_ERR_ARG_OUTOFRANGE,"Must set local size of vector");
  ierr = PetscSplitOwnership(comm,&n,&N);CHKERRQ(ierr);
  ierr = VecCreate(comm,vv);CHKERRQ(ierr);
  ierr = VecSetSizes(*vv,n,N);CHKERRQ(ierr);
  ierr = VecSetBlockSize(*vv,bs);CHKERRQ(ierr);
  ierr = VecCreate_MPI_Private(*vv,PETSC_FALSE,0,array);CHKERRQ(ierr);
  PetscFunctionReturn(0);
}
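
/* Usage sketch (illustrative): each rank supplies its own storage; the local length 10
   is an assumption for the example. Per the Notes above, PETSc never frees 'vals', so
   it must stay valid until after VecDestroy().

     PetscScalar vals[10];
     Vec         v;
     ierr = VecCreateMPIWithArray(PETSC_COMM_WORLD,1,10,PETSC_DECIDE,vals,&v);CHKERRQ(ierr);
     // ... use v ...
     ierr = VecDestroy(&v);CHKERRQ(ierr);
*/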

/*@C
   VecCreateGhostWithArray - Creates a parallel vector with ghost padding on each processor;
   the caller allocates the array space.

   Collective

   Input Parameters:
+  comm - the MPI communicator to use
.  n - local vector length
.  N - global vector length (or PETSC_DECIDE to have it calculated if n is given)
.  nghost - number of local ghost points
.  ghosts - global indices of ghost points (or NULL if not needed), these do not need to be in increasing order (sorted)
-  array - the space to store the vector values (as long as n + nghost)

   Output Parameter:
.  vv - the global vector representation (without ghost points as part of vector)

   Notes:
   Use VecGhostGetLocalForm() to access the local, ghosted representation
   of the vector.

   This also automatically sets the ISLocalToGlobalMapping() for this vector.

   Level: advanced

.seealso: VecCreate(), VecGhostGetLocalForm(), VecGhostRestoreLocalForm(),
          VecCreateGhost(), VecCreateSeqWithArray(), VecCreateMPIWithArray(),
          VecCreateGhostBlock(), VecCreateGhostBlockWithArray(), VecMPISetGhost()

@*/
PetscErrorCode  VecCreateGhostWithArray(MPI_Comm comm,PetscInt n,PetscInt N,PetscInt nghost,const PetscInt ghosts[],const PetscScalar array[],Vec *vv)
{
  PetscErrorCode         ierr;
  Vec_MPI                *w;
  PetscScalar            *larray;
  IS                     from,to;
  ISLocalToGlobalMapping ltog;
  PetscInt               rstart,i,*indices;

  PetscFunctionBegin;
  *vv = 0;

  if (n == PETSC_DECIDE)      SETERRQ(PETSC_COMM_SELF,PETSC_ERR_ARG_OUTOFRANGE,"Must set local size");
  if (nghost == PETSC_DECIDE) SETERRQ(PETSC_COMM_SELF,PETSC_ERR_ARG_OUTOFRANGE,"Must set local ghost size");
  if (nghost < 0)             SETERRQ(PETSC_COMM_SELF,PETSC_ERR_ARG_OUTOFRANGE,"Ghost length must be >= 0");
  ierr = PetscSplitOwnership(comm,&n,&N);CHKERRQ(ierr);
  /* Create global representation */
  ierr = VecCreate(comm,vv);CHKERRQ(ierr);
  ierr = VecSetSizes(*vv,n,N);CHKERRQ(ierr);
  ierr = VecCreate_MPI_Private(*vv,PETSC_TRUE,nghost,array);CHKERRQ(ierr);
  w    = (Vec_MPI*)(*vv)->data;
  /* Create local representation */
  ierr = VecGetArray(*vv,&larray);CHKERRQ(ierr);
  ierr = VecCreateSeqWithArray(PETSC_COMM_SELF,1,n+nghost,larray,&w->localrep);CHKERRQ(ierr);
  ierr = PetscLogObjectParent((PetscObject)*vv,(PetscObject)w->localrep);CHKERRQ(ierr);
  ierr = VecRestoreArray(*vv,&larray);CHKERRQ(ierr);

  /*
       Create scatter context for scattering (updating) ghost values
  */
  ierr = ISCreateGeneral(comm,nghost,ghosts,PETSC_COPY_VALUES,&from);CHKERRQ(ierr);
  ierr = ISCreateStride(PETSC_COMM_SELF,nghost,n,1,&to);CHKERRQ(ierr);
  ierr = VecScatterCreate(*vv,from,w->localrep,to,&w->localupdate);CHKERRQ(ierr);
  ierr = PetscLogObjectParent((PetscObject)*vv,(PetscObject)w->localupdate);CHKERRQ(ierr);
  ierr = ISDestroy(&to);CHKERRQ(ierr);
  ierr = ISDestroy(&from);CHKERRQ(ierr);

  /* set local to global mapping for ghosted vector */
  ierr = PetscMalloc1(n+nghost,&indices);CHKERRQ(ierr);
  ierr = VecGetOwnershipRange(*vv,&rstart,NULL);CHKERRQ(ierr);
  for (i=0; i<n; i++) {
    indices[i] = rstart + i;
  }
  for (i=0; i<nghost; i++) {
    indices[n+i] = ghosts[i];
  }
  ierr = ISLocalToGlobalMappingCreate(comm,1,n+nghost,indices,PETSC_OWN_POINTER,&ltog);CHKERRQ(ierr);
  ierr = VecSetLocalToGlobalMapping(*vv,ltog);CHKERRQ(ierr);
  ierr = ISLocalToGlobalMappingDestroy(&ltog);CHKERRQ(ierr);
  PetscFunctionReturn(0);
}

/*@
   VecCreateGhost - Creates a parallel vector with ghost padding on each processor.

   Collective

   Input Parameters:
+  comm - the MPI communicator to use
.  n - local vector length
.  N - global vector length (or PETSC_DECIDE to have it calculated if n is given)
.  nghost - number of local ghost points
-  ghosts - global indices of ghost points, these do not need to be in increasing order (sorted)

   Output Parameter:
.  vv - the global vector representation (without ghost points as part of vector)

   Notes:
   Use VecGhostGetLocalForm() to access the local, ghosted representation
   of the vector.

   This also automatically sets the ISLocalToGlobalMapping() for this vector.

   Level: advanced

.seealso: VecCreateSeq(), VecCreate(), VecDuplicate(), VecDuplicateVecs(), VecCreateMPI(),
          VecGhostGetLocalForm(), VecGhostRestoreLocalForm(), VecGhostUpdateBegin(),
          VecCreateGhostWithArray(), VecCreateMPIWithArray(), VecGhostUpdateEnd(),
          VecCreateGhostBlock(), VecCreateGhostBlockWithArray(), VecMPISetGhost()

@*/
PetscErrorCode  VecCreateGhost(MPI_Comm comm,PetscInt n,PetscInt N,PetscInt nghost,const PetscInt ghosts[],Vec *vv)
{
  PetscErrorCode ierr;

  PetscFunctionBegin;
  ierr = VecCreateGhostWithArray(comm,n,N,nghost,ghosts,0,vv);CHKERRQ(ierr);
  PetscFunctionReturn(0);
}
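
/* Usage sketch (illustrative): create a ghosted vector and refresh its ghost values;
   the sizes and ghost indices below are assumptions for the example.

     PetscInt ghosts[2] = {0,40};  // global indices owned by other ranks
     Vec      v,lv;
     ierr = VecCreateGhost(PETSC_COMM_WORLD,10,PETSC_DECIDE,2,ghosts,&v);CHKERRQ(ierr);
     // ... VecSetValues()/VecAssemblyBegin()/VecAssemblyEnd() on v ...
     ierr = VecGhostUpdateBegin(v,INSERT_VALUES,SCATTER_FORWARD);CHKERRQ(ierr);
     ierr = VecGhostUpdateEnd(v,INSERT_VALUES,SCATTER_FORWARD);CHKERRQ(ierr);
     ierr = VecGhostGetLocalForm(v,&lv);CHKERRQ(ierr);  // sequential view with n+nghost entries
     // ... read owned and ghost entries from lv ...
     ierr = VecGhostRestoreLocalForm(v,&lv);CHKERRQ(ierr);
*/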

/*@
   VecMPISetGhost - Sets the ghost points for an MPI ghost vector

   Collective on Vec

   Input Parameters:
+  vv - the MPI vector
.  nghost - number of local ghost points
-  ghosts - global indices of ghost points, these do not need to be in increasing order (sorted)

   Notes:
   Use VecGhostGetLocalForm() to access the local, ghosted representation
   of the vector.

   This also automatically sets the ISLocalToGlobalMapping() for this vector.

   You must call this AFTER you have set the type of the vector (with VecSetType()) and the size (with VecSetSizes()).

   Level: advanced

.seealso: VecCreateSeq(), VecCreate(), VecDuplicate(), VecDuplicateVecs(), VecCreateMPI(),
          VecGhostGetLocalForm(), VecGhostRestoreLocalForm(), VecGhostUpdateBegin(),
          VecCreateGhostWithArray(), VecCreateMPIWithArray(), VecGhostUpdateEnd(),
          VecCreateGhostBlock(), VecCreateGhostBlockWithArray()

@*/
PetscErrorCode  VecMPISetGhost(Vec vv,PetscInt nghost,const PetscInt ghosts[])
{
  PetscErrorCode ierr;
  PetscBool      flg;

  PetscFunctionBegin;
  ierr = PetscObjectTypeCompare((PetscObject)vv,VECMPI,&flg);CHKERRQ(ierr);
  /* if this is already a fully existent VECMPI then basically destroy it and rebuild with ghosting */
  if (flg) {
    PetscInt               n,N;
    Vec_MPI                *w;
    PetscScalar            *larray;
    IS                     from,to;
    ISLocalToGlobalMapping ltog;
    PetscInt               rstart,i,*indices;
    MPI_Comm               comm;

    ierr = PetscObjectGetComm((PetscObject)vv,&comm);CHKERRQ(ierr);
    n    = vv->map->n;
    N    = vv->map->N;
    ierr = (*vv->ops->destroy)(vv);CHKERRQ(ierr);
    ierr = VecSetSizes(vv,n,N);CHKERRQ(ierr);
    ierr = VecCreate_MPI_Private(vv,PETSC_TRUE,nghost,NULL);CHKERRQ(ierr);
    w    = (Vec_MPI*)(vv)->data;
    /* Create local representation */
    ierr = VecGetArray(vv,&larray);CHKERRQ(ierr);
    ierr = VecCreateSeqWithArray(PETSC_COMM_SELF,1,n+nghost,larray,&w->localrep);CHKERRQ(ierr);
    ierr = PetscLogObjectParent((PetscObject)vv,(PetscObject)w->localrep);CHKERRQ(ierr);
    ierr = VecRestoreArray(vv,&larray);CHKERRQ(ierr);

    /*
     Create scatter context for scattering (updating) ghost values
     */
    ierr = ISCreateGeneral(comm,nghost,ghosts,PETSC_COPY_VALUES,&from);CHKERRQ(ierr);
    ierr = ISCreateStride(PETSC_COMM_SELF,nghost,n,1,&to);CHKERRQ(ierr);
    ierr = VecScatterCreate(vv,from,w->localrep,to,&w->localupdate);CHKERRQ(ierr);
    ierr = PetscLogObjectParent((PetscObject)vv,(PetscObject)w->localupdate);CHKERRQ(ierr);
    ierr = ISDestroy(&to);CHKERRQ(ierr);
    ierr = ISDestroy(&from);CHKERRQ(ierr);

    /* set local to global mapping for ghosted vector */
    ierr = PetscMalloc1(n+nghost,&indices);CHKERRQ(ierr);
    ierr = VecGetOwnershipRange(vv,&rstart,NULL);CHKERRQ(ierr);

    for (i=0; i<n; i++)      indices[i]   = rstart + i;
    for (i=0; i<nghost; i++) indices[n+i] = ghosts[i];

    ierr = ISLocalToGlobalMappingCreate(comm,1,n+nghost,indices,PETSC_OWN_POINTER,&ltog);CHKERRQ(ierr);
    ierr = VecSetLocalToGlobalMapping(vv,ltog);CHKERRQ(ierr);
    ierr = ISLocalToGlobalMappingDestroy(&ltog);CHKERRQ(ierr);
  } else if (vv->ops->create == VecCreate_MPI) SETERRQ(PetscObjectComm((PetscObject)vv),PETSC_ERR_ARG_WRONGSTATE,"Must set local or global size before setting ghosting");
  else if (!((PetscObject)vv)->type_name) SETERRQ(PetscObjectComm((PetscObject)vv),PETSC_ERR_ARG_WRONGSTATE,"Must set type to VECMPI before ghosting");
  PetscFunctionReturn(0);
}
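
/* Usage sketch (illustrative): ghosting an existing VECMPI; per the Notes above, type
   and sizes must be set first. The sizes and index values are assumptions for the example.

     Vec      v;
     PetscInt ghosts[1] = {0};
     ierr = VecCreate(PETSC_COMM_WORLD,&v);CHKERRQ(ierr);
     ierr = VecSetSizes(v,10,PETSC_DECIDE);CHKERRQ(ierr);
     ierr = VecSetType(v,VECMPI);CHKERRQ(ierr);
     ierr = VecMPISetGhost(v,1,ghosts);CHKERRQ(ierr);
*/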


/* ------------------------------------------------------------------------------------------*/
/*@C
   VecCreateGhostBlockWithArray - Creates a parallel vector with ghost padding on each processor;
   the caller allocates the array space. Indices in the ghost region are based on blocks.

   Collective

   Input Parameters:
+  comm - the MPI communicator to use
.  bs - block size
.  n - local vector length
.  N - global vector length (or PETSC_DECIDE to have it calculated if n is given)
.  nghost - number of local ghost blocks
.  ghosts - global indices of ghost blocks (or NULL if not needed), counts are by block not by index, these do not need to be in increasing order (sorted)
-  array - the space to store the vector values (as long as n + nghost*bs)

   Output Parameter:
.  vv - the global vector representation (without ghost points as part of vector)

   Notes:
   Use VecGhostGetLocalForm() to access the local, ghosted representation
   of the vector.

   n is the local vector size (total local size not the number of blocks) while nghost
   is the number of blocks in the ghost portion, i.e. the number of elements in the ghost
   portion is bs*nghost

   Level: advanced

.seealso: VecCreate(), VecGhostGetLocalForm(), VecGhostRestoreLocalForm(),
          VecCreateGhost(), VecCreateSeqWithArray(), VecCreateMPIWithArray(),
          VecCreateGhostWithArray(), VecCreateGhostBlock()

@*/
PetscErrorCode  VecCreateGhostBlockWithArray(MPI_Comm comm,PetscInt bs,PetscInt n,PetscInt N,PetscInt nghost,const PetscInt ghosts[],const PetscScalar array[],Vec *vv)
{
  PetscErrorCode         ierr;
  Vec_MPI                *w;
  PetscScalar            *larray;
  IS                     from,to;
  ISLocalToGlobalMapping ltog;
  PetscInt               rstart,i,nb,*indices;

  PetscFunctionBegin;
  *vv = 0;

  if (n == PETSC_DECIDE)      SETERRQ(PETSC_COMM_SELF,PETSC_ERR_ARG_OUTOFRANGE,"Must set local size");
  if (nghost == PETSC_DECIDE) SETERRQ(PETSC_COMM_SELF,PETSC_ERR_ARG_OUTOFRANGE,"Must set local ghost size");
  if (nghost < 0)             SETERRQ(PETSC_COMM_SELF,PETSC_ERR_ARG_OUTOFRANGE,"Ghost length must be >= 0");
  if (n % bs)                 SETERRQ(PETSC_COMM_SELF,PETSC_ERR_ARG_INCOMP,"Local size must be a multiple of block size");
  ierr = PetscSplitOwnership(comm,&n,&N);CHKERRQ(ierr);
  /* Create global representation */
  ierr = VecCreate(comm,vv);CHKERRQ(ierr);
  ierr = VecSetSizes(*vv,n,N);CHKERRQ(ierr);
  ierr = VecSetBlockSize(*vv,bs);CHKERRQ(ierr);
  ierr = VecCreate_MPI_Private(*vv,PETSC_TRUE,nghost*bs,array);CHKERRQ(ierr);
  w    = (Vec_MPI*)(*vv)->data;
  /* Create local representation */
  ierr = VecGetArray(*vv,&larray);CHKERRQ(ierr);
  ierr = VecCreateSeqWithArray(PETSC_COMM_SELF,bs,n+bs*nghost,larray,&w->localrep);CHKERRQ(ierr);
  ierr = PetscLogObjectParent((PetscObject)*vv,(PetscObject)w->localrep);CHKERRQ(ierr);
  ierr = VecRestoreArray(*vv,&larray);CHKERRQ(ierr);

  /*
       Create scatter context for scattering (updating) ghost values
  */
  ierr = ISCreateBlock(comm,bs,nghost,ghosts,PETSC_COPY_VALUES,&from);CHKERRQ(ierr);
  ierr = ISCreateStride(PETSC_COMM_SELF,bs*nghost,n,1,&to);CHKERRQ(ierr);
  ierr = VecScatterCreate(*vv,from,w->localrep,to,&w->localupdate);CHKERRQ(ierr);
  ierr = PetscLogObjectParent((PetscObject)*vv,(PetscObject)w->localupdate);CHKERRQ(ierr);
  ierr = ISDestroy(&to);CHKERRQ(ierr);
  ierr = ISDestroy(&from);CHKERRQ(ierr);

  /* set local to global mapping for ghosted vector */
  nb     = n/bs;
  ierr   = PetscMalloc1(nb+nghost,&indices);CHKERRQ(ierr);
  ierr   = VecGetOwnershipRange(*vv,&rstart,NULL);CHKERRQ(ierr);
  rstart = rstart/bs;

  for (i=0; i<nb; i++)      indices[i]    = rstart + i;
  for (i=0; i<nghost; i++)  indices[nb+i] = ghosts[i];

  ierr = ISLocalToGlobalMappingCreate(comm,bs,nb+nghost,indices,PETSC_OWN_POINTER,&ltog);CHKERRQ(ierr);
  ierr = VecSetLocalToGlobalMapping(*vv,ltog);CHKERRQ(ierr);
  ierr = ISLocalToGlobalMappingDestroy(&ltog);CHKERRQ(ierr);
  PetscFunctionReturn(0);
}

/*@
   VecCreateGhostBlock - Creates a parallel vector with ghost padding on each processor.
        The indexing of the ghost points is done with blocks.

   Collective

   Input Parameters:
+  comm - the MPI communicator to use
.  bs - the block size
.  n - local vector length
.  N - global vector length (or PETSC_DECIDE to have it calculated if n is given)
.  nghost - number of local ghost blocks
-  ghosts - global indices of ghost blocks, counts are by block, not by individual index, these do not need to be in increasing order (sorted)

   Output Parameter:
.  vv - the global vector representation (without ghost points as part of vector)

   Notes:
   Use VecGhostGetLocalForm() to access the local, ghosted representation
   of the vector.

   n is the local vector size (total local size not the number of blocks) while nghost
   is the number of blocks in the ghost portion, i.e. the number of elements in the ghost
   portion is bs*nghost

   Level: advanced

.seealso: VecCreateSeq(), VecCreate(), VecDuplicate(), VecDuplicateVecs(), VecCreateMPI(),
          VecGhostGetLocalForm(), VecGhostRestoreLocalForm(),
          VecCreateGhostWithArray(), VecCreateMPIWithArray(), VecCreateGhostBlockWithArray()

@*/
PetscErrorCode  VecCreateGhostBlock(MPI_Comm comm,PetscInt bs,PetscInt n,PetscInt N,PetscInt nghost,const PetscInt ghosts[],Vec *vv)
{
  PetscErrorCode ierr;

  PetscFunctionBegin;
  ierr = VecCreateGhostBlockWithArray(comm,bs,n,N,nghost,ghosts,0,vv);CHKERRQ(ierr);
  PetscFunctionReturn(0);
}
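
/* Usage sketch (illustrative): block-ghosted creation; bs, the sizes, and the ghost
   block index are assumptions for the example. With bs=2 and one ghost block, the
   local form holds n+2 entries.

     PetscInt ghosts[1] = {0};  // index of a ghost *block*, not of a scalar entry
     Vec      v;
     ierr = VecCreateGhostBlock(PETSC_COMM_WORLD,2,10,PETSC_DECIDE,1,ghosts,&v);CHKERRQ(ierr);
*/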
965