
/*
   This file contains routines for Parallel vector operations.
 */
#include <petscsys.h>
#include <../src/vec/vec/impls/mpi/pvecimpl.h>   /*I  "petscvec.h"   I*/

PetscErrorCode VecDot_MPI(Vec xin,Vec yin,PetscScalar *z)
{
  PetscScalar    sum,work;
  PetscErrorCode ierr;

  PetscFunctionBegin;
  ierr = VecDot_Seq(xin,yin,&work);CHKERRQ(ierr);
  ierr = MPIU_Allreduce(&work,&sum,1,MPIU_SCALAR,MPIU_SUM,PetscObjectComm((PetscObject)xin));CHKERRQ(ierr);
  *z   = sum;
  PetscFunctionReturn(0);
}

PetscErrorCode VecTDot_MPI(Vec xin,Vec yin,PetscScalar *z)
{
  PetscScalar    sum,work;
  PetscErrorCode ierr;

  PetscFunctionBegin;
  ierr = VecTDot_Seq(xin,yin,&work);CHKERRQ(ierr);
  ierr = MPIU_Allreduce(&work,&sum,1,MPIU_SCALAR,MPIU_SUM,PetscObjectComm((PetscObject)xin));CHKERRQ(ierr);
  *z   = sum;
  PetscFunctionReturn(0);
}

extern PetscErrorCode VecView_MPI_Draw(Vec,PetscViewer);

static PetscErrorCode VecPlaceArray_MPI(Vec vin,const PetscScalar *a)
{
  PetscErrorCode ierr;
  Vec_MPI        *v = (Vec_MPI*)vin->data;

  PetscFunctionBegin;
  if (v->unplacedarray) SETERRQ(PETSC_COMM_SELF,PETSC_ERR_ARG_WRONGSTATE,"VecPlaceArray() was already called on this vector, without a call to VecResetArray()");
  v->unplacedarray = v->array;  /* save previous array so reset can bring it back */
  v->array         = (PetscScalar*)a;
  if (v->localrep) {
    ierr = VecPlaceArray(v->localrep,a);CHKERRQ(ierr);
  }
  PetscFunctionReturn(0);
}

static PetscErrorCode VecDuplicate_MPI(Vec win,Vec *v)
{
  PetscErrorCode ierr;
  Vec_MPI        *vw,*w = (Vec_MPI*)win->data;
  PetscScalar    *array;

  PetscFunctionBegin;
  ierr = VecCreate(PetscObjectComm((PetscObject)win),v);CHKERRQ(ierr);
  ierr = PetscLayoutReference(win->map,&(*v)->map);CHKERRQ(ierr);

  ierr = VecCreate_MPI_Private(*v,PETSC_TRUE,w->nghost,0);CHKERRQ(ierr);
  vw   = (Vec_MPI*)(*v)->data;
  ierr = PetscMemcpy((*v)->ops,win->ops,sizeof(struct _VecOps));CHKERRQ(ierr);

  /* save local representation of the parallel vector (and scatter) if it exists */
  if (w->localrep) {
    ierr = VecGetArray(*v,&array);CHKERRQ(ierr);
    ierr = VecCreateSeqWithArray(PETSC_COMM_SELF,PetscAbs(win->map->bs),win->map->n+w->nghost,array,&vw->localrep);CHKERRQ(ierr);
    ierr = PetscMemcpy(vw->localrep->ops,w->localrep->ops,sizeof(struct _VecOps));CHKERRQ(ierr);
    ierr = VecRestoreArray(*v,&array);CHKERRQ(ierr);
    ierr = PetscLogObjectParent((PetscObject)*v,(PetscObject)vw->localrep);CHKERRQ(ierr);

    vw->localupdate = w->localupdate;
    if (vw->localupdate) {
      ierr = PetscObjectReference((PetscObject)vw->localupdate);CHKERRQ(ierr);
    }
  }

  /* New vector should inherit stashing property of parent */
  (*v)->stash.donotstash   = win->stash.donotstash;
  (*v)->stash.ignorenegidx = win->stash.ignorenegidx;

  ierr = PetscObjectListDuplicate(((PetscObject)win)->olist,&((PetscObject)(*v))->olist);CHKERRQ(ierr);
  ierr = PetscFunctionListDuplicate(((PetscObject)win)->qlist,&((PetscObject)(*v))->qlist);CHKERRQ(ierr);

  (*v)->map->bs   = PetscAbs(win->map->bs);
  (*v)->bstash.bs = win->bstash.bs;
  PetscFunctionReturn(0);
}


static PetscErrorCode VecSetOption_MPI(Vec V,VecOption op,PetscBool flag)
{
  Vec_MPI        *v = (Vec_MPI*)V->data;
  PetscErrorCode ierr;

  PetscFunctionBegin;
  switch (op) {
  case VEC_IGNORE_OFF_PROC_ENTRIES: V->stash.donotstash = flag;
    break;
  case VEC_IGNORE_NEGATIVE_INDICES: V->stash.ignorenegidx = flag;
    break;
  case VEC_SUBSET_OFF_PROC_ENTRIES:
    v->assembly_subset = flag; /* See the same logic in MatAssembly wrt MAT_SUBSET_OFF_PROC_ENTRIES */
    if (!v->assembly_subset) { /* User indicates "do not reuse the communication pattern" */
      ierr = VecAssemblyReset_MPI(V);CHKERRQ(ierr); /* Reset existing pattern to free memory */
      v->first_assembly_done = PETSC_FALSE; /* Mark that the first assembly has not been done */
    }
    break;
  }

  PetscFunctionReturn(0);
}
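
/*
   A hedged caller-side sketch of how VEC_SUBSET_OFF_PROC_ENTRIES is meant to be used (the loop
   bounds, indices, and values below are illustrative): setting the option before repeated
   assemblies lets VecAssemblyBegin_MPI_BTS() reuse the communication pattern built during the
   first assembly.

     VecSetOption(X,VEC_SUBSET_OFF_PROC_ENTRIES,PETSC_TRUE);
     for (it=0; it<nits; it++) {
       VecSetValue(X,row,val,ADD_VALUES);
       VecAssemblyBegin(X);
       VecAssemblyEnd(X);
     }
*/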


static PetscErrorCode VecResetArray_MPI(Vec vin)
{
  Vec_MPI        *v = (Vec_MPI*)vin->data;
  PetscErrorCode ierr;

  PetscFunctionBegin;
  v->array         = v->unplacedarray;
  v->unplacedarray = 0;
  if (v->localrep) {
    ierr = VecResetArray(v->localrep);CHKERRQ(ierr);
  }
  PetscFunctionReturn(0);
}

static PetscErrorCode VecAssemblySend_MPI_Private(MPI_Comm comm,const PetscMPIInt tag[],PetscMPIInt rankid,PetscMPIInt rank,void *sdata,MPI_Request req[],void *ctx)
{
  Vec X = (Vec)ctx;
  Vec_MPI *x = (Vec_MPI*)X->data;
  VecAssemblyHeader *hdr = (VecAssemblyHeader*)sdata;
  PetscInt bs = X->map->bs;
  PetscErrorCode ierr;

  PetscFunctionBegin;
  /* x->first_assembly_done indicates we are reusing a communication network. In that case, some
     messages can be empty, but we have to send them this time if we sent them before because the
     receiver is expecting them.
   */
  if (hdr->count || (x->first_assembly_done && x->sendptrs[rankid].ints)) {
    ierr = MPI_Isend(x->sendptrs[rankid].ints,hdr->count,MPIU_INT,rank,tag[0],comm,&req[0]);CHKERRQ(ierr);
    ierr = MPI_Isend(x->sendptrs[rankid].scalars,hdr->count,MPIU_SCALAR,rank,tag[1],comm,&req[1]);CHKERRQ(ierr);
  }
  if (hdr->bcount || (x->first_assembly_done && x->sendptrs[rankid].intb)) {
    ierr = MPI_Isend(x->sendptrs[rankid].intb,hdr->bcount,MPIU_INT,rank,tag[2],comm,&req[2]);CHKERRQ(ierr);
    ierr = MPI_Isend(x->sendptrs[rankid].scalarb,hdr->bcount*bs,MPIU_SCALAR,rank,tag[3],comm,&req[3]);CHKERRQ(ierr);
  }
  PetscFunctionReturn(0);
}

static PetscErrorCode VecAssemblyRecv_MPI_Private(MPI_Comm comm,const PetscMPIInt tag[],PetscMPIInt rank,void *rdata,MPI_Request req[],void *ctx)
{
  Vec X = (Vec)ctx;
  Vec_MPI *x = (Vec_MPI*)X->data;
  VecAssemblyHeader *hdr = (VecAssemblyHeader*)rdata;
  PetscErrorCode ierr;
  PetscInt bs = X->map->bs;
  VecAssemblyFrame *frame;

  PetscFunctionBegin;
  ierr = PetscSegBufferGet(x->segrecvframe,1,&frame);CHKERRQ(ierr);

  if (hdr->count) {
    ierr = PetscSegBufferGet(x->segrecvint,hdr->count,&frame->ints);CHKERRQ(ierr);
    ierr = MPI_Irecv(frame->ints,hdr->count,MPIU_INT,rank,tag[0],comm,&req[0]);CHKERRQ(ierr);
    ierr = PetscSegBufferGet(x->segrecvscalar,hdr->count,&frame->scalars);CHKERRQ(ierr);
    ierr = MPI_Irecv(frame->scalars,hdr->count,MPIU_SCALAR,rank,tag[1],comm,&req[1]);CHKERRQ(ierr);
    frame->pendings = 2;
  } else {
    frame->ints = NULL;
    frame->scalars = NULL;
    frame->pendings = 0;
  }

  if (hdr->bcount) {
    ierr = PetscSegBufferGet(x->segrecvint,hdr->bcount,&frame->intb);CHKERRQ(ierr);
    ierr = MPI_Irecv(frame->intb,hdr->bcount,MPIU_INT,rank,tag[2],comm,&req[2]);CHKERRQ(ierr);
    ierr = PetscSegBufferGet(x->segrecvscalar,hdr->bcount*bs,&frame->scalarb);CHKERRQ(ierr);
    ierr = MPI_Irecv(frame->scalarb,hdr->bcount*bs,MPIU_SCALAR,rank,tag[3],comm,&req[3]);CHKERRQ(ierr);
    frame->pendingb = 2;
  } else {
    frame->intb = NULL;
    frame->scalarb = NULL;
    frame->pendingb = 0;
  }
  PetscFunctionReturn(0);
}

static PetscErrorCode VecAssemblyBegin_MPI_BTS(Vec X)
{
  Vec_MPI        *x = (Vec_MPI*)X->data;
  PetscErrorCode ierr;
  MPI_Comm       comm;
  PetscInt       i,j,jb,bs;

  PetscFunctionBegin;
  if (X->stash.donotstash) PetscFunctionReturn(0);

  ierr = PetscObjectGetComm((PetscObject)X,&comm);CHKERRQ(ierr);
  ierr = VecGetBlockSize(X,&bs);CHKERRQ(ierr);
#if defined(PETSC_USE_DEBUG)
  {
    InsertMode addv;
    ierr = MPIU_Allreduce((PetscEnum*)&X->stash.insertmode,(PetscEnum*)&addv,1,MPIU_ENUM,MPI_BOR,comm);CHKERRQ(ierr);
    if (addv == (ADD_VALUES|INSERT_VALUES)) SETERRQ(comm,PETSC_ERR_ARG_NOTSAMETYPE,"Some processors inserted values while others added");
  }
#endif
  X->bstash.insertmode = X->stash.insertmode; /* Block stash implicitly tracks InsertMode of scalar stash */

  ierr = VecStashSortCompress_Private(&X->stash);CHKERRQ(ierr);
  ierr = VecStashSortCompress_Private(&X->bstash);CHKERRQ(ierr);

  if (!x->sendranks) {
    PetscMPIInt nowners,bnowners,*owners,*bowners;
    PetscInt ntmp;
    ierr = VecStashGetOwnerList_Private(&X->stash,X->map,&nowners,&owners);CHKERRQ(ierr);
    ierr = VecStashGetOwnerList_Private(&X->bstash,X->map,&bnowners,&bowners);CHKERRQ(ierr);
    ierr = PetscMergeMPIIntArray(nowners,owners,bnowners,bowners,&ntmp,&x->sendranks);CHKERRQ(ierr);
    x->nsendranks = ntmp;
    ierr = PetscFree(owners);CHKERRQ(ierr);
    ierr = PetscFree(bowners);CHKERRQ(ierr);
    ierr = PetscMalloc1(x->nsendranks,&x->sendhdr);CHKERRQ(ierr);
    ierr = PetscCalloc1(x->nsendranks,&x->sendptrs);CHKERRQ(ierr);
  }
  for (i=0,j=0,jb=0; i<x->nsendranks; i++) {
    PetscMPIInt rank = x->sendranks[i];
    x->sendhdr[i].insertmode = X->stash.insertmode;
    /* Initialize pointers for non-empty stashes the first time around.  Subsequent assemblies with
     * VEC_SUBSET_OFF_PROC_ENTRIES will leave the old pointers (dangling because the stash has been collected) when
     * there is nothing new to send, so that size-zero messages get sent instead. */
    x->sendhdr[i].count = 0;
    if (X->stash.n) {
      x->sendptrs[i].ints    = &X->stash.idx[j];
      x->sendptrs[i].scalars = &X->stash.array[j];
      for ( ; j<X->stash.n && X->stash.idx[j] < X->map->range[rank+1]; j++) x->sendhdr[i].count++;
    }
    x->sendhdr[i].bcount = 0;
    if (X->bstash.n) {
      x->sendptrs[i].intb    = &X->bstash.idx[jb];
      x->sendptrs[i].scalarb = &X->bstash.array[jb*bs];
      for ( ; jb<X->bstash.n && X->bstash.idx[jb]*bs < X->map->range[rank+1]; jb++) x->sendhdr[i].bcount++;
    }
  }

  if (!x->segrecvint) {ierr = PetscSegBufferCreate(sizeof(PetscInt),1000,&x->segrecvint);CHKERRQ(ierr);}
  if (!x->segrecvscalar) {ierr = PetscSegBufferCreate(sizeof(PetscScalar),1000,&x->segrecvscalar);CHKERRQ(ierr);}
  if (!x->segrecvframe) {ierr = PetscSegBufferCreate(sizeof(VecAssemblyFrame),50,&x->segrecvframe);CHKERRQ(ierr);}
  if (x->first_assembly_done) { /* this is not the first assembly */
    PetscMPIInt tag[4];
    for (i=0; i<4; i++) {ierr = PetscCommGetNewTag(comm,&tag[i]);CHKERRQ(ierr);}
    for (i=0; i<x->nsendranks; i++) {
      ierr = VecAssemblySend_MPI_Private(comm,tag,i,x->sendranks[i],x->sendhdr+i,x->sendreqs+4*i,X);CHKERRQ(ierr);
    }
    for (i=0; i<x->nrecvranks; i++) {
      ierr = VecAssemblyRecv_MPI_Private(comm,tag,x->recvranks[i],x->recvhdr+i,x->recvreqs+4*i,X);CHKERRQ(ierr);
    }
    x->use_status = PETSC_TRUE;
  } else { /* First time assembly */
    ierr = PetscCommBuildTwoSidedFReq(comm,3,MPIU_INT,x->nsendranks,x->sendranks,(PetscInt*)x->sendhdr,&x->nrecvranks,&x->recvranks,&x->recvhdr,4,&x->sendreqs,&x->recvreqs,VecAssemblySend_MPI_Private,VecAssemblyRecv_MPI_Private,X);CHKERRQ(ierr);
    x->use_status = PETSC_FALSE;
  }

  /* The first_assembly_done flag is only meaningful when x->assembly_subset is set.
     When assembly_subset is set, record here that the first assembly has been done, so that
     subsequent assemblies can reuse the communication pattern built above.
   */
  x->first_assembly_done = x->assembly_subset;

  {
    PetscInt nstash,reallocs;
    ierr = VecStashGetInfo_Private(&X->stash,&nstash,&reallocs);CHKERRQ(ierr);
    ierr = PetscInfo2(X,"Stash has %D entries, uses %D mallocs.\n",nstash,reallocs);CHKERRQ(ierr);
    ierr = VecStashGetInfo_Private(&X->bstash,&nstash,&reallocs);CHKERRQ(ierr);
    ierr = PetscInfo2(X,"Block-Stash has %D entries, uses %D mallocs.\n",nstash,reallocs);CHKERRQ(ierr);
  }
  PetscFunctionReturn(0);
}

static PetscErrorCode VecAssemblyEnd_MPI_BTS(Vec X)
{
  Vec_MPI *x = (Vec_MPI*)X->data;
  PetscInt bs = X->map->bs;
  PetscMPIInt npending,*some_indices,r;
  MPI_Status  *some_statuses;
  PetscScalar *xarray;
  PetscErrorCode ierr;
  VecAssemblyFrame *frame;

  PetscFunctionBegin;
  if (X->stash.donotstash) {
    X->stash.insertmode = NOT_SET_VALUES;
    X->bstash.insertmode = NOT_SET_VALUES;
    PetscFunctionReturn(0);
  }

  if (!x->segrecvframe) SETERRQ(PETSC_COMM_SELF,PETSC_ERR_PLIB,"Missing segrecvframe! Probably you forgot to call VecAssemblyBegin first");
  ierr = VecGetArray(X,&xarray);CHKERRQ(ierr);
  ierr = PetscSegBufferExtractInPlace(x->segrecvframe,&frame);CHKERRQ(ierr);
  ierr = PetscMalloc2(4*x->nrecvranks,&some_indices,x->use_status?4*x->nrecvranks:0,&some_statuses);CHKERRQ(ierr);
  for (r=0,npending=0; r<x->nrecvranks; r++) npending += frame[r].pendings + frame[r].pendingb;
  while (npending>0) {
    PetscMPIInt ndone=0,ii;
    /* Filling MPI_Status fields requires some resources from the MPI library.  We skip it on the first assembly, or
     * when VEC_SUBSET_OFF_PROC_ENTRIES has not been set, because we could exchange exact sizes in the initial
     * rendezvous.  When the rendezvous is elided, however, we use MPI_Status to get actual message lengths, so that
     * subsequent assembly can set a proper subset of the values. */
    ierr = MPI_Waitsome(4*x->nrecvranks,x->recvreqs,&ndone,some_indices,x->use_status?some_statuses:MPI_STATUSES_IGNORE);CHKERRQ(ierr);
    for (ii=0; ii<ndone; ii++) {
      PetscInt i = some_indices[ii]/4,j,k;
      InsertMode imode = (InsertMode)x->recvhdr[i].insertmode;
      PetscInt *recvint;
      PetscScalar *recvscalar;
      PetscBool intmsg = (PetscBool)(some_indices[ii]%2 == 0);
      PetscBool blockmsg = (PetscBool)((some_indices[ii]%4)/2 == 1);
      npending--;
      if (!blockmsg) { /* Scalar stash */
        PetscMPIInt count;
        if (--frame[i].pendings > 0) continue;
        if (x->use_status) {
          ierr = MPI_Get_count(&some_statuses[ii],intmsg ? MPIU_INT : MPIU_SCALAR,&count);CHKERRQ(ierr);
        } else count = x->recvhdr[i].count;
        for (j=0,recvint=frame[i].ints,recvscalar=frame[i].scalars; j<count; j++,recvint++) {
          PetscInt loc = *recvint - X->map->rstart;
          if (*recvint < X->map->rstart || X->map->rend <= *recvint) SETERRQ3(PETSC_COMM_SELF,PETSC_ERR_PLIB,"Received vector entry %D out of local range [%D,%D)",*recvint,X->map->rstart,X->map->rend);
          switch (imode) {
          case ADD_VALUES:
            xarray[loc] += *recvscalar++;
            break;
          case INSERT_VALUES:
            xarray[loc] = *recvscalar++;
            break;
          default: SETERRQ1(PETSC_COMM_SELF,PETSC_ERR_SUP,"Insert mode not supported 0x%x",imode);
          }
        }
      } else {                  /* Block stash */
        PetscMPIInt count;
        if (--frame[i].pendingb > 0) continue;
        if (x->use_status) {
          ierr = MPI_Get_count(&some_statuses[ii],intmsg ? MPIU_INT : MPIU_SCALAR,&count);CHKERRQ(ierr);
          if (!intmsg) count /= bs; /* Convert from number of scalars to number of blocks */
        } else count = x->recvhdr[i].bcount;
        for (j=0,recvint=frame[i].intb,recvscalar=frame[i].scalarb; j<count; j++,recvint++) {
          PetscInt loc = (*recvint)*bs - X->map->rstart;
          switch (imode) {
          case ADD_VALUES:
            for (k=loc; k<loc+bs; k++) xarray[k] += *recvscalar++;
            break;
          case INSERT_VALUES:
            for (k=loc; k<loc+bs; k++) xarray[k] = *recvscalar++;
            break;
          default: SETERRQ1(PETSC_COMM_SELF,PETSC_ERR_SUP,"Insert mode not supported 0x%x",imode);
          }
        }
      }
    }
  }
  ierr = VecRestoreArray(X,&xarray);CHKERRQ(ierr);
  ierr = MPI_Waitall(4*x->nsendranks,x->sendreqs,MPI_STATUSES_IGNORE);CHKERRQ(ierr);
  ierr = PetscFree2(some_indices,some_statuses);CHKERRQ(ierr);
  if (x->assembly_subset) {
    void *dummy;                /* reset segbuffers */
    ierr = PetscSegBufferExtractInPlace(x->segrecvint,&dummy);CHKERRQ(ierr);
    ierr = PetscSegBufferExtractInPlace(x->segrecvscalar,&dummy);CHKERRQ(ierr);
  } else {
    ierr = VecAssemblyReset_MPI(X);CHKERRQ(ierr);
  }

  X->stash.insertmode = NOT_SET_VALUES;
  X->bstash.insertmode = NOT_SET_VALUES;
  ierr = VecStashScatterEnd_Private(&X->stash);CHKERRQ(ierr);
  ierr = VecStashScatterEnd_Private(&X->bstash);CHKERRQ(ierr);
  PetscFunctionReturn(0);
}
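
/*
   For orientation, a hedged sketch of the user-level calling sequence that exercises the two BTS
   assembly routines above (the index and value arrays are illustrative):

     VecSetValues(X,m,indices,values,ADD_VALUES);   entries owned by other ranks go into the stash
     VecAssemblyBegin(X);                           VecAssemblyBegin_MPI_BTS: post sends and receives
     VecAssemblyEnd(X);                             VecAssemblyEnd_MPI_BTS: wait and apply received entries
*/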

PetscErrorCode VecAssemblyReset_MPI(Vec X)
{
  Vec_MPI *x = (Vec_MPI*)X->data;
  PetscErrorCode ierr;

  PetscFunctionBegin;
  ierr = PetscFree(x->sendreqs);CHKERRQ(ierr);
  ierr = PetscFree(x->recvreqs);CHKERRQ(ierr);
  ierr = PetscFree(x->sendranks);CHKERRQ(ierr);
  ierr = PetscFree(x->recvranks);CHKERRQ(ierr);
  ierr = PetscFree(x->sendhdr);CHKERRQ(ierr);
  ierr = PetscFree(x->recvhdr);CHKERRQ(ierr);
  ierr = PetscFree(x->sendptrs);CHKERRQ(ierr);
  ierr = PetscSegBufferDestroy(&x->segrecvint);CHKERRQ(ierr);
  ierr = PetscSegBufferDestroy(&x->segrecvscalar);CHKERRQ(ierr);
  ierr = PetscSegBufferDestroy(&x->segrecvframe);CHKERRQ(ierr);
  PetscFunctionReturn(0);
}


static PetscErrorCode VecSetFromOptions_MPI(PetscOptionItems *PetscOptionsObject,Vec X)
{
#if !defined(PETSC_HAVE_MPIUNI)
  PetscErrorCode ierr;
  PetscBool      flg = PETSC_FALSE,set;

  PetscFunctionBegin;
  ierr = PetscOptionsHead(PetscOptionsObject,"VecMPI Options");CHKERRQ(ierr);
  ierr = PetscOptionsBool("-vec_assembly_legacy","Use MPI 1 version of assembly","",flg,&flg,&set);CHKERRQ(ierr);
  if (set) {
    X->ops->assemblybegin = flg ? VecAssemblyBegin_MPI : VecAssemblyBegin_MPI_BTS;
    X->ops->assemblyend   = flg ? VecAssemblyEnd_MPI   : VecAssemblyEnd_MPI_BTS;
  }
  ierr = PetscOptionsTail();CHKERRQ(ierr);
#else
  X->ops->assemblybegin =  VecAssemblyBegin_MPI;
  X->ops->assemblyend   =  VecAssemblyEnd_MPI;
#endif
  PetscFunctionReturn(0);
}


static struct _VecOps DvOps = { VecDuplicate_MPI, /* 1 */
                                VecDuplicateVecs_Default,
                                VecDestroyVecs_Default,
                                VecDot_MPI,
                                VecMDot_MPI,
                                VecNorm_MPI,
                                VecTDot_MPI,
                                VecMTDot_MPI,
                                VecScale_Seq,
                                VecCopy_Seq, /* 10 */
                                VecSet_Seq,
                                VecSwap_Seq,
                                VecAXPY_Seq,
                                VecAXPBY_Seq,
                                VecMAXPY_Seq,
                                VecAYPX_Seq,
                                VecWAXPY_Seq,
                                VecAXPBYPCZ_Seq,
                                VecPointwiseMult_Seq,
                                VecPointwiseDivide_Seq,
                                VecSetValues_MPI, /* 20 */
                                VecAssemblyBegin_MPI_BTS,
                                VecAssemblyEnd_MPI_BTS,
                                0,
                                VecGetSize_MPI,
                                VecGetSize_Seq,
                                0,
                                VecMax_MPI,
                                VecMin_MPI,
                                VecSetRandom_Seq,
                                VecSetOption_MPI,
                                VecSetValuesBlocked_MPI,
                                VecDestroy_MPI,
                                VecView_MPI,
                                VecPlaceArray_MPI,
                                VecReplaceArray_Seq,
                                VecDot_Seq,
                                VecTDot_Seq,
                                VecNorm_Seq,
                                VecMDot_Seq,
                                VecMTDot_Seq,
                                VecLoad_Default,
                                VecReciprocal_Default,
                                VecConjugate_Seq,
                                0,
                                0,
                                VecResetArray_MPI,
                                VecSetFromOptions_MPI,/*set from options */
                                VecMaxPointwiseDivide_Seq,
                                VecPointwiseMax_Seq,
                                VecPointwiseMaxAbs_Seq,
                                VecPointwiseMin_Seq,
                                VecGetValues_MPI,
                                0,
                                0,
                                0,
                                0,
                                0,
                                0,
                                VecStrideGather_Default,
                                VecStrideScatter_Default,
                                0,
                                0,
                                0,
                                0,
                                0,
                                VecStrideSubSetGather_Default,
                                VecStrideSubSetScatter_Default,
                                0,
                                0
};

/*
    VecCreate_MPI_Private - Basic create routine called by VecCreate_MPI() (i.e. VecCreateMPI()),
    VecCreateMPIWithArray(), VecCreate_Shared() (i.e. VecCreateShared()), VecCreateGhost(),
    VecDuplicate_MPI(), VecCreateGhostWithArray(), and VecDuplicate_Shared()

    If alloc is true and array is NULL then this routine allocates the space, otherwise
    no space is allocated.
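
    A hedged sketch of the two common call patterns (the argument values shown are illustrative):

       VecCreate_MPI_Private(v,PETSC_TRUE,0,NULL);      allocates and zeros the local storage
       VecCreate_MPI_Private(v,PETSC_FALSE,0,array);    wraps a caller-provided array, no allocation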
*/
PetscErrorCode VecCreate_MPI_Private(Vec v,PetscBool alloc,PetscInt nghost,const PetscScalar array[])
{
  Vec_MPI        *s;
  PetscErrorCode ierr;

  PetscFunctionBegin;
  ierr           = PetscNewLog(v,&s);CHKERRQ(ierr);
  v->data        = (void*)s;
  ierr           = PetscMemcpy(v->ops,&DvOps,sizeof(DvOps));CHKERRQ(ierr);
  s->nghost      = nghost;
  v->petscnative = PETSC_TRUE;

  ierr = PetscLayoutSetUp(v->map);CHKERRQ(ierr);

  s->array           = (PetscScalar*)array;
  s->array_allocated = 0;
  if (alloc && !array) {
    PetscInt n = v->map->n+nghost;
    ierr               = PetscMalloc1(n,&s->array);CHKERRQ(ierr);
    ierr               = PetscLogObjectMemory((PetscObject)v,n*sizeof(PetscScalar));CHKERRQ(ierr);
    ierr               = PetscMemzero(s->array,n*sizeof(PetscScalar));CHKERRQ(ierr);
    s->array_allocated = s->array;
  }

  /* By default parallel vectors do not have local representation */
  s->localrep    = 0;
  s->localupdate = 0;

  v->stash.insertmode = NOT_SET_VALUES;
  v->bstash.insertmode = NOT_SET_VALUES;
  /* create the stashes. The block-size for bstash is set later when
     VecSetValuesBlocked is called.
  */
  ierr = VecStashCreate_Private(PetscObjectComm((PetscObject)v),1,&v->stash);CHKERRQ(ierr);
  ierr = VecStashCreate_Private(PetscObjectComm((PetscObject)v),PetscAbs(v->map->bs),&v->bstash);CHKERRQ(ierr);

#if defined(PETSC_HAVE_MATLAB_ENGINE)
  ierr = PetscObjectComposeFunction((PetscObject)v,"PetscMatlabEnginePut_C",VecMatlabEnginePut_Default);CHKERRQ(ierr);
  ierr = PetscObjectComposeFunction((PetscObject)v,"PetscMatlabEngineGet_C",VecMatlabEngineGet_Default);CHKERRQ(ierr);
#endif
  ierr = PetscObjectChangeTypeName((PetscObject)v,VECMPI);CHKERRQ(ierr);
  PetscFunctionReturn(0);
}

/*MC
   VECMPI - VECMPI = "mpi" - The basic parallel vector

   Options Database Keys:
. -vec_type mpi - sets the vector type to VECMPI during a call to VecSetFromOptions()
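
   Example Usage (a minimal sketch; the global size N and the communicator are illustrative):
.vb
     VecCreate(PETSC_COMM_WORLD,&v);
     VecSetSizes(v,PETSC_DECIDE,N);
     VecSetType(v,VECMPI);
.ve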

  Level: beginner

.seealso: VecCreate(), VecSetType(), VecSetFromOptions(), VecCreateMPIWithArray(), VECMPI, VecType, VecCreateMPI()
M*/

PETSC_EXTERN PetscErrorCode VecCreate_MPI(Vec vv)
{
  PetscErrorCode ierr;

  PetscFunctionBegin;
  ierr = VecCreate_MPI_Private(vv,PETSC_TRUE,0,0);CHKERRQ(ierr);
  PetscFunctionReturn(0);
}

/*MC
   VECSTANDARD = "standard" - A VECSEQ on one process and VECMPI on more than one process

   Options Database Keys:
. -vec_type standard - sets the vector type to VECSTANDARD during a call to VecSetFromOptions()

  Level: beginner

.seealso: VecCreateSeq(), VecCreateMPI()
M*/

PETSC_EXTERN PetscErrorCode VecCreate_Standard(Vec v)
{
  PetscErrorCode ierr;
  PetscMPIInt    size;

  PetscFunctionBegin;
  ierr = MPI_Comm_size(PetscObjectComm((PetscObject)v),&size);CHKERRQ(ierr);
  if (size == 1) {
    ierr = VecSetType(v,VECSEQ);CHKERRQ(ierr);
  } else {
    ierr = VecSetType(v,VECMPI);CHKERRQ(ierr);
  }
  PetscFunctionReturn(0);
}

/*@C
   VecCreateMPIWithArray - Creates a parallel, array-style vector,
   where the user provides the array space to store the vector values.

   Collective

   Input Parameters:
+  comm  - the MPI communicator to use
.  bs    - block size, same meaning as VecSetBlockSize()
.  n     - local vector length, cannot be PETSC_DECIDE
.  N     - global vector length (or PETSC_DECIDE to have calculated)
-  array - the user provided array to store the vector values

   Output Parameter:
.  vv - the vector

   Notes:
   Use VecDuplicate() or VecDuplicateVecs() to form additional vectors of the
   same type as an existing vector.

   If the user-provided array is NULL, then VecPlaceArray() can be used
   at a later stage to SET the array for storing the vector values.

   PETSc does NOT free the array when the vector is destroyed via VecDestroy().
   The user should not free the array until the vector is destroyed.
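
   Example Usage (a minimal sketch, assuming the caller has chosen a local length n and allocated a buffer of that length):
.vb
     PetscScalar *array;
     Vec         v;
     PetscMalloc1(n,&array);
     VecCreateMPIWithArray(PETSC_COMM_WORLD,1,n,PETSC_DECIDE,array,&v);
     ... use v ...
     VecDestroy(&v);
     PetscFree(array);   the array is not freed by VecDestroy()
.ve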

   Level: intermediate

.seealso: VecCreateSeqWithArray(), VecCreate(), VecDuplicate(), VecDuplicateVecs(), VecCreateGhost(),
          VecCreateMPI(), VecCreateGhostWithArray(), VecPlaceArray()

@*/
PetscErrorCode  VecCreateMPIWithArray(MPI_Comm comm,PetscInt bs,PetscInt n,PetscInt N,const PetscScalar array[],Vec *vv)
{
  PetscErrorCode ierr;

  PetscFunctionBegin;
  if (n == PETSC_DECIDE) SETERRQ(PETSC_COMM_SELF,PETSC_ERR_ARG_OUTOFRANGE,"Must set local size of vector");
  ierr = PetscSplitOwnership(comm,&n,&N);CHKERRQ(ierr);
  ierr = VecCreate(comm,vv);CHKERRQ(ierr);
  ierr = VecSetSizes(*vv,n,N);CHKERRQ(ierr);
  ierr = VecSetBlockSize(*vv,bs);CHKERRQ(ierr);
  ierr = VecCreate_MPI_Private(*vv,PETSC_FALSE,0,array);CHKERRQ(ierr);
  PetscFunctionReturn(0);
}

/*@C
   VecCreateGhostWithArray - Creates a parallel vector with ghost padding on each processor;
   the caller allocates the array space.

   Collective

   Input Parameters:
+  comm - the MPI communicator to use
.  n - local vector length
.  N - global vector length (or PETSC_DECIDE to have calculated if n is given)
.  nghost - number of local ghost points
.  ghosts - global indices of ghost points (or NULL if not needed), these do not need to be in increasing order (sorted)
-  array - the space to store the vector values (must be of length at least n + nghost)

   Output Parameter:
.  vv - the global vector representation (without ghost points as part of vector)

   Notes:
   Use VecGhostGetLocalForm() to access the local, ghosted representation
   of the vector.

   This also automatically sets the ISLocalToGlobalMapping() for this vector.
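
   Example Usage (a hedged sketch; the local length, ghost indices, and the length of the provided array are illustrative):
.vb
     PetscInt    ghosts[2] = {0,95};     global indices owned by other processes
     PetscScalar work[12];               must hold n + nghost = 10 + 2 entries
     Vec         v;
     VecCreateGhostWithArray(PETSC_COMM_WORLD,10,PETSC_DECIDE,2,ghosts,work,&v);
     VecGhostUpdateBegin(v,INSERT_VALUES,SCATTER_FORWARD);
     VecGhostUpdateEnd(v,INSERT_VALUES,SCATTER_FORWARD);
.ve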

   Level: advanced


.seealso: VecCreate(), VecGhostGetLocalForm(), VecGhostRestoreLocalForm(),
          VecCreateGhost(), VecCreateSeqWithArray(), VecCreateMPIWithArray(),
          VecCreateGhostBlock(), VecCreateGhostBlockWithArray(), VecMPISetGhost()

@*/
PetscErrorCode  VecCreateGhostWithArray(MPI_Comm comm,PetscInt n,PetscInt N,PetscInt nghost,const PetscInt ghosts[],const PetscScalar array[],Vec *vv)
{
  PetscErrorCode         ierr;
  Vec_MPI                *w;
  PetscScalar            *larray;
  IS                     from,to;
  ISLocalToGlobalMapping ltog;
  PetscInt               rstart,i,*indices;

  PetscFunctionBegin;
  *vv = 0;

  if (n == PETSC_DECIDE)      SETERRQ(PETSC_COMM_SELF,PETSC_ERR_ARG_OUTOFRANGE,"Must set local size");
  if (nghost == PETSC_DECIDE) SETERRQ(PETSC_COMM_SELF,PETSC_ERR_ARG_OUTOFRANGE,"Must set local ghost size");
  if (nghost < 0)             SETERRQ(PETSC_COMM_SELF,PETSC_ERR_ARG_OUTOFRANGE,"Ghost length must be >= 0");
  ierr = PetscSplitOwnership(comm,&n,&N);CHKERRQ(ierr);
  /* Create global representation */
  ierr = VecCreate(comm,vv);CHKERRQ(ierr);
  ierr = VecSetSizes(*vv,n,N);CHKERRQ(ierr);
  ierr = VecCreate_MPI_Private(*vv,PETSC_TRUE,nghost,array);CHKERRQ(ierr);
  w    = (Vec_MPI*)(*vv)->data;
  /* Create local representation */
  ierr = VecGetArray(*vv,&larray);CHKERRQ(ierr);
  ierr = VecCreateSeqWithArray(PETSC_COMM_SELF,1,n+nghost,larray,&w->localrep);CHKERRQ(ierr);
  ierr = PetscLogObjectParent((PetscObject)*vv,(PetscObject)w->localrep);CHKERRQ(ierr);
  ierr = VecRestoreArray(*vv,&larray);CHKERRQ(ierr);

  /*
       Create scatter context for scattering (updating) ghost values
  */
  ierr = ISCreateGeneral(comm,nghost,ghosts,PETSC_COPY_VALUES,&from);CHKERRQ(ierr);
  ierr = ISCreateStride(PETSC_COMM_SELF,nghost,n,1,&to);CHKERRQ(ierr);
  ierr = VecScatterCreate(*vv,from,w->localrep,to,&w->localupdate);CHKERRQ(ierr);
  ierr = PetscLogObjectParent((PetscObject)*vv,(PetscObject)w->localupdate);CHKERRQ(ierr);
  ierr = ISDestroy(&to);CHKERRQ(ierr);
  ierr = ISDestroy(&from);CHKERRQ(ierr);

  /* set local to global mapping for ghosted vector */
  ierr = PetscMalloc1(n+nghost,&indices);CHKERRQ(ierr);
  ierr = VecGetOwnershipRange(*vv,&rstart,NULL);CHKERRQ(ierr);
  for (i=0; i<n; i++) {
    indices[i] = rstart + i;
  }
  for (i=0; i<nghost; i++) {
    indices[n+i] = ghosts[i];
  }
  ierr = ISLocalToGlobalMappingCreate(comm,1,n+nghost,indices,PETSC_OWN_POINTER,&ltog);CHKERRQ(ierr);
  ierr = VecSetLocalToGlobalMapping(*vv,ltog);CHKERRQ(ierr);
  ierr = ISLocalToGlobalMappingDestroy(&ltog);CHKERRQ(ierr);
  PetscFunctionReturn(0);
}

/*@
   VecCreateGhost - Creates a parallel vector with ghost padding on each processor.

   Collective

   Input Parameters:
+  comm - the MPI communicator to use
.  n - local vector length
.  N - global vector length (or PETSC_DECIDE to have calculated if n is given)
.  nghost - number of local ghost points
-  ghosts - global indices of ghost points, these do not need to be in increasing order (sorted)

   Output Parameter:
.  vv - the global vector representation (without ghost points as part of vector)

   Notes:
   Use VecGhostGetLocalForm() to access the local, ghosted representation
   of the vector.

   This also automatically sets the ISLocalToGlobalMapping() for this vector.
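
   Example Usage (a hedged sketch; the sizes and ghost indices are illustrative):
.vb
     PetscInt ghosts[2] = {0,95};
     Vec      v,lv;
     VecCreateGhost(PETSC_COMM_WORLD,10,PETSC_DECIDE,2,ghosts,&v);
     VecGhostUpdateBegin(v,INSERT_VALUES,SCATTER_FORWARD);
     VecGhostUpdateEnd(v,INSERT_VALUES,SCATTER_FORWARD);
     VecGhostGetLocalForm(v,&lv);        owned entries followed by the ghost entries
     VecGhostRestoreLocalForm(v,&lv);
.ve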

   Level: advanced

.seealso: VecCreateSeq(), VecCreate(), VecDuplicate(), VecDuplicateVecs(), VecCreateMPI(),
          VecGhostGetLocalForm(), VecGhostRestoreLocalForm(), VecGhostUpdateBegin(),
          VecCreateGhostWithArray(), VecCreateMPIWithArray(), VecGhostUpdateEnd(),
          VecCreateGhostBlock(), VecCreateGhostBlockWithArray(), VecMPISetGhost()

@*/
PetscErrorCode  VecCreateGhost(MPI_Comm comm,PetscInt n,PetscInt N,PetscInt nghost,const PetscInt ghosts[],Vec *vv)
{
  PetscErrorCode ierr;

  PetscFunctionBegin;
  ierr = VecCreateGhostWithArray(comm,n,N,nghost,ghosts,0,vv);CHKERRQ(ierr);
  PetscFunctionReturn(0);
}

/*@
   VecMPISetGhost - Sets the ghost points for an MPI ghost vector

   Collective on Vec

   Input Parameters:
+  vv - the MPI vector
.  nghost - number of local ghost points
-  ghosts - global indices of ghost points, these do not need to be in increasing order (sorted)


   Notes:
   Use VecGhostGetLocalForm() to access the local, ghosted representation
   of the vector.

   This also automatically sets the ISLocalToGlobalMapping() for this vector.

   You must call this AFTER you have set the type of the vector (with VecSetType()) and the size (with VecSetSizes()).
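
   Example Usage (a hedged sketch; the sizes and the ghosts array are illustrative):
.vb
     VecCreate(PETSC_COMM_WORLD,&v);
     VecSetSizes(v,10,PETSC_DECIDE);
     VecSetType(v,VECMPI);
     VecMPISetGhost(v,2,ghosts);
.ve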

   Level: advanced

.seealso: VecCreateSeq(), VecCreate(), VecDuplicate(), VecDuplicateVecs(), VecCreateMPI(),
          VecGhostGetLocalForm(), VecGhostRestoreLocalForm(), VecGhostUpdateBegin(),
          VecCreateGhostWithArray(), VecCreateMPIWithArray(), VecGhostUpdateEnd(),
          VecCreateGhostBlock(), VecCreateGhostBlockWithArray()

@*/
PetscErrorCode  VecMPISetGhost(Vec vv,PetscInt nghost,const PetscInt ghosts[])
{
  PetscErrorCode ierr;
  PetscBool      flg;

  PetscFunctionBegin;
  ierr = PetscObjectTypeCompare((PetscObject)vv,VECMPI,&flg);CHKERRQ(ierr);
  /* if already a fully built VECMPI, destroy it and rebuild with ghosting */
  if (flg) {
    PetscInt               n,N;
    Vec_MPI                *w;
    PetscScalar            *larray;
    IS                     from,to;
    ISLocalToGlobalMapping ltog;
    PetscInt               rstart,i,*indices;
    MPI_Comm               comm;

    ierr = PetscObjectGetComm((PetscObject)vv,&comm);CHKERRQ(ierr);
    n    = vv->map->n;
    N    = vv->map->N;
    ierr = (*vv->ops->destroy)(vv);CHKERRQ(ierr);
    ierr = VecSetSizes(vv,n,N);CHKERRQ(ierr);
    ierr = VecCreate_MPI_Private(vv,PETSC_TRUE,nghost,NULL);CHKERRQ(ierr);
    w    = (Vec_MPI*)(vv)->data;
    /* Create local representation */
    ierr = VecGetArray(vv,&larray);CHKERRQ(ierr);
    ierr = VecCreateSeqWithArray(PETSC_COMM_SELF,1,n+nghost,larray,&w->localrep);CHKERRQ(ierr);
    ierr = PetscLogObjectParent((PetscObject)vv,(PetscObject)w->localrep);CHKERRQ(ierr);
    ierr = VecRestoreArray(vv,&larray);CHKERRQ(ierr);

    /*
     Create scatter context for scattering (updating) ghost values
     */
    ierr = ISCreateGeneral(comm,nghost,ghosts,PETSC_COPY_VALUES,&from);CHKERRQ(ierr);
    ierr = ISCreateStride(PETSC_COMM_SELF,nghost,n,1,&to);CHKERRQ(ierr);
    ierr = VecScatterCreate(vv,from,w->localrep,to,&w->localupdate);CHKERRQ(ierr);
    ierr = PetscLogObjectParent((PetscObject)vv,(PetscObject)w->localupdate);CHKERRQ(ierr);
    ierr = ISDestroy(&to);CHKERRQ(ierr);
    ierr = ISDestroy(&from);CHKERRQ(ierr);

    /* set local to global mapping for ghosted vector */
    ierr = PetscMalloc1(n+nghost,&indices);CHKERRQ(ierr);
    ierr = VecGetOwnershipRange(vv,&rstart,NULL);CHKERRQ(ierr);

    for (i=0; i<n; i++)      indices[i]   = rstart + i;
    for (i=0; i<nghost; i++) indices[n+i] = ghosts[i];

    ierr = ISLocalToGlobalMappingCreate(comm,1,n+nghost,indices,PETSC_OWN_POINTER,&ltog);CHKERRQ(ierr);
    ierr = VecSetLocalToGlobalMapping(vv,ltog);CHKERRQ(ierr);
    ierr = ISLocalToGlobalMappingDestroy(&ltog);CHKERRQ(ierr);
  } else if (vv->ops->create == VecCreate_MPI) SETERRQ(PetscObjectComm((PetscObject)vv),PETSC_ERR_ARG_WRONGSTATE,"Must set local or global size before setting ghosting");
  else if (!((PetscObject)vv)->type_name) SETERRQ(PetscObjectComm((PetscObject)vv),PETSC_ERR_ARG_WRONGSTATE,"Must set type to VECMPI before ghosting");
  PetscFunctionReturn(0);
}


/* ------------------------------------------------------------------------------------------*/
/*@C
   VecCreateGhostBlockWithArray - Creates a parallel vector with ghost padding on each processor;
   the caller allocates the array space. Indices in the ghost region are based on blocks.

   Collective

   Input Parameters:
+  comm - the MPI communicator to use
.  bs - block size
.  n - local vector length
.  N - global vector length (or PETSC_DECIDE to have calculated if n is given)
.  nghost - number of local ghost blocks
.  ghosts - global indices of ghost blocks (or NULL if not needed), counts are by block not by index, these do not need to be in increasing order (sorted)
-  array - the space to store the vector values (must be of length at least n + nghost*bs)

   Output Parameter:
.  vv - the global vector representation (without ghost points as part of vector)

   Notes:
   Use VecGhostGetLocalForm() to access the local, ghosted representation
   of the vector.

   n is the local vector size (total local size not the number of blocks) while nghost
   is the number of blocks in the ghost portion, i.e. the number of elements in the ghost
   portion is bs*nghost

   Level: advanced


.seealso: VecCreate(), VecGhostGetLocalForm(), VecGhostRestoreLocalForm(),
          VecCreateGhost(), VecCreateSeqWithArray(), VecCreateMPIWithArray(),
          VecCreateGhostWithArray(), VecCreateGhostBlock()

@*/
PetscErrorCode  VecCreateGhostBlockWithArray(MPI_Comm comm,PetscInt bs,PetscInt n,PetscInt N,PetscInt nghost,const PetscInt ghosts[],const PetscScalar array[],Vec *vv)
{
  PetscErrorCode         ierr;
  Vec_MPI                *w;
  PetscScalar            *larray;
  IS                     from,to;
  ISLocalToGlobalMapping ltog;
  PetscInt               rstart,i,nb,*indices;

  PetscFunctionBegin;
  *vv = 0;

  if (n == PETSC_DECIDE)      SETERRQ(PETSC_COMM_SELF,PETSC_ERR_ARG_OUTOFRANGE,"Must set local size");
  if (nghost == PETSC_DECIDE) SETERRQ(PETSC_COMM_SELF,PETSC_ERR_ARG_OUTOFRANGE,"Must set local ghost size");
  if (nghost < 0)             SETERRQ(PETSC_COMM_SELF,PETSC_ERR_ARG_OUTOFRANGE,"Ghost length must be >= 0");
  if (n % bs)                 SETERRQ(PETSC_COMM_SELF,PETSC_ERR_ARG_INCOMP,"Local size must be a multiple of block size");
  ierr = PetscSplitOwnership(comm,&n,&N);CHKERRQ(ierr);
  /* Create global representation */
  ierr = VecCreate(comm,vv);CHKERRQ(ierr);
  ierr = VecSetSizes(*vv,n,N);CHKERRQ(ierr);
  ierr = VecSetBlockSize(*vv,bs);CHKERRQ(ierr);
  ierr = VecCreate_MPI_Private(*vv,PETSC_TRUE,nghost*bs,array);CHKERRQ(ierr);
  w    = (Vec_MPI*)(*vv)->data;
  /* Create local representation */
  ierr = VecGetArray(*vv,&larray);CHKERRQ(ierr);
  ierr = VecCreateSeqWithArray(PETSC_COMM_SELF,bs,n+bs*nghost,larray,&w->localrep);CHKERRQ(ierr);
  ierr = PetscLogObjectParent((PetscObject)*vv,(PetscObject)w->localrep);CHKERRQ(ierr);
  ierr = VecRestoreArray(*vv,&larray);CHKERRQ(ierr);

  /*
       Create scatter context for scattering (updating) ghost values
  */
  ierr = ISCreateBlock(comm,bs,nghost,ghosts,PETSC_COPY_VALUES,&from);CHKERRQ(ierr);
  ierr = ISCreateStride(PETSC_COMM_SELF,bs*nghost,n,1,&to);CHKERRQ(ierr);
  ierr = VecScatterCreate(*vv,from,w->localrep,to,&w->localupdate);CHKERRQ(ierr);
  ierr = PetscLogObjectParent((PetscObject)*vv,(PetscObject)w->localupdate);CHKERRQ(ierr);
  ierr = ISDestroy(&to);CHKERRQ(ierr);
  ierr = ISDestroy(&from);CHKERRQ(ierr);

  /* set local to global mapping for ghosted vector */
  nb     = n/bs;
  ierr   = PetscMalloc1(nb+nghost,&indices);CHKERRQ(ierr);
  ierr   = VecGetOwnershipRange(*vv,&rstart,NULL);CHKERRQ(ierr);
  rstart = rstart/bs;

  for (i=0; i<nb; i++)      indices[i]    = rstart + i;
  for (i=0; i<nghost; i++)  indices[nb+i] = ghosts[i];

  ierr = ISLocalToGlobalMappingCreate(comm,bs,nb+nghost,indices,PETSC_OWN_POINTER,&ltog);CHKERRQ(ierr);
  ierr = VecSetLocalToGlobalMapping(*vv,ltog);CHKERRQ(ierr);
  ierr = ISLocalToGlobalMappingDestroy(&ltog);CHKERRQ(ierr);
  PetscFunctionReturn(0);
}


/*@
   VecCreateGhostBlock - Creates a parallel vector with ghost padding on each processor.
        The indexing of the ghost points is done by blocks.

   Collective

   Input Parameters:
+  comm - the MPI communicator to use
.  bs - the block size
.  n - local vector length
.  N - global vector length (or PETSC_DECIDE to have calculated if n is given)
.  nghost - number of local ghost blocks
-  ghosts - global indices of ghost blocks, counts are by block, not by individual index, these do not need to be in increasing order (sorted)

   Output Parameter:
.  vv - the global vector representation (without ghost points as part of vector)

   Notes:
   Use VecGhostGetLocalForm() to access the local, ghosted representation
   of the vector.

   n is the local vector size (total local size not the number of blocks) while nghost
   is the number of blocks in the ghost portion, i.e. the number of elements in the ghost
   portion is bs*nghost
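
   Example Usage (a hedged sketch; the block size, local length, and ghost block indices are illustrative):
.vb
     PetscInt ghosts[2] = {0,30};        indices of ghost blocks, not of individual entries
     Vec      v;
     VecCreateGhostBlock(PETSC_COMM_WORLD,2,10,PETSC_DECIDE,2,ghosts,&v);
     VecGhostUpdateBegin(v,INSERT_VALUES,SCATTER_FORWARD);
     VecGhostUpdateEnd(v,INSERT_VALUES,SCATTER_FORWARD);
.ve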

   Level: advanced

.seealso: VecCreateSeq(), VecCreate(), VecDuplicate(), VecDuplicateVecs(), VecCreateMPI(),
          VecGhostGetLocalForm(), VecGhostRestoreLocalForm(),
          VecCreateGhostWithArray(), VecCreateMPIWithArray(), VecCreateGhostBlockWithArray()

@*/
PetscErrorCode  VecCreateGhostBlock(MPI_Comm comm,PetscInt bs,PetscInt n,PetscInt N,PetscInt nghost,const PetscInt ghosts[],Vec *vv)
{
  PetscErrorCode ierr;

  PetscFunctionBegin;
  ierr = VecCreateGhostBlockWithArray(comm,bs,n,N,nghost,ghosts,0,vv);CHKERRQ(ierr);
  PetscFunctionReturn(0);
}
966