xref: /petsc/src/vec/vec/impls/mpi/pbvec.c (revision d68cfcfb7960458db93f478d4903808ac3f22c88)
1 
2 /*
3    This file contains routines for parallel vector operations.
4  */
5 #include <petscoptions.h>
6 #include <../src/vec/vec/impls/mpi/pvecimpl.h>   /*I  "petscvec.h"   I*/
7 
8 #undef __FUNCT__
9 #define __FUNCT__ "VecDot_MPI"
10 static PetscErrorCode VecDot_MPI(Vec xin,Vec yin,PetscScalar *z)
11 {
12   PetscScalar    sum,work;
13   PetscErrorCode ierr;
14 
15   PetscFunctionBegin;
16   ierr = VecDot_Seq(xin,yin,&work);CHKERRQ(ierr);
17   ierr = MPI_Allreduce(&work,&sum,1,MPIU_SCALAR,MPIU_SUM,PetscObjectComm((PetscObject)xin));CHKERRQ(ierr);
18   *z   = sum;
19   PetscFunctionReturn(0);
20 }
21 
22 #undef __FUNCT__
23 #define __FUNCT__ "VecTDot_MPI"
24 static PetscErrorCode VecTDot_MPI(Vec xin,Vec yin,PetscScalar *z)
25 {
26   PetscScalar    sum,work;
27   PetscErrorCode ierr;
28 
29   PetscFunctionBegin;
30   ierr = VecTDot_Seq(xin,yin,&work);CHKERRQ(ierr);
31   ierr = MPI_Allreduce(&work,&sum,1,MPIU_SCALAR,MPIU_SUM,PetscObjectComm((PetscObject)xin));CHKERRQ(ierr);
32   *z   = sum;
33   PetscFunctionReturn(0);
34 }
35 
36 extern PetscErrorCode VecView_MPI_Draw(Vec,PetscViewer);
37 
38 #undef __FUNCT__
39 #define __FUNCT__ "VecPlaceArray_MPI"
40 static PetscErrorCode VecPlaceArray_MPI(Vec vin,const PetscScalar *a)
41 {
42   PetscErrorCode ierr;
43   Vec_MPI        *v = (Vec_MPI*)vin->data;
44 
45   PetscFunctionBegin;
46   if (v->unplacedarray) SETERRQ(PETSC_COMM_SELF,PETSC_ERR_ARG_WRONGSTATE,"VecPlaceArray() was already called on this vector, without a call to VecResetArray()");
47   v->unplacedarray = v->array;  /* save previous array so reset can bring it back */
48   v->array         = (PetscScalar*)a;
49   if (v->localrep) {
50     ierr = VecPlaceArray(v->localrep,a);CHKERRQ(ierr);
51   }
52   PetscFunctionReturn(0);
53 }
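
/*
   Illustrative sketch (not part of this file or the PETSc API): the caller-side pairing that
   VecPlaceArray_MPI() above enforces.  A placed array must be removed with VecResetArray()
   before another array can be placed.  The function and buffer names are hypothetical.
*/
static PetscErrorCode ExampleUsage_PlaceArray(Vec x)
{
  PetscErrorCode ierr;
  PetscInt       n;
  PetscScalar    *mybuffer;

  PetscFunctionBegin;
  ierr = VecGetLocalSize(x,&n);CHKERRQ(ierr);
  ierr = PetscMalloc1(n,&mybuffer);CHKERRQ(ierr);
  ierr = VecPlaceArray(x,mybuffer);CHKERRQ(ierr);  /* x now stores its entries in mybuffer */
  ierr = VecSet(x,1.0);CHKERRQ(ierr);
  ierr = VecResetArray(x);CHKERRQ(ierr);           /* restore the original array */
  ierr = PetscFree(mybuffer);CHKERRQ(ierr);
  PetscFunctionReturn(0);
}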
54 
55 extern PetscErrorCode VecGetValues_MPI(Vec,PetscInt,const PetscInt[],PetscScalar[]);
56 
57 #undef __FUNCT__
58 #define __FUNCT__ "VecDuplicate_MPI"
59 static PetscErrorCode VecDuplicate_MPI(Vec win,Vec *v)
60 {
61   PetscErrorCode ierr;
62   Vec_MPI        *vw,*w = (Vec_MPI*)win->data;
63   PetscScalar    *array;
64 
65   PetscFunctionBegin;
66   ierr = VecCreate(PetscObjectComm((PetscObject)win),v);CHKERRQ(ierr);
67   ierr = PetscLayoutReference(win->map,&(*v)->map);CHKERRQ(ierr);
68 
69   ierr = VecCreate_MPI_Private(*v,PETSC_TRUE,w->nghost,0);CHKERRQ(ierr);
70   vw   = (Vec_MPI*)(*v)->data;
71   ierr = PetscMemcpy((*v)->ops,win->ops,sizeof(struct _VecOps));CHKERRQ(ierr);
72 
73   /* save local representation of the parallel vector (and scatter) if it exists */
74   if (w->localrep) {
75     ierr = VecGetArray(*v,&array);CHKERRQ(ierr);
76     ierr = VecCreateSeqWithArray(PETSC_COMM_SELF,PetscAbs(win->map->bs),win->map->n+w->nghost,array,&vw->localrep);CHKERRQ(ierr);
77     ierr = PetscMemcpy(vw->localrep->ops,w->localrep->ops,sizeof(struct _VecOps));CHKERRQ(ierr);
78     ierr = VecRestoreArray(*v,&array);CHKERRQ(ierr);
79     ierr = PetscLogObjectParent((PetscObject)*v,(PetscObject)vw->localrep);CHKERRQ(ierr);
80 
81     vw->localupdate = w->localupdate;
82     if (vw->localupdate) {
83       ierr = PetscObjectReference((PetscObject)vw->localupdate);CHKERRQ(ierr);
84     }
85   }
86 
87   /* New vector should inherit stashing property of parent */
88   (*v)->stash.donotstash   = win->stash.donotstash;
89   (*v)->stash.ignorenegidx = win->stash.ignorenegidx;
90 
91   ierr = PetscObjectListDuplicate(((PetscObject)win)->olist,&((PetscObject)(*v))->olist);CHKERRQ(ierr);
92   ierr = PetscFunctionListDuplicate(((PetscObject)win)->qlist,&((PetscObject)(*v))->qlist);CHKERRQ(ierr);
93 
94   (*v)->map->bs   = PetscAbs(win->map->bs);
95   (*v)->bstash.bs = win->bstash.bs;
96   PetscFunctionReturn(0);
97 }
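
/*
   Illustrative sketch (not part of this file or the PETSc API): VecDuplicate() on a ghosted
   VECMPI, as implemented by VecDuplicate_MPI() above.  The duplicate gets its own storage but
   shares the ghost-update scatter (localupdate) by reference and inherits the stash options,
   so it can be ghost-updated immediately.  Values are not copied.  Names are hypothetical.
*/
static PetscErrorCode ExampleUsage_DuplicateGhosted(Vec xghosted)
{
  PetscErrorCode ierr;
  Vec            y;

  PetscFunctionBegin;
  ierr = VecDuplicate(xghosted,&y);CHKERRQ(ierr);
  ierr = VecGhostUpdateBegin(y,INSERT_VALUES,SCATTER_FORWARD);CHKERRQ(ierr);
  ierr = VecGhostUpdateEnd(y,INSERT_VALUES,SCATTER_FORWARD);CHKERRQ(ierr);
  ierr = VecDestroy(&y);CHKERRQ(ierr);
  PetscFunctionReturn(0);
}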
98 
99 extern PetscErrorCode VecSetOption_MPI(Vec,VecOption,PetscBool);
100 extern PetscErrorCode VecResetArray_MPI(Vec);
101 
102 #undef __FUNCT__
103 #define __FUNCT__ "VecAssemblySend_MPI_Private"
104 static PetscErrorCode VecAssemblySend_MPI_Private(MPI_Comm comm,const PetscMPIInt tag[],PetscMPIInt rankid,PetscMPIInt rank,void *sdata,MPI_Request req[],void *ctx)
105 {
106   Vec X = (Vec)ctx;
107   Vec_MPI *x = (Vec_MPI*)X->data;
108   VecAssemblyHeader *hdr = (VecAssemblyHeader*)sdata;
109   PetscInt bs = X->map->bs;
110   PetscErrorCode ierr;
111 
112   PetscFunctionBegin;
113   /* x->recvhdr only exists when we are reusing a communication network.  In that case, some messages can be empty, but
114    * we have to send them this time if we sent them before because the receiver is expecting them. */
115   if (hdr->count || (x->recvhdr && x->sendptrs[rankid].ints)) {
116     ierr = MPI_Isend(x->sendptrs[rankid].ints,hdr->count,MPIU_INT,rank,tag[0],comm,&req[0]);CHKERRQ(ierr);
117     ierr = MPI_Isend(x->sendptrs[rankid].scalars,hdr->count,MPIU_SCALAR,rank,tag[1],comm,&req[1]);CHKERRQ(ierr);
118   }
119   if (hdr->bcount || (x->recvhdr && x->sendptrs[rankid].intb)) {
120     ierr = MPI_Isend(x->sendptrs[rankid].intb,hdr->bcount,MPIU_INT,rank,tag[2],comm,&req[2]);CHKERRQ(ierr);
121     ierr = MPI_Isend(x->sendptrs[rankid].scalarb,hdr->bcount*bs,MPIU_SCALAR,rank,tag[3],comm,&req[3]);CHKERRQ(ierr);
122   }
123   PetscFunctionReturn(0);
124 }
125 
126 #undef __FUNCT__
127 #define __FUNCT__ "VecAssemblyRecv_MPI_Private"
128 static PetscErrorCode VecAssemblyRecv_MPI_Private(MPI_Comm comm,const PetscMPIInt tag[],PetscMPIInt rank,void *rdata,MPI_Request req[],void *ctx)
129 {
130   Vec X = (Vec)ctx;
131   Vec_MPI *x = (Vec_MPI*)X->data;
132   VecAssemblyHeader *hdr = (VecAssemblyHeader*)rdata;
133   PetscErrorCode ierr;
134   PetscInt bs = X->map->bs;
135   VecAssemblyFrame *frame;
136 
137   PetscFunctionBegin;
138   ierr = PetscSegBufferGet(x->segrecvframe,1,&frame);CHKERRQ(ierr);
139 
140   if (hdr->count) {
141     ierr = PetscSegBufferGet(x->segrecvint,hdr->count,&frame->ints);CHKERRQ(ierr);
142     ierr = MPI_Irecv(frame->ints,hdr->count,MPIU_INT,rank,tag[0],comm,&req[0]);CHKERRQ(ierr);
143     ierr = PetscSegBufferGet(x->segrecvscalar,hdr->count,&frame->scalars);CHKERRQ(ierr);
144     ierr = MPI_Irecv(frame->scalars,hdr->count,MPIU_SCALAR,rank,tag[1],comm,&req[1]);CHKERRQ(ierr);
145     frame->pendings = 2;
146   } else {
147     frame->ints = NULL;
148     frame->scalars = NULL;
149     frame->pendings = 0;
150   }
151 
152   if (hdr->bcount) {
153     ierr = PetscSegBufferGet(x->segrecvint,hdr->bcount,&frame->intb);CHKERRQ(ierr);
154     ierr = MPI_Irecv(frame->intb,hdr->bcount,MPIU_INT,rank,tag[2],comm,&req[2]);CHKERRQ(ierr);
155     ierr = PetscSegBufferGet(x->segrecvscalar,hdr->bcount*bs,&frame->scalarb);CHKERRQ(ierr);
156     ierr = MPI_Irecv(frame->scalarb,hdr->bcount*bs,MPIU_SCALAR,rank,tag[3],comm,&req[3]);CHKERRQ(ierr);
157     frame->pendingb = 2;
158   } else {
159     frame->intb = NULL;
160     frame->scalarb = NULL;
161     frame->pendingb = 0;
162   }
163   PetscFunctionReturn(0);
164 }
165 
166 #undef __FUNCT__
167 #define __FUNCT__ "VecAssemblyBegin_MPI_BTS"
168 static PetscErrorCode VecAssemblyBegin_MPI_BTS(Vec X)
169 {
170   Vec_MPI        *x = (Vec_MPI*)X->data;
171   PetscErrorCode ierr;
172   MPI_Comm       comm;
173   PetscInt       i,j,jb,bs;
174 
175   PetscFunctionBegin;
176   if (X->stash.donotstash) PetscFunctionReturn(0);
177 
178   ierr = PetscObjectGetComm((PetscObject)X,&comm);CHKERRQ(ierr);
179   ierr = VecGetBlockSize(X,&bs);CHKERRQ(ierr);
180 #if defined(PETSC_USE_DEBUG)
181   {
182     InsertMode addv;
183     ierr = MPI_Allreduce((PetscEnum*)&X->stash.insertmode,(PetscEnum*)&addv,1,MPIU_ENUM,MPI_BOR,comm);CHKERRQ(ierr);
184     if (addv == (ADD_VALUES|INSERT_VALUES)) SETERRQ(comm,PETSC_ERR_ARG_NOTSAMETYPE,"Some processors inserted values while others added");
185   }
186 #endif
187   X->bstash.insertmode = X->stash.insertmode; /* Block stash implicitly tracks InsertMode of scalar stash */
188 
189   ierr = VecStashSortCompress_Private(&X->stash);CHKERRQ(ierr);
190   ierr = VecStashSortCompress_Private(&X->bstash);CHKERRQ(ierr);
191 
192   if (!x->sendranks) {
193     PetscMPIInt nowners,bnowners,*owners,*bowners;
194     PetscInt ntmp;
195     ierr = VecStashGetOwnerList_Private(&X->stash,X->map,&nowners,&owners);CHKERRQ(ierr);
196     ierr = VecStashGetOwnerList_Private(&X->bstash,X->map,&bnowners,&bowners);CHKERRQ(ierr);
197     ierr = PetscMergeMPIIntArray(nowners,owners,bnowners,bowners,&ntmp,&x->sendranks);CHKERRQ(ierr);
198     x->nsendranks = ntmp;
199     ierr = PetscFree(owners);CHKERRQ(ierr);
200     ierr = PetscFree(bowners);CHKERRQ(ierr);
201     ierr = PetscMalloc1(x->nsendranks,&x->sendhdr);CHKERRQ(ierr);
202     ierr = PetscCalloc1(x->nsendranks,&x->sendptrs);CHKERRQ(ierr);
203   }
204   for (i=0,j=0,jb=0; i<x->nsendranks; i++) {
205     PetscMPIInt rank = x->sendranks[i];
206     x->sendhdr[i].insertmode = X->stash.insertmode;
207     /* Initialize pointers for non-empty stashes the first time around.  Subsequent assemblies with
208      * VEC_SUBSET_OFF_PROC_ENTRIES will leave the old pointers (dangling because the stash has been collected) when
209      * there is nothing new to send, so that size-zero messages get sent instead. */
210     x->sendhdr[i].count = 0;
211     if (X->stash.n) {
212       x->sendptrs[i].ints    = &X->stash.idx[j];
213       x->sendptrs[i].scalars = &X->stash.array[j];
214       for ( ; j<X->stash.n && X->stash.idx[j] < X->map->range[rank+1]; j++) x->sendhdr[i].count++;
215     }
216     x->sendhdr[i].bcount = 0;
217     if (X->bstash.n) {
218       x->sendptrs[i].intb    = &X->bstash.idx[jb];
219       x->sendptrs[i].scalarb = &X->bstash.array[jb*bs];
220       for ( ; jb<X->bstash.n && X->bstash.idx[jb]*bs < X->map->range[rank+1]; jb++) x->sendhdr[i].bcount++;
221     }
222   }
223 
224   if (!x->segrecvint) {ierr = PetscSegBufferCreate(sizeof(PetscInt),1000,&x->segrecvint);CHKERRQ(ierr);}
225   if (!x->segrecvscalar) {ierr = PetscSegBufferCreate(sizeof(PetscScalar),1000,&x->segrecvscalar);CHKERRQ(ierr);}
226   if (!x->segrecvframe) {ierr = PetscSegBufferCreate(sizeof(VecAssemblyFrame),50,&x->segrecvframe);CHKERRQ(ierr);}
227   if (x->recvhdr) {             /* VEC_SUBSET_OFF_PROC_ENTRIES and this is not the first assembly */
228     PetscMPIInt tag[4];
229     if (!x->assembly_subset) SETERRQ(comm,PETSC_ERR_PLIB,"Attempt to reuse rendezvous when not VEC_SUBSET_OFF_PROC_ENTRIES");
230     for (i=0; i<4; i++) {ierr = PetscCommGetNewTag(comm,&tag[i]);CHKERRQ(ierr);}
231     for (i=0; i<x->nsendranks; i++) {
232       ierr = VecAssemblySend_MPI_Private(comm,tag,i,x->sendranks[i],x->sendhdr+i,x->sendreqs+4*i,X);CHKERRQ(ierr);
233     }
234     for (i=0; i<x->nrecvranks; i++) {
235       ierr = VecAssemblyRecv_MPI_Private(comm,tag,x->recvranks[i],x->recvhdr+i,x->recvreqs+4*i,X);CHKERRQ(ierr);
236     }
237     x->use_status = PETSC_TRUE;
238   } else {                      /* First time */
239     ierr = PetscCommBuildTwoSidedFReq(comm,3,MPIU_INT,x->nsendranks,x->sendranks,(PetscInt*)x->sendhdr,&x->nrecvranks,&x->recvranks,&x->recvhdr,4,&x->sendreqs,&x->recvreqs,VecAssemblySend_MPI_Private,VecAssemblyRecv_MPI_Private,X);CHKERRQ(ierr);
240     x->use_status = PETSC_FALSE;
241   }
242 
243   {
244     PetscInt nstash,reallocs;
245     ierr = VecStashGetInfo_Private(&X->stash,&nstash,&reallocs);CHKERRQ(ierr);
246     ierr = PetscInfo2(X,"Stash has %D entries, uses %D mallocs.\n",nstash,reallocs);CHKERRQ(ierr);
247     ierr = VecStashGetInfo_Private(&X->bstash,&nstash,&reallocs);CHKERRQ(ierr);
248     ierr = PetscInfo2(X,"Block-Stash has %D entries, uses %D mallocs.\n",nstash,reallocs);CHKERRQ(ierr);
249   }
250   PetscFunctionReturn(0);
251 }
252 
253 #undef __FUNCT__
254 #define __FUNCT__ "VecAssemblyEnd_MPI_BTS"
255 static PetscErrorCode VecAssemblyEnd_MPI_BTS(Vec X)
256 {
257   Vec_MPI *x = (Vec_MPI*)X->data;
258   PetscInt bs = X->map->bs;
259   PetscMPIInt npending,*some_indices,r;
260   MPI_Status  *some_statuses;
261   PetscScalar *xarray;
262   PetscErrorCode ierr;
263   VecAssemblyFrame *frame;
264 
265   PetscFunctionBegin;
266   if (X->stash.donotstash) {
267     X->stash.insertmode = NOT_SET_VALUES;
268     X->bstash.insertmode = NOT_SET_VALUES;
269     PetscFunctionReturn(0);
270   }
271 
272   ierr = VecGetArray(X,&xarray);CHKERRQ(ierr);
273   ierr = PetscSegBufferExtractInPlace(x->segrecvframe,&frame);CHKERRQ(ierr);
274   ierr = PetscMalloc2(4*x->nrecvranks,&some_indices,x->use_status?4*x->nrecvranks:0,&some_statuses);CHKERRQ(ierr);
275   for (r=0,npending=0; r<x->nrecvranks; r++) npending += frame[r].pendings + frame[r].pendingb;
276   while (npending>0) {
277     PetscMPIInt ndone,ii;
278     /* Filling MPI_Status fields requires some resources from the MPI library.  We skip it on the first assembly, or
279      * when VEC_SUBSET_OFF_PROC_ENTRIES has not been set, because we could exchange exact sizes in the initial
280      * rendezvous.  When the rendezvous is elided, however, we use MPI_Status to get actual message lengths, so that
281      * subsequent assembly can set a proper subset of the values. */
282     ierr = MPI_Waitsome(4*x->nrecvranks,x->recvreqs,&ndone,some_indices,x->use_status?some_statuses:MPI_STATUSES_IGNORE);CHKERRQ(ierr);
283     for (ii=0; ii<ndone; ii++) {
284       PetscInt i = some_indices[ii]/4,j,k;
285       InsertMode imode = (InsertMode)x->recvhdr[i].insertmode;
286       PetscInt *recvint;
287       PetscScalar *recvscalar;
288       PetscBool intmsg = (PetscBool)(some_indices[ii]%2 == 0);
289       PetscBool blockmsg = (PetscBool)((some_indices[ii]%4)/2 == 1);
290       npending--;
291       if (!blockmsg) { /* Scalar stash */
292         PetscMPIInt count;
293         if (--frame[i].pendings > 0) continue;
294         if (x->use_status) {
295           ierr = MPI_Get_count(&some_statuses[ii],intmsg ? MPIU_INT : MPIU_SCALAR,&count);CHKERRQ(ierr);
296         } else count = x->recvhdr[i].count;
297         for (j=0,recvint=frame[i].ints,recvscalar=frame[i].scalars; j<count; j++,recvint++) {
298           PetscInt loc = *recvint - X->map->rstart;
299           if (*recvint < X->map->rstart || X->map->rend <= *recvint) SETERRQ3(PETSC_COMM_SELF,PETSC_ERR_PLIB,"Received vector entry %D out of local range [%D,%D)",*recvint,X->map->rstart,X->map->rend);
300           switch (imode) {
301           case ADD_VALUES:
302             xarray[loc] += *recvscalar++;
303             break;
304           case INSERT_VALUES:
305             xarray[loc] = *recvscalar++;
306             break;
307           default: SETERRQ1(PETSC_COMM_SELF,PETSC_ERR_SUP,"Insert mode not supported 0x%x",imode);
308           }
309         }
310       } else {                  /* Block stash */
311         PetscMPIInt count;
312         if (--frame[i].pendingb > 0) continue;
313         if (x->use_status) {
314           ierr = MPI_Get_count(&some_statuses[ii],intmsg ? MPIU_INT : MPIU_SCALAR,&count);CHKERRQ(ierr);
315           if (!intmsg) count /= bs; /* Convert from number of scalars to number of blocks */
316         } else count = x->recvhdr[i].bcount;
317         for (j=0,recvint=frame[i].intb,recvscalar=frame[i].scalarb; j<count; j++,recvint++) {
318           PetscInt loc = (*recvint)*bs - X->map->rstart;
319           switch (imode) {
320           case ADD_VALUES:
321             for (k=loc; k<loc+bs; k++) xarray[k] += *recvscalar++;
322             break;
323           case INSERT_VALUES:
324             for (k=loc; k<loc+bs; k++) xarray[k] = *recvscalar++;
325             break;
326           default: SETERRQ1(PETSC_COMM_SELF,PETSC_ERR_SUP,"Insert mode not supported 0x%x",imode);
327           }
328         }
329       }
330     }
331   }
332   ierr = VecRestoreArray(X,&xarray);CHKERRQ(ierr);
333   ierr = MPI_Waitall(4*x->nsendranks,x->sendreqs,MPI_STATUSES_IGNORE);CHKERRQ(ierr);
334   ierr = PetscFree2(some_indices,some_statuses);CHKERRQ(ierr);
335   if (x->assembly_subset) {
336     void *dummy;                /* reset segbuffers */
337     ierr = PetscSegBufferExtractInPlace(x->segrecvint,&dummy);CHKERRQ(ierr);
338     ierr = PetscSegBufferExtractInPlace(x->segrecvscalar,&dummy);CHKERRQ(ierr);
339   } else {
340     ierr = VecAssemblyReset_MPI(X);CHKERRQ(ierr);
341   }
342 
343   X->stash.insertmode = NOT_SET_VALUES;
344   X->bstash.insertmode = NOT_SET_VALUES;
345   ierr = VecStashScatterEnd_Private(&X->stash);CHKERRQ(ierr);
346   ierr = VecStashScatterEnd_Private(&X->bstash);CHKERRQ(ierr);
347   PetscFunctionReturn(0);
348 }
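
/*
   Illustrative sketch (not part of this file or the PETSc API): the user-level assembly loop
   served by VecAssemblyBegin_MPI_BTS()/VecAssemblyEnd_MPI_BTS() above.  Setting
   VEC_SUBSET_OFF_PROC_ENTRIES promises that later assemblies communicate with a subset of the
   ranks used the first time, which allows the rendezvous (x->recvhdr) to be reused instead of
   rebuilt.  The index and value choices are arbitrary.
*/
static PetscErrorCode ExampleUsage_AssemblyBTS(Vec x)
{
  PetscErrorCode ierr;
  PetscInt       it,row = 0;          /* row 0 is off-process except on rank 0 */
  PetscScalar    val = 1.0;

  PetscFunctionBegin;
  ierr = VecSetOption(x,VEC_SUBSET_OFF_PROC_ENTRIES,PETSC_TRUE);CHKERRQ(ierr);
  for (it=0; it<5; it++) {            /* repeated assemblies reuse the communication pattern */
    ierr = VecSetValues(x,1,&row,&val,ADD_VALUES);CHKERRQ(ierr);
    ierr = VecAssemblyBegin(x);CHKERRQ(ierr);
    ierr = VecAssemblyEnd(x);CHKERRQ(ierr);
  }
  PetscFunctionReturn(0);
}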
349 
350 #undef __FUNCT__
351 #define __FUNCT__ "VecAssemblyReset_MPI"
352 PetscErrorCode VecAssemblyReset_MPI(Vec X)
353 {
354   Vec_MPI *x = (Vec_MPI*)X->data;
355   PetscErrorCode ierr;
356 
357   PetscFunctionBegin;
358   ierr = PetscFree(x->sendreqs);CHKERRQ(ierr);
359   ierr = PetscFree(x->recvreqs);CHKERRQ(ierr);
360   ierr = PetscFree(x->sendranks);CHKERRQ(ierr);
361   ierr = PetscFree(x->recvranks);CHKERRQ(ierr);
362   ierr = PetscFree(x->sendhdr);CHKERRQ(ierr);
363   ierr = PetscFree(x->recvhdr);CHKERRQ(ierr);
364   ierr = PetscFree(x->sendptrs);CHKERRQ(ierr);
365   ierr = PetscSegBufferDestroy(&x->segrecvint);CHKERRQ(ierr);
366   ierr = PetscSegBufferDestroy(&x->segrecvscalar);CHKERRQ(ierr);
367   ierr = PetscSegBufferDestroy(&x->segrecvframe);CHKERRQ(ierr);
368   PetscFunctionReturn(0);
369 }
370 
371 
372 #undef __FUNCT__
373 #define __FUNCT__ "VecSetFromOptions_MPI"
374 static PetscErrorCode VecSetFromOptions_MPI(PetscOptions *PetscOptionsObject,Vec X)
375 {
376   PetscErrorCode ierr;
377   PetscBool      flg = PETSC_FALSE,set;
378 
379   PetscFunctionBegin;
380   ierr = PetscOptionsHead(PetscOptionsObject,"VecMPI Options");CHKERRQ(ierr);
381   ierr = PetscOptionsBool("-vec_assembly_bts","Use BuildTwoSided version of assembly","",flg,&flg,&set);CHKERRQ(ierr);
382   if (set) {
383     X->ops->assemblybegin = flg ? VecAssemblyBegin_MPI_BTS : VecAssemblyBegin_MPI;
384     X->ops->assemblyend   = flg ? VecAssemblyEnd_MPI_BTS   : VecAssemblyEnd_MPI;
385   }
386   ierr = PetscOptionsTail();CHKERRQ(ierr);
387   PetscFunctionReturn(0);
388 }
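
/*
   Illustrative sketch (not part of this file or the PETSc API): the creation sequence through
   which VecSetFromOptions_MPI() above is reached.  Running with -vec_assembly_bts 0 or
   -vec_assembly_bts 1 switches between the stash-based and BuildTwoSided assembly routines.
   The global size is arbitrary.
*/
static PetscErrorCode ExampleUsage_SetFromOptions(MPI_Comm comm,Vec *x)
{
  PetscErrorCode ierr;

  PetscFunctionBegin;
  ierr = VecCreate(comm,x);CHKERRQ(ierr);
  ierr = VecSetSizes(*x,PETSC_DECIDE,100);CHKERRQ(ierr);
  ierr = VecSetFromOptions(*x);CHKERRQ(ierr);      /* consults -vec_type and -vec_assembly_bts */
  PetscFunctionReturn(0);
}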
389 
390 
391 static struct _VecOps DvOps = { VecDuplicate_MPI, /* 1 */
392                                 VecDuplicateVecs_Default,
393                                 VecDestroyVecs_Default,
394                                 VecDot_MPI,
395                                 VecMDot_MPI,
396                                 VecNorm_MPI,
397                                 VecTDot_MPI,
398                                 VecMTDot_MPI,
399                                 VecScale_Seq,
400                                 VecCopy_Seq, /* 10 */
401                                 VecSet_Seq,
402                                 VecSwap_Seq,
403                                 VecAXPY_Seq,
404                                 VecAXPBY_Seq,
405                                 VecMAXPY_Seq,
406                                 VecAYPX_Seq,
407                                 VecWAXPY_Seq,
408                                 VecAXPBYPCZ_Seq,
409                                 VecPointwiseMult_Seq,
410                                 VecPointwiseDivide_Seq,
411                                 VecSetValues_MPI, /* 20 */
412                                 VecAssemblyBegin_MPI_BTS,
413                                 VecAssemblyEnd_MPI_BTS,
414                                 0,
415                                 VecGetSize_MPI,
416                                 VecGetSize_Seq,
417                                 0,
418                                 VecMax_MPI,
419                                 VecMin_MPI,
420                                 VecSetRandom_Seq,
421                                 VecSetOption_MPI,
422                                 VecSetValuesBlocked_MPI,
423                                 VecDestroy_MPI,
424                                 VecView_MPI,
425                                 VecPlaceArray_MPI,
426                                 VecReplaceArray_Seq,
427                                 VecDot_Seq,
428                                 VecTDot_Seq,
429                                 VecNorm_Seq,
430                                 VecMDot_Seq,
431                                 VecMTDot_Seq,
432                                 VecLoad_Default,
433                                 VecReciprocal_Default,
434                                 VecConjugate_Seq,
435                                 0,
436                                 0,
437                                 VecResetArray_MPI,
438                                 VecSetFromOptions_MPI,/*set from options */
439                                 VecMaxPointwiseDivide_Seq,
440                                 VecPointwiseMax_Seq,
441                                 VecPointwiseMaxAbs_Seq,
442                                 VecPointwiseMin_Seq,
443                                 VecGetValues_MPI,
444                                 0,
445                                 0,
446                                 0,
447                                 0,
448                                 0,
449                                 0,
450                                 VecStrideGather_Default,
451                                 VecStrideScatter_Default,
452                                 0,
453                                 0,
454                                 0,
455                                 0,
456                                 0,
457                                 VecStrideSubSetGather_Default,
458                                 VecStrideSubSetScatter_Default,
459                                 0,
460                                 0
461 };
462 
463 #undef __FUNCT__
464 #define __FUNCT__ "VecCreate_MPI_Private"
465 /*
466     VecCreate_MPI_Private - Basic create routine called by VecCreate_MPI() (i.e. VecCreateMPI()),
467     VecCreateMPIWithArray(), VecCreate_Shared() (i.e. VecCreateShared()), VecCreateGhost(),
468     VecDuplicate_MPI(), VecCreateGhostWithArray(), and VecDuplicate_Shared()
469 
470     If alloc is true and array is NULL then this routine allocates the space, otherwise
471     no space is allocated.
472 */
473 PetscErrorCode VecCreate_MPI_Private(Vec v,PetscBool alloc,PetscInt nghost,const PetscScalar array[])
474 {
475   Vec_MPI        *s;
476   PetscErrorCode ierr;
477 
478   PetscFunctionBegin;
479   ierr           = PetscNewLog(v,&s);CHKERRQ(ierr);
480   v->data        = (void*)s;
481   ierr           = PetscMemcpy(v->ops,&DvOps,sizeof(DvOps));CHKERRQ(ierr);
482   s->nghost      = nghost;
483   v->petscnative = PETSC_TRUE;
484 
485   ierr = PetscLayoutSetUp(v->map);CHKERRQ(ierr);
486 
487   s->array           = (PetscScalar*)array;
488   s->array_allocated = 0;
489   if (alloc && !array) {
490     PetscInt n = v->map->n+nghost;
491     ierr               = PetscMalloc1(n,&s->array);CHKERRQ(ierr);
492     ierr               = PetscLogObjectMemory((PetscObject)v,n*sizeof(PetscScalar));CHKERRQ(ierr);
493     ierr               = PetscMemzero(s->array,n*sizeof(PetscScalar));CHKERRQ(ierr);
494     s->array_allocated = s->array;
495   }
496 
497   /* By default parallel vectors do not have local representation */
498   s->localrep    = 0;
499   s->localupdate = 0;
500 
501   v->stash.insertmode = NOT_SET_VALUES;
502   v->bstash.insertmode = NOT_SET_VALUES;
503   /* create the stashes. The block-size for bstash is set later when
504      VecSetValuesBlocked is called.
505   */
506   ierr = VecStashCreate_Private(PetscObjectComm((PetscObject)v),1,&v->stash);CHKERRQ(ierr);
507   ierr = VecStashCreate_Private(PetscObjectComm((PetscObject)v),PetscAbs(v->map->bs),&v->bstash);CHKERRQ(ierr);
508 
509 #if defined(PETSC_HAVE_MATLAB_ENGINE)
510   ierr = PetscObjectComposeFunction((PetscObject)v,"PetscMatlabEnginePut_C",VecMatlabEnginePut_Default);CHKERRQ(ierr);
511   ierr = PetscObjectComposeFunction((PetscObject)v,"PetscMatlabEngineGet_C",VecMatlabEngineGet_Default);CHKERRQ(ierr);
512 #endif
513   ierr = PetscObjectChangeTypeName((PetscObject)v,VECMPI);CHKERRQ(ierr);
514   PetscFunctionReturn(0);
515 }
516 
517 /*MC
518    VECMPI - VECMPI = "mpi" - The basic parallel vector
519 
520    Options Database Keys:
521 . -vec_type mpi - sets the vector type to VECMPI during a call to VecSetFromOptions()
522 
523   Level: beginner
524 
525 .seealso: VecCreate(), VecSetType(), VecSetFromOptions(), VecCreateMPIWithArray(), VECMPI, VecType, VecCreateMPI()
526 M*/
527 
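/*
   Illustrative sketch (not part of this file or the PETSc API): creating a VECMPI explicitly
   with VecSetType(), as an alternative to VecCreateMPI() or -vec_type mpi.  The sizes are
   arbitrary.
*/
static PetscErrorCode ExampleUsage_CreateVECMPI(MPI_Comm comm,Vec *x)
{
  PetscErrorCode ierr;

  PetscFunctionBegin;
  ierr = VecCreate(comm,x);CHKERRQ(ierr);
  ierr = VecSetSizes(*x,PETSC_DECIDE,100);CHKERRQ(ierr);
  ierr = VecSetType(*x,VECMPI);CHKERRQ(ierr);
  PetscFunctionReturn(0);
}
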
528 #undef __FUNCT__
529 #define __FUNCT__ "VecCreate_MPI"
530 PETSC_EXTERN PetscErrorCode VecCreate_MPI(Vec vv)
531 {
532   PetscErrorCode ierr;
533 
534   PetscFunctionBegin;
535   ierr = VecCreate_MPI_Private(vv,PETSC_TRUE,0,0);CHKERRQ(ierr);
536   PetscFunctionReturn(0);
537 }
538 
539 /*MC
540    VECSTANDARD = "standard" - A VECSEQ on one process and VECMPI on more than one process
541 
542    Options Database Keys:
543 . -vec_type standard - sets the vector type to VECSTANDARD during a call to VecSetFromOptions()
544 
545   Level: beginner
546 
547 .seealso: VecCreateSeq(), VecCreateMPI()
548 M*/
549 
550 #undef __FUNCT__
551 #define __FUNCT__ "VecCreate_Standard"
552 PETSC_EXTERN PetscErrorCode VecCreate_Standard(Vec v)
553 {
554   PetscErrorCode ierr;
555   PetscMPIInt    size;
556 
557   PetscFunctionBegin;
558   ierr = MPI_Comm_size(PetscObjectComm((PetscObject)v),&size);CHKERRQ(ierr);
559   if (size == 1) {
560     ierr = VecSetType(v,VECSEQ);CHKERRQ(ierr);
561   } else {
562     ierr = VecSetType(v,VECMPI);CHKERRQ(ierr);
563   }
564   PetscFunctionReturn(0);
565 }
566 
567 #undef __FUNCT__
568 #define __FUNCT__ "VecCreateMPIWithArray"
569 /*@C
570    VecCreateMPIWithArray - Creates a parallel, array-style vector,
571    where the user provides the array space to store the vector values.
572 
573    Collective on MPI_Comm
574 
575    Input Parameters:
576 +  comm  - the MPI communicator to use
577 .  bs    - block size, same meaning as VecSetBlockSize()
578 .  n     - local vector length, cannot be PETSC_DECIDE
579 .  N     - global vector length (or PETSC_DECIDE to have it calculated)
580 -  array - the user provided array to store the vector values
581 
582    Output Parameter:
583 .  vv - the vector
584 
585    Notes:
586    Use VecDuplicate() or VecDuplicateVecs() to form additional vectors of the
587    same type as an existing vector.
588 
589    If the user-provided array is NULL, then VecPlaceArray() can be used
590    at a later stage to SET the array for storing the vector values.
591 
592    PETSc does NOT free the array when the vector is destroyed via VecDestroy().
593    The user should not free the array until the vector is destroyed.
594 
595    Level: intermediate
596 
597    Concepts: vectors^creating with array
598 
599 .seealso: VecCreateSeqWithArray(), VecCreate(), VecDuplicate(), VecDuplicateVecs(), VecCreateGhost(),
600           VecCreateMPI(), VecCreateGhostWithArray(), VecPlaceArray()
601 
602 @*/
603 PetscErrorCode  VecCreateMPIWithArray(MPI_Comm comm,PetscInt bs,PetscInt n,PetscInt N,const PetscScalar array[],Vec *vv)
604 {
605   PetscErrorCode ierr;
606 
607   PetscFunctionBegin;
608   if (n == PETSC_DECIDE) SETERRQ(PETSC_COMM_SELF,PETSC_ERR_ARG_OUTOFRANGE,"Must set local size of vector");
609   ierr = PetscSplitOwnership(comm,&n,&N);CHKERRQ(ierr);
610   ierr = VecCreate(comm,vv);CHKERRQ(ierr);
611   ierr = VecSetSizes(*vv,n,N);CHKERRQ(ierr);
612   ierr = VecSetBlockSize(*vv,bs);CHKERRQ(ierr);
613   ierr = VecCreate_MPI_Private(*vv,PETSC_FALSE,0,array);CHKERRQ(ierr);
614   PetscFunctionReturn(0);
615 }
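
/*
   Illustrative sketch (not part of this file or the PETSc API): supplying the storage for an
   MPI vector through VecCreateMPIWithArray().  The array must outlive the vector and is not
   freed by VecDestroy().  The block size of 1 and local size of 4 are arbitrary.
*/
static PetscErrorCode ExampleUsage_CreateMPIWithArray(MPI_Comm comm,Vec *x)
{
  PetscErrorCode     ierr;
  static PetscScalar storage[4];      /* user-owned storage, 4 local entries per process */

  PetscFunctionBegin;
  ierr = VecCreateMPIWithArray(comm,1,4,PETSC_DECIDE,storage,x);CHKERRQ(ierr);
  PetscFunctionReturn(0);
}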
616 
617 #undef __FUNCT__
618 #define __FUNCT__ "VecCreateGhostWithArray"
619 /*@C
620    VecCreateGhostWithArray - Creates a parallel vector with ghost padding on each processor;
621    the caller allocates the array space.
622 
623    Collective on MPI_Comm
624 
625    Input Parameters:
626 +  comm - the MPI communicator to use
627 .  n - local vector length
628 .  N - global vector length (or PETSC_DECIDE to have it calculated if n is given)
629 .  nghost - number of local ghost points
630 .  ghosts - global indices of ghost points (or NULL if not needed), these do not need to be in increasing order (sorted)
631 -  array - the space to store the vector values (of length at least n + nghost)
632 
633    Output Parameter:
634 .  vv - the global vector representation (without ghost points as part of vector)
635 
636    Notes:
637    Use VecGhostGetLocalForm() to access the local, ghosted representation
638    of the vector.
639 
640    This also automatically sets the ISLocalToGlobalMapping() for this vector.
641 
642    Level: advanced
643 
644    Concepts: vectors^creating with array
645    Concepts: vectors^ghosted
646 
647 .seealso: VecCreate(), VecGhostGetLocalForm(), VecGhostRestoreLocalForm(),
648           VecCreateGhost(), VecCreateSeqWithArray(), VecCreateMPIWithArray(),
649           VecCreateGhostBlock(), VecCreateGhostBlockWithArray(), VecMPISetGhost()
650 
651 @*/
652 PetscErrorCode  VecCreateGhostWithArray(MPI_Comm comm,PetscInt n,PetscInt N,PetscInt nghost,const PetscInt ghosts[],const PetscScalar array[],Vec *vv)
653 {
654   PetscErrorCode         ierr;
655   Vec_MPI                *w;
656   PetscScalar            *larray;
657   IS                     from,to;
658   ISLocalToGlobalMapping ltog;
659   PetscInt               rstart,i,*indices;
660 
661   PetscFunctionBegin;
662   *vv = 0;
663 
664   if (n == PETSC_DECIDE)      SETERRQ(PETSC_COMM_SELF,PETSC_ERR_ARG_OUTOFRANGE,"Must set local size");
665   if (nghost == PETSC_DECIDE) SETERRQ(PETSC_COMM_SELF,PETSC_ERR_ARG_OUTOFRANGE,"Must set local ghost size");
666   if (nghost < 0)             SETERRQ(PETSC_COMM_SELF,PETSC_ERR_ARG_OUTOFRANGE,"Ghost length must be >= 0");
667   ierr = PetscSplitOwnership(comm,&n,&N);CHKERRQ(ierr);
668   /* Create global representation */
669   ierr = VecCreate(comm,vv);CHKERRQ(ierr);
670   ierr = VecSetSizes(*vv,n,N);CHKERRQ(ierr);
671   ierr = VecCreate_MPI_Private(*vv,PETSC_TRUE,nghost,array);CHKERRQ(ierr);
672   w    = (Vec_MPI*)(*vv)->data;
673   /* Create local representation */
674   ierr = VecGetArray(*vv,&larray);CHKERRQ(ierr);
675   ierr = VecCreateSeqWithArray(PETSC_COMM_SELF,1,n+nghost,larray,&w->localrep);CHKERRQ(ierr);
676   ierr = PetscLogObjectParent((PetscObject)*vv,(PetscObject)w->localrep);CHKERRQ(ierr);
677   ierr = VecRestoreArray(*vv,&larray);CHKERRQ(ierr);
678 
679   /*
680        Create scatter context for scattering (updating) ghost values
681   */
682   ierr = ISCreateGeneral(comm,nghost,ghosts,PETSC_COPY_VALUES,&from);CHKERRQ(ierr);
683   ierr = ISCreateStride(PETSC_COMM_SELF,nghost,n,1,&to);CHKERRQ(ierr);
684   ierr = VecScatterCreate(*vv,from,w->localrep,to,&w->localupdate);CHKERRQ(ierr);
685   ierr = PetscLogObjectParent((PetscObject)*vv,(PetscObject)w->localupdate);CHKERRQ(ierr);
686   ierr = ISDestroy(&to);CHKERRQ(ierr);
687   ierr = ISDestroy(&from);CHKERRQ(ierr);
688 
689   /* set local to global mapping for ghosted vector */
690   ierr = PetscMalloc1(n+nghost,&indices);CHKERRQ(ierr);
691   ierr = VecGetOwnershipRange(*vv,&rstart,NULL);CHKERRQ(ierr);
692   for (i=0; i<n; i++) {
693     indices[i] = rstart + i;
694   }
695   for (i=0; i<nghost; i++) {
696     indices[n+i] = ghosts[i];
697   }
698   ierr = ISLocalToGlobalMappingCreate(comm,1,n+nghost,indices,PETSC_OWN_POINTER,&ltog);CHKERRQ(ierr);
699   ierr = VecSetLocalToGlobalMapping(*vv,ltog);CHKERRQ(ierr);
700   ierr = ISLocalToGlobalMappingDestroy(&ltog);CHKERRQ(ierr);
701   PetscFunctionReturn(0);
702 }
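
/*
   Illustrative sketch (not part of this file or the PETSc API): VecCreateGhostWithArray() with
   user-owned storage of length n + nghost (owned entries followed by the ghost slots).  The
   sizes and ghost index are arbitrary and assume at least two processes.
*/
static PetscErrorCode ExampleUsage_CreateGhostWithArray(MPI_Comm comm,Vec *x)
{
  PetscErrorCode     ierr;
  PetscMPIInt        rank,size;
  PetscInt           ghosts[1];
  static PetscScalar storage[3];      /* n=2 owned entries + nghost=1 ghost slot */

  PetscFunctionBegin;
  ierr = MPI_Comm_rank(comm,&rank);CHKERRQ(ierr);
  ierr = MPI_Comm_size(comm,&size);CHKERRQ(ierr);
  ghosts[0] = 2*((rank+1)%size);      /* first entry owned by the next process */
  ierr = VecCreateGhostWithArray(comm,2,PETSC_DECIDE,1,ghosts,storage,x);CHKERRQ(ierr);
  PetscFunctionReturn(0);
}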
703 
704 #undef __FUNCT__
705 #define __FUNCT__ "VecCreateGhost"
706 /*@
707    VecCreateGhost - Creates a parallel vector with ghost padding on each processor.
708 
709    Collective on MPI_Comm
710 
711    Input Parameters:
712 +  comm - the MPI communicator to use
713 .  n - local vector length
714 .  N - global vector length (or PETSC_DECIDE to have it calculated if n is given)
715 .  nghost - number of local ghost points
716 -  ghosts - global indices of ghost points, these do not need to be in increasing order (sorted)
717 
718    Output Parameter:
719 .  vv - the global vector representation (without ghost points as part of vector)
720 
721    Notes:
722    Use VecGhostGetLocalForm() to access the local, ghosted representation
723    of the vector.
724 
725    This also automatically sets the ISLocalToGlobalMapping() for this vector.
726 
727    Level: advanced
728 
729    Concepts: vectors^ghosted
730 
731 .seealso: VecCreateSeq(), VecCreate(), VecDuplicate(), VecDuplicateVecs(), VecCreateMPI(),
732           VecGhostGetLocalForm(), VecGhostRestoreLocalForm(), VecGhostUpdateBegin(),
733           VecCreateGhostWithArray(), VecCreateMPIWithArray(), VecGhostUpdateEnd(),
734           VecCreateGhostBlock(), VecCreateGhostBlockWithArray(), VecMPISetGhost()
735 
736 @*/
737 PetscErrorCode  VecCreateGhost(MPI_Comm comm,PetscInt n,PetscInt N,PetscInt nghost,const PetscInt ghosts[],Vec *vv)
738 {
739   PetscErrorCode ierr;
740 
741   PetscFunctionBegin;
742   ierr = VecCreateGhostWithArray(comm,n,N,nghost,ghosts,0,vv);CHKERRQ(ierr);
743   PetscFunctionReturn(0);
744 }
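
/*
   Illustrative sketch (not part of this file or the PETSc API): the typical life cycle of a
   ghosted vector from VecCreateGhost().  Ghost values are refreshed with
   VecGhostUpdateBegin()/End() and read through the local form.  The sizes and ghost index are
   arbitrary and assume at least two processes.
*/
static PetscErrorCode ExampleUsage_CreateGhost(MPI_Comm comm)
{
  PetscErrorCode ierr;
  Vec            x,xlocal;
  PetscInt       ghosts[1];
  PetscMPIInt    rank,size;

  PetscFunctionBegin;
  ierr = MPI_Comm_rank(comm,&rank);CHKERRQ(ierr);
  ierr = MPI_Comm_size(comm,&size);CHKERRQ(ierr);
  ghosts[0] = 2*((rank+1)%size);      /* first entry owned by the next process */
  ierr = VecCreateGhost(comm,2,PETSC_DECIDE,1,ghosts,&x);CHKERRQ(ierr);
  ierr = VecSet(x,(PetscScalar)rank);CHKERRQ(ierr);
  ierr = VecGhostUpdateBegin(x,INSERT_VALUES,SCATTER_FORWARD);CHKERRQ(ierr);
  ierr = VecGhostUpdateEnd(x,INSERT_VALUES,SCATTER_FORWARD);CHKERRQ(ierr);
  ierr = VecGhostGetLocalForm(x,&xlocal);CHKERRQ(ierr);       /* entries 0..1 owned, entry 2 is the ghost */
  ierr = VecGhostRestoreLocalForm(x,&xlocal);CHKERRQ(ierr);
  ierr = VecDestroy(&x);CHKERRQ(ierr);
  PetscFunctionReturn(0);
}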
745 
746 #undef __FUNCT__
747 #define __FUNCT__ "VecMPISetGhost"
748 /*@
749    VecMPISetGhost - Sets the ghost points for an MPI ghost vector
750 
751    Collective on Vec
752 
753    Input Parameters:
754 +  vv - the MPI vector
755 .  nghost - number of local ghost points
756 -  ghosts - global indices of ghost points, these do not need to be in increasing order (sorted)
757 
758 
759    Notes:
760    Use VecGhostGetLocalForm() to access the local, ghosted representation
761    of the vector.
762 
763    This also automatically sets the ISLocalToGlobalMapping() for this vector.
764 
765    You must call this AFTER you have set the type of the vector (with VecSetType()) and the size (with VecSetSizes()).
766 
767    Level: advanced
768 
769    Concepts: vectors^ghosted
770 
771 .seealso: VecCreateSeq(), VecCreate(), VecDuplicate(), VecDuplicateVecs(), VecCreateMPI(),
772           VecGhostGetLocalForm(), VecGhostRestoreLocalForm(), VecGhostUpdateBegin(),
773           VecCreateGhostWithArray(), VecCreateMPIWithArray(), VecGhostUpdateEnd(),
774           VecCreateGhostBlock(), VecCreateGhostBlockWithArray()
775 
776 @*/
777 PetscErrorCode  VecMPISetGhost(Vec vv,PetscInt nghost,const PetscInt ghosts[])
778 {
779   PetscErrorCode ierr;
780   PetscBool      flg;
781 
782   PetscFunctionBegin;
783   ierr = PetscObjectTypeCompare((PetscObject)vv,VECMPI,&flg);CHKERRQ(ierr);
784   /* if this is already a fully built VECMPI, destroy it and rebuild it with ghosting */
785   if (flg) {
786     PetscInt               n,N;
787     Vec_MPI                *w;
788     PetscScalar            *larray;
789     IS                     from,to;
790     ISLocalToGlobalMapping ltog;
791     PetscInt               rstart,i,*indices;
792     MPI_Comm               comm;
793 
794     ierr = PetscObjectGetComm((PetscObject)vv,&comm);CHKERRQ(ierr);
795     n    = vv->map->n;
796     N    = vv->map->N;
797     ierr = (*vv->ops->destroy)(vv);CHKERRQ(ierr);
798     ierr = VecSetSizes(vv,n,N);CHKERRQ(ierr);
799     ierr = VecCreate_MPI_Private(vv,PETSC_TRUE,nghost,NULL);CHKERRQ(ierr);
800     w    = (Vec_MPI*)(vv)->data;
801     /* Create local representation */
802     ierr = VecGetArray(vv,&larray);CHKERRQ(ierr);
803     ierr = VecCreateSeqWithArray(PETSC_COMM_SELF,1,n+nghost,larray,&w->localrep);CHKERRQ(ierr);
804     ierr = PetscLogObjectParent((PetscObject)vv,(PetscObject)w->localrep);CHKERRQ(ierr);
805     ierr = VecRestoreArray(vv,&larray);CHKERRQ(ierr);
806 
807     /*
808      Create scatter context for scattering (updating) ghost values
809      */
810     ierr = ISCreateGeneral(comm,nghost,ghosts,PETSC_COPY_VALUES,&from);CHKERRQ(ierr);
811     ierr = ISCreateStride(PETSC_COMM_SELF,nghost,n,1,&to);CHKERRQ(ierr);
812     ierr = VecScatterCreate(vv,from,w->localrep,to,&w->localupdate);CHKERRQ(ierr);
813     ierr = PetscLogObjectParent((PetscObject)vv,(PetscObject)w->localupdate);CHKERRQ(ierr);
814     ierr = ISDestroy(&to);CHKERRQ(ierr);
815     ierr = ISDestroy(&from);CHKERRQ(ierr);
816 
817     /* set local to global mapping for ghosted vector */
818     ierr = PetscMalloc1(n+nghost,&indices);CHKERRQ(ierr);
819     ierr = VecGetOwnershipRange(vv,&rstart,NULL);CHKERRQ(ierr);
820 
821     for (i=0; i<n; i++)      indices[i]   = rstart + i;
822     for (i=0; i<nghost; i++) indices[n+i] = ghosts[i];
823 
824     ierr = ISLocalToGlobalMappingCreate(comm,1,n+nghost,indices,PETSC_OWN_POINTER,&ltog);CHKERRQ(ierr);
825     ierr = VecSetLocalToGlobalMapping(vv,ltog);CHKERRQ(ierr);
826     ierr = ISLocalToGlobalMappingDestroy(&ltog);CHKERRQ(ierr);
827   } else if (vv->ops->create == VecCreate_MPI) SETERRQ(PetscObjectComm((PetscObject)vv),PETSC_ERR_ARG_WRONGSTATE,"Must set local or global size before setting ghosting");
828   else if (!((PetscObject)vv)->type_name) SETERRQ(PetscObjectComm((PetscObject)vv),PETSC_ERR_ARG_WRONGSTATE,"Must set type to VECMPI before ghosting");
829   PetscFunctionReturn(0);
830 }
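
/*
   Illustrative sketch (not part of this file or the PETSc API): adding ghosting to an existing
   VECMPI with VecMPISetGhost().  As the man page above notes, the type and sizes must be set
   first.  The sizes and ghost index are arbitrary and assume at least two processes.
*/
static PetscErrorCode ExampleUsage_MPISetGhost(MPI_Comm comm,Vec *x)
{
  PetscErrorCode ierr;
  PetscMPIInt    rank,size;
  PetscInt       ghosts[1];

  PetscFunctionBegin;
  ierr = MPI_Comm_rank(comm,&rank);CHKERRQ(ierr);
  ierr = MPI_Comm_size(comm,&size);CHKERRQ(ierr);
  ghosts[0] = 2*((rank+1)%size);
  ierr = VecCreate(comm,x);CHKERRQ(ierr);
  ierr = VecSetSizes(*x,2,PETSC_DECIDE);CHKERRQ(ierr);
  ierr = VecSetType(*x,VECMPI);CHKERRQ(ierr);
  ierr = VecMPISetGhost(*x,1,ghosts);CHKERRQ(ierr);
  PetscFunctionReturn(0);
}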
831 
832 
833 /* ------------------------------------------------------------------------------------------*/
834 #undef __FUNCT__
835 #define __FUNCT__ "VecCreateGhostBlockWithArray"
836 /*@C
837    VecCreateGhostBlockWithArray - Creates a parallel vector with ghost padding on each processor;
838    the caller allocates the array space. Indices in the ghost region are based on blocks.
839 
840    Collective on MPI_Comm
841 
842    Input Parameters:
843 +  comm - the MPI communicator to use
844 .  bs - block size
845 .  n - local vector length
846 .  N - global vector length (or PETSC_DECIDE to have it calculated if n is given)
847 .  nghost - number of local ghost blocks
848 .  ghosts - global indices of ghost blocks (or NULL if not needed), counts are by block not by index, these do not need to be in increasing order (sorted)
849 -  array - the space to store the vector values (of length at least n + nghost*bs)
850 
851    Output Parameter:
852 .  vv - the global vector representation (without ghost points as part of vector)
853 
854    Notes:
855    Use VecGhostGetLocalForm() to access the local, ghosted representation
856    of the vector.
857 
858    n is the local vector size (the total local size, not the number of blocks), while nghost
859    is the number of blocks in the ghost portion, i.e. the number of elements in the ghost
860    portion is bs*nghost
861 
862    Level: advanced
863 
864    Concepts: vectors^creating ghosted
865    Concepts: vectors^creating with array
866 
867 .seealso: VecCreate(), VecGhostGetLocalForm(), VecGhostRestoreLocalForm(),
868           VecCreateGhost(), VecCreateSeqWithArray(), VecCreateMPIWithArray(),
869           VecCreateGhostWithArray(), VecCreateGhostBlock()
870 
871 @*/
872 PetscErrorCode  VecCreateGhostBlockWithArray(MPI_Comm comm,PetscInt bs,PetscInt n,PetscInt N,PetscInt nghost,const PetscInt ghosts[],const PetscScalar array[],Vec *vv)
873 {
874   PetscErrorCode         ierr;
875   Vec_MPI                *w;
876   PetscScalar            *larray;
877   IS                     from,to;
878   ISLocalToGlobalMapping ltog;
879   PetscInt               rstart,i,nb,*indices;
880 
881   PetscFunctionBegin;
882   *vv = 0;
883 
884   if (n == PETSC_DECIDE)      SETERRQ(PETSC_COMM_SELF,PETSC_ERR_ARG_OUTOFRANGE,"Must set local size");
885   if (nghost == PETSC_DECIDE) SETERRQ(PETSC_COMM_SELF,PETSC_ERR_ARG_OUTOFRANGE,"Must set local ghost size");
886   if (nghost < 0)             SETERRQ(PETSC_COMM_SELF,PETSC_ERR_ARG_OUTOFRANGE,"Ghost length must be >= 0");
887   if (n % bs)                 SETERRQ(PETSC_COMM_SELF,PETSC_ERR_ARG_INCOMP,"Local size must be a multiple of block size");
888   ierr = PetscSplitOwnership(comm,&n,&N);CHKERRQ(ierr);
889   /* Create global representation */
890   ierr = VecCreate(comm,vv);CHKERRQ(ierr);
891   ierr = VecSetSizes(*vv,n,N);CHKERRQ(ierr);
892   ierr = VecSetBlockSize(*vv,bs);CHKERRQ(ierr);
893   ierr = VecCreate_MPI_Private(*vv,PETSC_TRUE,nghost*bs,array);CHKERRQ(ierr);
894   w    = (Vec_MPI*)(*vv)->data;
895   /* Create local representation */
896   ierr = VecGetArray(*vv,&larray);CHKERRQ(ierr);
897   ierr = VecCreateSeqWithArray(PETSC_COMM_SELF,bs,n+bs*nghost,larray,&w->localrep);CHKERRQ(ierr);
898   ierr = PetscLogObjectParent((PetscObject)*vv,(PetscObject)w->localrep);CHKERRQ(ierr);
899   ierr = VecRestoreArray(*vv,&larray);CHKERRQ(ierr);
900 
901   /*
902        Create scatter context for scattering (updating) ghost values
903   */
904   ierr = ISCreateBlock(comm,bs,nghost,ghosts,PETSC_COPY_VALUES,&from);CHKERRQ(ierr);
905   ierr = ISCreateStride(PETSC_COMM_SELF,bs*nghost,n,1,&to);CHKERRQ(ierr);
906   ierr = VecScatterCreate(*vv,from,w->localrep,to,&w->localupdate);CHKERRQ(ierr);
907   ierr = PetscLogObjectParent((PetscObject)*vv,(PetscObject)w->localupdate);CHKERRQ(ierr);
908   ierr = ISDestroy(&to);CHKERRQ(ierr);
909   ierr = ISDestroy(&from);CHKERRQ(ierr);
910 
911   /* set local to global mapping for ghosted vector */
912   nb   = n/bs;
913   ierr = PetscMalloc1(nb+nghost,&indices);CHKERRQ(ierr);
914   ierr = VecGetOwnershipRange(*vv,&rstart,NULL);CHKERRQ(ierr);
915 
916   for (i=0; i<nb; i++)      indices[i]    = rstart + i;
917   for (i=0; i<nghost; i++)  indices[nb+i] = ghosts[i];
918 
919   ierr = ISLocalToGlobalMappingCreate(comm,bs,nb+nghost,indices,PETSC_OWN_POINTER,&ltog);CHKERRQ(ierr);
920   ierr = VecSetLocalToGlobalMapping(*vv,ltog);CHKERRQ(ierr);
921   ierr = ISLocalToGlobalMappingDestroy(&ltog);CHKERRQ(ierr);
922   PetscFunctionReturn(0);
923 }
924 
925 #undef __FUNCT__
926 #define __FUNCT__ "VecCreateGhostBlock"
927 /*@
928    VecCreateGhostBlock - Creates a parallel vector with ghost padding on each processor.
929         The indexing of the ghost points is done by blocks.
930 
931    Collective on MPI_Comm
932 
933    Input Parameters:
934 +  comm - the MPI communicator to use
935 .  bs - the block size
936 .  n - local vector length
937 .  N - global vector length (or PETSC_DECIDE to have it calculated if n is given)
938 .  nghost - number of local ghost blocks
939 -  ghosts - global indices of ghost blocks, counts are by block, not by individual index, these do not need to be in increasing order (sorted)
940 
941    Output Parameter:
942 .  vv - the global vector representation (without ghost points as part of vector)
943 
944    Notes:
945    Use VecGhostGetLocalForm() to access the local, ghosted representation
946    of the vector.
947 
948    n is the local vector size (the total local size, not the number of blocks), while nghost
949    is the number of blocks in the ghost portion, i.e. the number of elements in the ghost
950    portion is bs*nghost
951 
952    Level: advanced
953 
954    Concepts: vectors^ghosted
955 
956 .seealso: VecCreateSeq(), VecCreate(), VecDuplicate(), VecDuplicateVecs(), VecCreateMPI(),
957           VecGhostGetLocalForm(), VecGhostRestoreLocalForm(),
958           VecCreateGhostWithArray(), VecCreateMPIWithArray(), VecCreateGhostBlockWithArray()
959 
960 @*/
961 PetscErrorCode  VecCreateGhostBlock(MPI_Comm comm,PetscInt bs,PetscInt n,PetscInt N,PetscInt nghost,const PetscInt ghosts[],Vec *vv)
962 {
963   PetscErrorCode ierr;
964 
965   PetscFunctionBegin;
966   ierr = VecCreateGhostBlockWithArray(comm,bs,n,N,nghost,ghosts,0,vv);CHKERRQ(ierr);
967   PetscFunctionReturn(0);
968 }
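
/*
   Illustrative sketch (not part of this file or the PETSc API): a blocked ghosted vector from
   VecCreateGhostBlock().  With bs=2 and local length n=4 (two blocks per process), one ghost
   block imports the bs entries of the next process's first block; ghost indices are block
   indices, not entry indices.  The numbers are arbitrary and assume at least two processes.
*/
static PetscErrorCode ExampleUsage_CreateGhostBlock(MPI_Comm comm,Vec *x)
{
  PetscErrorCode ierr;
  PetscMPIInt    rank,size;
  PetscInt       bghosts[1];

  PetscFunctionBegin;
  ierr = MPI_Comm_rank(comm,&rank);CHKERRQ(ierr);
  ierr = MPI_Comm_size(comm,&size);CHKERRQ(ierr);
  bghosts[0] = 2*((rank+1)%size);     /* block index of the next process's first block */
  ierr = VecCreateGhostBlock(comm,2,4,PETSC_DECIDE,1,bghosts,x);CHKERRQ(ierr);
  ierr = VecGhostUpdateBegin(*x,INSERT_VALUES,SCATTER_FORWARD);CHKERRQ(ierr);
  ierr = VecGhostUpdateEnd(*x,INSERT_VALUES,SCATTER_FORWARD);CHKERRQ(ierr);
  PetscFunctionReturn(0);
}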
969