
/*
   This file contains routines for Parallel vector operations.
 */
#include <../src/vec/vec/impls/mpi/pvecimpl.h>   /*I  "petscvec.h"   I*/

#undef __FUNCT__
#define __FUNCT__ "VecDot_MPI"
static PetscErrorCode VecDot_MPI(Vec xin,Vec yin,PetscScalar *z)
{
  PetscScalar    sum,work;
  PetscErrorCode ierr;

  PetscFunctionBegin;
  ierr = VecDot_Seq(xin,yin,&work);CHKERRQ(ierr);
  ierr = MPI_Allreduce(&work,&sum,1,MPIU_SCALAR,MPIU_SUM,PetscObjectComm((PetscObject)xin));CHKERRQ(ierr);
  *z   = sum;
  PetscFunctionReturn(0);
}

#undef __FUNCT__
#define __FUNCT__ "VecTDot_MPI"
static PetscErrorCode VecTDot_MPI(Vec xin,Vec yin,PetscScalar *z)
{
  PetscScalar    sum,work;
  PetscErrorCode ierr;

  PetscFunctionBegin;
  ierr = VecTDot_Seq(xin,yin,&work);CHKERRQ(ierr);
  ierr = MPI_Allreduce(&work,&sum,1,MPIU_SCALAR,MPIU_SUM,PetscObjectComm((PetscObject)xin));CHKERRQ(ierr);
  *z   = sum;
  PetscFunctionReturn(0);
}

extern PetscErrorCode VecView_MPI_Draw(Vec,PetscViewer);

#undef __FUNCT__
#define __FUNCT__ "VecPlaceArray_MPI"
static PetscErrorCode VecPlaceArray_MPI(Vec vin,const PetscScalar *a)
{
  PetscErrorCode ierr;
  Vec_MPI        *v = (Vec_MPI*)vin->data;

  PetscFunctionBegin;
  if (v->unplacedarray) SETERRQ(PETSC_COMM_SELF,PETSC_ERR_ARG_WRONGSTATE,"VecPlaceArray() was already called on this vector, without a call to VecResetArray()");
  v->unplacedarray = v->array;  /* save previous array so reset can bring it back */
  v->array         = (PetscScalar*)a;
  if (v->localrep) {
    ierr = VecPlaceArray(v->localrep,a);CHKERRQ(ierr);
  }
  PetscFunctionReturn(0);
}
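
/*
   A minimal usage sketch of the place/reset pairing that VecPlaceArray_MPI() enforces above:
   the vector temporarily reads and writes a caller-provided buffer, and VecResetArray()
   must be called before another array can be placed.  The vector x, the buffer myarray
   (of at least the local length), and nrm are assumed to exist already.

     ierr = VecPlaceArray(x,myarray);CHKERRQ(ierr);
     ierr = VecNorm(x,NORM_2,&nrm);CHKERRQ(ierr);
     ierr = VecResetArray(x);CHKERRQ(ierr);
*/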

extern PetscErrorCode VecGetValues_MPI(Vec,PetscInt,const PetscInt[],PetscScalar[]);

#undef __FUNCT__
#define __FUNCT__ "VecDuplicate_MPI"
static PetscErrorCode VecDuplicate_MPI(Vec win,Vec *v)
{
  PetscErrorCode ierr;
  Vec_MPI        *vw,*w = (Vec_MPI*)win->data;
  PetscScalar    *array;

  PetscFunctionBegin;
  ierr = VecCreate(PetscObjectComm((PetscObject)win),v);CHKERRQ(ierr);
  ierr = PetscLayoutReference(win->map,&(*v)->map);CHKERRQ(ierr);

  ierr = VecCreate_MPI_Private(*v,PETSC_TRUE,w->nghost,0);CHKERRQ(ierr);
  vw   = (Vec_MPI*)(*v)->data;
  ierr = PetscMemcpy((*v)->ops,win->ops,sizeof(struct _VecOps));CHKERRQ(ierr);

  /* save local representation of the parallel vector (and scatter) if it exists */
  if (w->localrep) {
    ierr = VecGetArray(*v,&array);CHKERRQ(ierr);
    ierr = VecCreateSeqWithArray(PETSC_COMM_SELF,win->map->bs,win->map->n+w->nghost,array,&vw->localrep);CHKERRQ(ierr);
    ierr = PetscMemcpy(vw->localrep->ops,w->localrep->ops,sizeof(struct _VecOps));CHKERRQ(ierr);
    ierr = VecRestoreArray(*v,&array);CHKERRQ(ierr);
    ierr = PetscLogObjectParent((PetscObject)*v,(PetscObject)vw->localrep);CHKERRQ(ierr);

    vw->localupdate = w->localupdate;
    if (vw->localupdate) {
      ierr = PetscObjectReference((PetscObject)vw->localupdate);CHKERRQ(ierr);
    }
  }

  /* New vector should inherit stashing property of parent */
  (*v)->stash.donotstash   = win->stash.donotstash;
  (*v)->stash.ignorenegidx = win->stash.ignorenegidx;

  ierr = PetscObjectListDuplicate(((PetscObject)win)->olist,&((PetscObject)(*v))->olist);CHKERRQ(ierr);
  ierr = PetscFunctionListDuplicate(((PetscObject)win)->qlist,&((PetscObject)(*v))->qlist);CHKERRQ(ierr);

  (*v)->map->bs   = win->map->bs;
  (*v)->bstash.bs = win->bstash.bs;
  PetscFunctionReturn(0);
}

extern PetscErrorCode VecSetOption_MPI(Vec,VecOption,PetscBool);
extern PetscErrorCode VecResetArray_MPI(Vec);

#undef __FUNCT__
#define __FUNCT__ "VecAssemblySend_MPI_Private"
static PetscErrorCode VecAssemblySend_MPI_Private(MPI_Comm comm,const PetscMPIInt tag[],PetscMPIInt rankid,PetscMPIInt rank,void *sdata,MPI_Request req[],void *ctx)
{
  Vec X = (Vec)ctx;
  Vec_MPI *x = (Vec_MPI*)X->data;
  VecAssemblyHeader *hdr = (VecAssemblyHeader*)sdata;
  PetscInt bs = X->map->bs;
  PetscErrorCode ierr;

  PetscFunctionBegin;
  if (hdr->count) {
    ierr = MPI_Isend(x->sendptrs[rankid].ints,hdr->count,MPIU_INT,rank,tag[0],comm,&req[0]);CHKERRQ(ierr);
    ierr = MPI_Isend(x->sendptrs[rankid].scalars,hdr->count,MPIU_SCALAR,rank,tag[1],comm,&req[1]);CHKERRQ(ierr);
  }
  if (hdr->bcount) {
    ierr = MPI_Isend(x->sendptrs[rankid].intb,hdr->bcount,MPIU_INT,rank,tag[2],comm,&req[2]);CHKERRQ(ierr);
    ierr = MPI_Isend(x->sendptrs[rankid].scalarb,hdr->bcount*bs,MPIU_SCALAR,rank,tag[3],comm,&req[3]);CHKERRQ(ierr);
  }
  PetscFunctionReturn(0);
}

#undef __FUNCT__
#define __FUNCT__ "VecAssemblyRecv_MPI_Private"
static PetscErrorCode VecAssemblyRecv_MPI_Private(MPI_Comm comm,const PetscMPIInt tag[],PetscMPIInt rank,void *rdata,MPI_Request req[],void *ctx)
{
  Vec X = (Vec)ctx;
  Vec_MPI *x = (Vec_MPI*)X->data;
  VecAssemblyHeader *hdr = (VecAssemblyHeader*)rdata;
  PetscErrorCode ierr;
  PetscInt bs = X->map->bs;
  VecAssemblyFrame *frame;

  PetscFunctionBegin;
  ierr = PetscSegBufferGet(x->segrecvframe,1,&frame);CHKERRQ(ierr);

  if (hdr->count) {
    ierr = PetscSegBufferGet(x->segrecvint,hdr->count,&frame->ints);CHKERRQ(ierr);
    ierr = MPI_Irecv(frame->ints,hdr->count,MPIU_INT,rank,tag[0],comm,&req[0]);CHKERRQ(ierr);
    ierr = PetscSegBufferGet(x->segrecvscalar,hdr->count,&frame->scalars);CHKERRQ(ierr);
    ierr = MPI_Irecv(frame->scalars,hdr->count,MPIU_SCALAR,rank,tag[1],comm,&req[1]);CHKERRQ(ierr);
    frame->pendings = 2;
  } else {
    frame->ints = NULL;
    frame->scalars = NULL;
    frame->pendings = 0;
  }

  if (hdr->bcount) {
    ierr = PetscSegBufferGet(x->segrecvint,hdr->bcount,&frame->intb);CHKERRQ(ierr);
    ierr = MPI_Irecv(frame->intb,hdr->bcount,MPIU_INT,rank,tag[2],comm,&req[2]);CHKERRQ(ierr);
    ierr = PetscSegBufferGet(x->segrecvscalar,hdr->bcount*bs,&frame->scalarb);CHKERRQ(ierr);
    ierr = MPI_Irecv(frame->scalarb,hdr->bcount*bs,MPIU_SCALAR,rank,tag[3],comm,&req[3]);CHKERRQ(ierr);
    frame->pendingb = 2;
  } else {
    frame->intb = NULL;
    frame->scalarb = NULL;
    frame->pendingb = 0;
  }
  PetscFunctionReturn(0);
}

#undef __FUNCT__
#define __FUNCT__ "VecAssemblyBegin_MPI_BTS"
static PetscErrorCode VecAssemblyBegin_MPI_BTS(Vec X)
{
  Vec_MPI        *x = (Vec_MPI*)X->data;
  PetscErrorCode ierr;
  MPI_Comm       comm;
  PetscInt       i,j,jb,bs;

  PetscFunctionBegin;
  if (X->stash.donotstash) PetscFunctionReturn(0);

  ierr = PetscObjectGetComm((PetscObject)X,&comm);CHKERRQ(ierr);
  ierr = VecGetBlockSize(X,&bs);CHKERRQ(ierr);
#if defined(PETSC_USE_DEBUG)
  {
    InsertMode addv;
    ierr = MPI_Allreduce((PetscEnum*)&X->stash.insertmode,(PetscEnum*)&addv,1,MPIU_ENUM,MPI_BOR,comm);CHKERRQ(ierr);
    if (addv == (ADD_VALUES|INSERT_VALUES)) SETERRQ(comm,PETSC_ERR_ARG_NOTSAMETYPE,"Some processors inserted values while others added");
  }
#endif

  ierr = VecStashSortCompress_Private(&X->stash);CHKERRQ(ierr);
  ierr = VecStashSortCompress_Private(&X->bstash);CHKERRQ(ierr);

  if (!x->sendranks) {
    PetscInt nowners,bnowners,*owners,*bowners;
    ierr = VecStashGetOwnerList_Private(&X->stash,X->map,&nowners,&owners);CHKERRQ(ierr);
    ierr = VecStashGetOwnerList_Private(&X->bstash,X->map,&bnowners,&bowners);CHKERRQ(ierr);
    ierr = PetscMergeMPIIntArray(nowners,owners,bnowners,bowners,&x->nsendranks,&x->sendranks);CHKERRQ(ierr);
    ierr = PetscFree(owners);CHKERRQ(ierr);
    ierr = PetscFree(bowners);CHKERRQ(ierr);
    ierr = PetscMalloc1(x->nsendranks,&x->sendhdr);CHKERRQ(ierr);
    ierr = PetscMalloc1(x->nsendranks,&x->sendptrs);CHKERRQ(ierr);
  }
  for (i=0,j=0,jb=0; i<x->nsendranks; i++) {
    PetscMPIInt rank = x->sendranks[i];
    x->sendhdr[i].insertmode = X->stash.insertmode;
    x->sendptrs[i].ints    = &X->stash.idx[j];
    x->sendptrs[i].scalars = &X->stash.array[j];
    for (x->sendhdr[i].count=0; j<X->stash.n && X->stash.idx[j] < X->map->range[rank+1]; j++) x->sendhdr[i].count++;
    x->sendptrs[i].intb    = &X->bstash.idx[jb];
    x->sendptrs[i].scalarb = &X->bstash.array[jb*bs];
    for (x->sendhdr[i].bcount=0; jb<X->bstash.n && X->bstash.idx[jb]*bs < X->map->range[rank+1]; jb++) x->sendhdr[i].bcount++;
  }

  if (!x->segrecvint) {ierr = PetscSegBufferCreate(sizeof(PetscInt),1000,&x->segrecvint);CHKERRQ(ierr);}
  if (!x->segrecvscalar) {ierr = PetscSegBufferCreate(sizeof(PetscScalar),1000,&x->segrecvscalar);CHKERRQ(ierr);}
  if (!x->segrecvframe) {ierr = PetscSegBufferCreate(sizeof(VecAssemblyFrame),50,&x->segrecvframe);CHKERRQ(ierr);}
  if (x->recvhdr) {             /* VEC_SUBSET_OFF_PROC_ENTRIES and this is not the first assembly */
    PetscMPIInt tag[4];
    if (!x->assembly_subset) SETERRQ(comm,PETSC_ERR_PLIB,"Attempt to reuse rendezvous when not VEC_SUBSET_OFF_PROC_ENTRIES");
    for (i=0; i<4; i++) {ierr = PetscCommGetNewTag(comm,&tag[i]);CHKERRQ(ierr);}
    for (i=0; i<x->nsendranks; i++) {
      ierr = VecAssemblySend_MPI_Private(comm,tag,i,x->sendranks[i],x->sendhdr+i,x->sendreqs+4*i,X);CHKERRQ(ierr);
    }
    for (i=0; i<x->nrecvranks; i++) {
      ierr = VecAssemblyRecv_MPI_Private(comm,tag,x->recvranks[i],x->recvhdr+i,x->recvreqs+4*i,X);CHKERRQ(ierr);
    }
    x->use_status = PETSC_TRUE;
  } else {                      /* First time */
    ierr = PetscCommBuildTwoSidedFReq(comm,3,MPIU_INT,x->nsendranks,x->sendranks,(PetscInt*)x->sendhdr,&x->nrecvranks,&x->recvranks,&x->recvhdr,4,&x->sendreqs,&x->recvreqs,VecAssemblySend_MPI_Private,VecAssemblyRecv_MPI_Private,X);CHKERRQ(ierr);
    x->use_status = PETSC_FALSE;
  }

  {
    PetscInt nstash,reallocs;
    ierr = VecStashGetInfo_Private(&X->stash,&nstash,&reallocs);CHKERRQ(ierr);
    ierr = PetscInfo2(X,"Stash has %D entries, uses %D mallocs.\n",nstash,reallocs);CHKERRQ(ierr);
    ierr = VecStashGetInfo_Private(&X->bstash,&nstash,&reallocs);CHKERRQ(ierr);
    ierr = PetscInfo2(X,"Block-Stash has %D entries, uses %D mallocs.\n",nstash,reallocs);CHKERRQ(ierr);
  }
  PetscFunctionReturn(0);
}

#undef __FUNCT__
#define __FUNCT__ "VecAssemblyEnd_MPI_BTS"
static PetscErrorCode VecAssemblyEnd_MPI_BTS(Vec X)
{
  Vec_MPI *x = (Vec_MPI*)X->data;
  PetscInt bs = X->map->bs;
  PetscMPIInt npending,*some_indices,r;
  MPI_Status  *some_statuses;
  PetscScalar *xarray;
  PetscErrorCode ierr;
  VecAssemblyFrame *frame;

  PetscFunctionBegin;
  if (X->stash.donotstash) PetscFunctionReturn(0);

  ierr = VecGetArray(X,&xarray);CHKERRQ(ierr);
  ierr = PetscSegBufferExtractInPlace(x->segrecvframe,&frame);CHKERRQ(ierr);
  ierr = PetscMalloc2(4*x->nrecvranks,&some_indices,x->use_status?4*x->nrecvranks:0,&some_statuses);CHKERRQ(ierr);
  for (r=0,npending=0; r<x->nrecvranks; r++) npending += frame[r].pendings + frame[r].pendingb;
  while (npending>0) {
    PetscMPIInt ndone,ii;
    /* Filling MPI_Status fields requires some resources from the MPI library.  We skip it on the first assembly, or
     * when VEC_SUBSET_OFF_PROC_ENTRIES has not been set, because we could exchange exact sizes in the initial
     * rendezvous.  When the rendezvous is elided, however, we use MPI_Status to get actual message lengths, so that
     * subsequent assembly can set a proper subset of the values. */
    ierr = MPI_Waitsome(4*x->nrecvranks,x->recvreqs,&ndone,some_indices,x->use_status?some_statuses:MPI_STATUSES_IGNORE);CHKERRQ(ierr);
    for (ii=0; ii<ndone; ii++) {
      PetscInt i = some_indices[ii]/4,j,k;
      InsertMode imode = x->recvhdr[i].insertmode;
      PetscInt *recvint;
      PetscScalar *recvscalar;
      PetscBool intmsg = (PetscBool)(some_indices[ii]%2 == 0);
      PetscBool blockmsg = (PetscBool)((some_indices[ii]%4)/2 == 1);
      npending--;
      if (!blockmsg) { /* Scalar stash */
        PetscMPIInt count;
        if (--frame[i].pendings > 0) continue;
        if (x->use_status) {
          ierr = MPI_Get_count(&some_statuses[ii],intmsg ? MPIU_INT : MPIU_SCALAR,&count);CHKERRQ(ierr);
        } else count = x->recvhdr[i].count;
        for (j=0,recvint=frame[i].ints,recvscalar=frame[i].scalars; j<count; j++,recvint++) {
          PetscInt loc = *recvint - X->map->rstart;
          if (*recvint < X->map->rstart || X->map->rend <= *recvint) SETERRQ3(PETSC_COMM_SELF,PETSC_ERR_PLIB,"Received vector entry %D out of local range [%D,%D)",*recvint,X->map->rstart,X->map->rend);
          switch (imode) {
          case ADD_VALUES:
            xarray[loc] += *recvscalar++;
            break;
          case INSERT_VALUES:
            xarray[loc] = *recvscalar++;
            break;
          default: SETERRQ1(PETSC_COMM_SELF,PETSC_ERR_SUP,"Insert mode not supported 0x%x",imode);
          }
        }
      } else {                  /* Block stash */
        PetscMPIInt count;
        if (--frame[i].pendingb > 0) continue;
        if (x->use_status) {
          ierr = MPI_Get_count(&some_statuses[ii],intmsg ? MPIU_INT : MPIU_SCALAR,&count);CHKERRQ(ierr);
          if (!intmsg) count /= bs; /* Convert from number of scalars to number of blocks */
        } else count = x->recvhdr[i].bcount;
        for (j=0,recvint=frame[i].intb,recvscalar=frame[i].scalarb; j<count; j++,recvint++) {
          PetscInt loc = (*recvint)*bs - X->map->rstart;
          switch (imode) {
          case ADD_VALUES:
            for (k=loc; k<loc+bs; k++) xarray[k] += *recvscalar++;
            break;
          case INSERT_VALUES:
            for (k=loc; k<loc+bs; k++) xarray[k] = *recvscalar++;
            break;
          default: SETERRQ1(PETSC_COMM_SELF,PETSC_ERR_SUP,"Insert mode not supported 0x%x",imode);
          }
        }
      }
    }
  }
  ierr = VecRestoreArray(X,&xarray);CHKERRQ(ierr);
  ierr = MPI_Waitall(4*x->nsendranks,x->sendreqs,MPI_STATUSES_IGNORE);CHKERRQ(ierr);
  ierr = PetscFree2(some_indices,some_statuses);CHKERRQ(ierr);
  if (x->assembly_subset) {
    void *dummy;                /* reset segbuffers */
    ierr = PetscSegBufferExtractInPlace(x->segrecvint,&dummy);CHKERRQ(ierr);
    ierr = PetscSegBufferExtractInPlace(x->segrecvscalar,&dummy);CHKERRQ(ierr);
  } else {
    ierr = VecAssemblyReset_MPI(X);CHKERRQ(ierr);
  }

  X->stash.insertmode = NOT_SET_VALUES;
  ierr = VecStashScatterEnd_Private(&X->stash);CHKERRQ(ierr);
  ierr = VecStashScatterEnd_Private(&X->bstash);CHKERRQ(ierr);
  PetscFunctionReturn(0);
}
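
/*
   A minimal usage sketch of the stash-based assembly implemented by VecAssemblyBegin_MPI_BTS()
   and VecAssemblyEnd_MPI_BTS().  The vector X is assumed to exist with its sizes set; row and
   value are illustrative.  Setting VEC_SUBSET_OFF_PROC_ENTRIES lets later assemblies reuse the
   communication pattern established by the first one.

     ierr = VecSetOption(X,VEC_SUBSET_OFF_PROC_ENTRIES,PETSC_TRUE);CHKERRQ(ierr);
     ierr = VecSetValues(X,1,&row,&value,ADD_VALUES);CHKERRQ(ierr);
     ierr = VecAssemblyBegin(X);CHKERRQ(ierr);
     ierr = VecAssemblyEnd(X);CHKERRQ(ierr);
*/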

#undef __FUNCT__
#define __FUNCT__ "VecAssemblyReset_MPI"
PetscErrorCode VecAssemblyReset_MPI(Vec X)
{
  Vec_MPI *x = (Vec_MPI*)X->data;
  PetscErrorCode ierr;

  PetscFunctionBegin;
  ierr = PetscFree(x->sendreqs);CHKERRQ(ierr);
  ierr = PetscFree(x->recvreqs);CHKERRQ(ierr);
  ierr = PetscFree(x->sendranks);CHKERRQ(ierr);
  ierr = PetscFree(x->recvranks);CHKERRQ(ierr);
  ierr = PetscFree(x->sendhdr);CHKERRQ(ierr);
  ierr = PetscFree(x->recvhdr);CHKERRQ(ierr);
  ierr = PetscFree(x->sendptrs);CHKERRQ(ierr);
  ierr = PetscSegBufferDestroy(&x->segrecvint);CHKERRQ(ierr);
  ierr = PetscSegBufferDestroy(&x->segrecvscalar);CHKERRQ(ierr);
  ierr = PetscSegBufferDestroy(&x->segrecvframe);CHKERRQ(ierr);
  PetscFunctionReturn(0);
}

#undef __FUNCT__
#define __FUNCT__ "VecSetFromOptions_MPI"
static PetscErrorCode VecSetFromOptions_MPI(Vec X)
{
  PetscErrorCode ierr;
  PetscBool      flg = PETSC_FALSE,set;

  PetscFunctionBegin;
  ierr = PetscOptionsBool("-vec_assembly_bts","Use BuildTwoSided version of assembly","",flg,&flg,&set);CHKERRQ(ierr);
  if (set) {
    X->ops->assemblybegin = flg ? VecAssemblyBegin_MPI_BTS : VecAssemblyBegin_MPI;
    X->ops->assemblyend   = flg ? VecAssemblyEnd_MPI_BTS   : VecAssemblyEnd_MPI;
  }
  PetscFunctionReturn(0);
}
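
/*
   The BuildTwoSided assembly path above is chosen at runtime.  A minimal sketch, assuming X
   is a VECMPI vector whose sizes are already set; the same choice can be made on the command
   line with -vec_assembly_bts.

     ierr = VecSetFromOptions(X);CHKERRQ(ierr);
     ierr = VecAssemblyBegin(X);CHKERRQ(ierr);
     ierr = VecAssemblyEnd(X);CHKERRQ(ierr);
*/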

static struct _VecOps DvOps = { VecDuplicate_MPI, /* 1 */
                                VecDuplicateVecs_Default,
                                VecDestroyVecs_Default,
                                VecDot_MPI,
                                VecMDot_MPI,
                                VecNorm_MPI,
                                VecTDot_MPI,
                                VecMTDot_MPI,
                                VecScale_Seq,
                                VecCopy_Seq, /* 10 */
                                VecSet_Seq,
                                VecSwap_Seq,
                                VecAXPY_Seq,
                                VecAXPBY_Seq,
                                VecMAXPY_Seq,
                                VecAYPX_Seq,
                                VecWAXPY_Seq,
                                VecAXPBYPCZ_Seq,
                                VecPointwiseMult_Seq,
                                VecPointwiseDivide_Seq,
                                VecSetValues_MPI, /* 20 */
                                VecAssemblyBegin_MPI_BTS,
                                VecAssemblyEnd_MPI_BTS,
                                0,
                                VecGetSize_MPI,
                                VecGetSize_Seq,
                                0,
                                VecMax_MPI,
                                VecMin_MPI,
                                VecSetRandom_Seq,
                                VecSetOption_MPI,
                                VecSetValuesBlocked_MPI,
                                VecDestroy_MPI,
                                VecView_MPI,
                                VecPlaceArray_MPI,
                                VecReplaceArray_Seq,
                                VecDot_Seq,
                                VecTDot_Seq,
                                VecNorm_Seq,
                                VecMDot_Seq,
                                VecMTDot_Seq,
                                VecLoad_Default,
                                VecReciprocal_Default,
                                VecConjugate_Seq,
                                0,
                                0,
                                VecResetArray_MPI,
                                VecSetFromOptions_MPI,
                                VecMaxPointwiseDivide_Seq,
                                VecPointwiseMax_Seq,
                                VecPointwiseMaxAbs_Seq,
                                VecPointwiseMin_Seq,
                                VecGetValues_MPI,
                                0,
                                0,
                                0,
                                0,
                                0,
                                0,
                                VecStrideGather_Default,
                                VecStrideScatter_Default,
                                0,
                                0,
                                0,
                                0,
                                0
};

#undef __FUNCT__
#define __FUNCT__ "VecCreate_MPI_Private"
/*
    VecCreate_MPI_Private - Basic create routine called by VecCreate_MPI() (i.e. VecCreateMPI()),
    VecCreateMPIWithArray(), VecCreate_Shared() (i.e. VecCreateShared()), VecCreateGhost(),
    VecCreateGhostWithArray(), VecDuplicate_MPI(), and VecDuplicate_Shared()

    If alloc is true and array is NULL then this routine allocates the space, otherwise
    no space is allocated.
*/
PetscErrorCode VecCreate_MPI_Private(Vec v,PetscBool alloc,PetscInt nghost,const PetscScalar array[])
{
  Vec_MPI        *s;
  PetscErrorCode ierr;

  PetscFunctionBegin;
  ierr           = PetscNewLog(v,&s);CHKERRQ(ierr);
  v->data        = (void*)s;
  ierr           = PetscMemcpy(v->ops,&DvOps,sizeof(DvOps));CHKERRQ(ierr);
  s->nghost      = nghost;
  v->petscnative = PETSC_TRUE;

  ierr = PetscLayoutSetUp(v->map);CHKERRQ(ierr);

  s->array           = (PetscScalar*)array;
  s->array_allocated = 0;
  if (alloc && !array) {
    PetscInt n = v->map->n+nghost;
    ierr               = PetscMalloc1(n,&s->array);CHKERRQ(ierr);
    ierr               = PetscLogObjectMemory((PetscObject)v,n*sizeof(PetscScalar));CHKERRQ(ierr);
    ierr               = PetscMemzero(s->array,v->map->n*sizeof(PetscScalar));CHKERRQ(ierr);
    s->array_allocated = s->array;
  }

  /* By default parallel vectors do not have local representation */
  s->localrep    = 0;
  s->localupdate = 0;

  v->stash.insertmode = NOT_SET_VALUES;
  /* create the stashes. The block-size for bstash is set later when
     VecSetValuesBlocked is called.
  */
  ierr = VecStashCreate_Private(PetscObjectComm((PetscObject)v),1,&v->stash);CHKERRQ(ierr);
  ierr = VecStashCreate_Private(PetscObjectComm((PetscObject)v),v->map->bs,&v->bstash);CHKERRQ(ierr);

#if defined(PETSC_HAVE_MATLAB_ENGINE)
  ierr = PetscObjectComposeFunction((PetscObject)v,"PetscMatlabEnginePut_C",VecMatlabEnginePut_Default);CHKERRQ(ierr);
  ierr = PetscObjectComposeFunction((PetscObject)v,"PetscMatlabEngineGet_C",VecMatlabEngineGet_Default);CHKERRQ(ierr);
#endif
  ierr = PetscObjectChangeTypeName((PetscObject)v,VECMPI);CHKERRQ(ierr);
  PetscFunctionReturn(0);
}

/*MC
   VECMPI - VECMPI = "mpi" - The basic parallel vector

   Options Database Keys:
. -vec_type mpi - sets the vector type to VECMPI during a call to VecSetFromOptions()

  Level: beginner
.seealso: VecCreate(), VecSetType(), VecSetFromOptions(), VecCreateMPIWithArray(), VECMPI, VecType, VecCreateMPI()
M*/
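
/*
   A minimal sketch of creating a VECMPI vector; comm, n, and N are assumed to be an MPI
   communicator and the local/global sizes (either size may be PETSC_DECIDE, but not both).

     Vec v;
     ierr = VecCreate(comm,&v);CHKERRQ(ierr);
     ierr = VecSetSizes(v,n,N);CHKERRQ(ierr);
     ierr = VecSetType(v,VECMPI);CHKERRQ(ierr);
     ierr = VecDestroy(&v);CHKERRQ(ierr);

   Alternatively, call VecSetFromOptions() instead of VecSetType() and run with -vec_type mpi.
*/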

#undef __FUNCT__
#define __FUNCT__ "VecCreate_MPI"
PETSC_EXTERN PetscErrorCode VecCreate_MPI(Vec vv)
{
  PetscErrorCode ierr;

  PetscFunctionBegin;
  ierr = VecCreate_MPI_Private(vv,PETSC_TRUE,0,0);CHKERRQ(ierr);
  PetscFunctionReturn(0);
}

/*MC
   VECSTANDARD = "standard" - A VECSEQ on one process and VECMPI on more than one process

   Options Database Keys:
. -vec_type standard - sets the vector type to VECSTANDARD during a call to VecSetFromOptions()

  Level: beginner

.seealso: VecCreateSeq(), VecCreateMPI()
M*/

#undef __FUNCT__
#define __FUNCT__ "VecCreate_Standard"
PETSC_EXTERN PetscErrorCode VecCreate_Standard(Vec v)
{
  PetscErrorCode ierr;
  PetscMPIInt    size;

  PetscFunctionBegin;
  ierr = MPI_Comm_size(PetscObjectComm((PetscObject)v),&size);CHKERRQ(ierr);
  if (size == 1) {
    ierr = VecSetType(v,VECSEQ);CHKERRQ(ierr);
  } else {
    ierr = VecSetType(v,VECMPI);CHKERRQ(ierr);
  }
  PetscFunctionReturn(0);
}

#undef __FUNCT__
#define __FUNCT__ "VecCreateMPIWithArray"
/*@C
   VecCreateMPIWithArray - Creates a parallel, array-style vector,
   where the user provides the array space to store the vector values.

   Collective on MPI_Comm

   Input Parameters:
+  comm  - the MPI communicator to use
.  bs    - block size, same meaning as VecSetBlockSize()
.  n     - local vector length, cannot be PETSC_DECIDE
.  N     - global vector length (or PETSC_DECIDE to have calculated if n is given)
-  array - the user provided array to store the vector values

   Output Parameter:
.  vv - the vector

   Notes:
   Use VecDuplicate() or VecDuplicateVecs() to form additional vectors of the
   same type as an existing vector.

   If the user-provided array is NULL, then VecPlaceArray() can be used
   at a later stage to SET the array for storing the vector values.

   PETSc does NOT free the array when the vector is destroyed via VecDestroy().
   The user should not free the array until the vector is destroyed.

   Level: intermediate

   Concepts: vectors^creating with array

.seealso: VecCreateSeqWithArray(), VecCreate(), VecDuplicate(), VecDuplicateVecs(), VecCreateGhost(),
          VecCreateMPI(), VecCreateGhostWithArray(), VecPlaceArray()

@*/
PetscErrorCode  VecCreateMPIWithArray(MPI_Comm comm,PetscInt bs,PetscInt n,PetscInt N,const PetscScalar array[],Vec *vv)
{
  PetscErrorCode ierr;

  PetscFunctionBegin;
  if (n == PETSC_DECIDE) SETERRQ(PETSC_COMM_SELF,PETSC_ERR_ARG_OUTOFRANGE,"Must set local size of vector");
  ierr = PetscSplitOwnership(comm,&n,&N);CHKERRQ(ierr);
  ierr = VecCreate(comm,vv);CHKERRQ(ierr);
  ierr = VecSetSizes(*vv,n,N);CHKERRQ(ierr);
  ierr = VecSetBlockSize(*vv,bs);CHKERRQ(ierr);
  ierr = VecCreate_MPI_Private(*vv,PETSC_FALSE,0,array);CHKERRQ(ierr);
  PetscFunctionReturn(0);
}
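
/*
   A minimal sketch of VecCreateMPIWithArray() with a caller-owned buffer; n is the assumed
   local size, block size 1 is used, and the buffer must outlive the vector since PETSc
   never frees it.

     PetscScalar *array;
     Vec         x;
     ierr = PetscMalloc1(n,&array);CHKERRQ(ierr);
     ierr = VecCreateMPIWithArray(PETSC_COMM_WORLD,1,n,PETSC_DECIDE,array,&x);CHKERRQ(ierr);
     ierr = VecDestroy(&x);CHKERRQ(ierr);
     ierr = PetscFree(array);CHKERRQ(ierr);
*/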

#undef __FUNCT__
#define __FUNCT__ "VecCreateGhostWithArray"
/*@C
   VecCreateGhostWithArray - Creates a parallel vector with ghost padding on each processor;
   the caller allocates the array space.

   Collective on MPI_Comm

   Input Parameters:
+  comm - the MPI communicator to use
.  n - local vector length
.  N - global vector length (or PETSC_DECIDE to have calculated if n is given)
.  nghost - number of local ghost points
.  ghosts - global indices of ghost points (or NULL if not needed), these do not need to be in increasing order (sorted)
-  array - the space to store the vector values (must be of length at least n + nghost)

   Output Parameter:
.  vv - the global vector representation (without ghost points as part of vector)

   Notes:
   Use VecGhostGetLocalForm() to access the local, ghosted representation
   of the vector.

   This also automatically sets the ISLocalToGlobalMapping() for this vector.

   Level: advanced

   Concepts: vectors^creating with array
   Concepts: vectors^ghosted

.seealso: VecCreate(), VecGhostGetLocalForm(), VecGhostRestoreLocalForm(),
          VecCreateGhost(), VecCreateSeqWithArray(), VecCreateMPIWithArray(),
          VecCreateGhostBlock(), VecCreateGhostBlockWithArray(), VecMPISetGhost()

@*/
PetscErrorCode  VecCreateGhostWithArray(MPI_Comm comm,PetscInt n,PetscInt N,PetscInt nghost,const PetscInt ghosts[],const PetscScalar array[],Vec *vv)
{
  PetscErrorCode         ierr;
  Vec_MPI                *w;
  PetscScalar            *larray;
  IS                     from,to;
  ISLocalToGlobalMapping ltog;
  PetscInt               rstart,i,*indices;

  PetscFunctionBegin;
  *vv = 0;

  if (n == PETSC_DECIDE)      SETERRQ(PETSC_COMM_SELF,PETSC_ERR_ARG_OUTOFRANGE,"Must set local size");
  if (nghost == PETSC_DECIDE) SETERRQ(PETSC_COMM_SELF,PETSC_ERR_ARG_OUTOFRANGE,"Must set local ghost size");
  if (nghost < 0)             SETERRQ(PETSC_COMM_SELF,PETSC_ERR_ARG_OUTOFRANGE,"Ghost length must be >= 0");
  ierr = PetscSplitOwnership(comm,&n,&N);CHKERRQ(ierr);
  /* Create global representation */
  ierr = VecCreate(comm,vv);CHKERRQ(ierr);
  ierr = VecSetSizes(*vv,n,N);CHKERRQ(ierr);
  ierr = VecCreate_MPI_Private(*vv,PETSC_TRUE,nghost,array);CHKERRQ(ierr);
  w    = (Vec_MPI*)(*vv)->data;
  /* Create local representation */
  ierr = VecGetArray(*vv,&larray);CHKERRQ(ierr);
  ierr = VecCreateSeqWithArray(PETSC_COMM_SELF,1,n+nghost,larray,&w->localrep);CHKERRQ(ierr);
  ierr = PetscLogObjectParent((PetscObject)*vv,(PetscObject)w->localrep);CHKERRQ(ierr);
  ierr = VecRestoreArray(*vv,&larray);CHKERRQ(ierr);

  /*
       Create scatter context for scattering (updating) ghost values
  */
  ierr = ISCreateGeneral(comm,nghost,ghosts,PETSC_COPY_VALUES,&from);CHKERRQ(ierr);
  ierr = ISCreateStride(PETSC_COMM_SELF,nghost,n,1,&to);CHKERRQ(ierr);
  ierr = VecScatterCreate(*vv,from,w->localrep,to,&w->localupdate);CHKERRQ(ierr);
  ierr = PetscLogObjectParent((PetscObject)*vv,(PetscObject)w->localupdate);CHKERRQ(ierr);
  ierr = ISDestroy(&to);CHKERRQ(ierr);
  ierr = ISDestroy(&from);CHKERRQ(ierr);

  /* set local to global mapping for ghosted vector */
  ierr = PetscMalloc1((n+nghost),&indices);CHKERRQ(ierr);
  ierr = VecGetOwnershipRange(*vv,&rstart,NULL);CHKERRQ(ierr);
  for (i=0; i<n; i++) {
    indices[i] = rstart + i;
  }
  for (i=0; i<nghost; i++) {
    indices[n+i] = ghosts[i];
  }
  ierr = ISLocalToGlobalMappingCreate(comm,n+nghost,indices,PETSC_OWN_POINTER,&ltog);CHKERRQ(ierr);
  ierr = VecSetLocalToGlobalMapping(*vv,ltog);CHKERRQ(ierr);
  ierr = ISLocalToGlobalMappingDestroy(&ltog);CHKERRQ(ierr);
  PetscFunctionReturn(0);
}

#undef __FUNCT__
#define __FUNCT__ "VecCreateGhost"
/*@
   VecCreateGhost - Creates a parallel vector with ghost padding on each processor.

   Collective on MPI_Comm

   Input Parameters:
+  comm - the MPI communicator to use
.  n - local vector length
.  N - global vector length (or PETSC_DECIDE to have calculated if n is given)
.  nghost - number of local ghost points
-  ghosts - global indices of ghost points, these do not need to be in increasing order (sorted)

   Output Parameter:
.  vv - the global vector representation (without ghost points as part of vector)

   Notes:
   Use VecGhostGetLocalForm() to access the local, ghosted representation
   of the vector.

   This also automatically sets the ISLocalToGlobalMapping() for this vector.

   Level: advanced

   Concepts: vectors^ghosted

.seealso: VecCreateSeq(), VecCreate(), VecDuplicate(), VecDuplicateVecs(), VecCreateMPI(),
          VecGhostGetLocalForm(), VecGhostRestoreLocalForm(), VecGhostUpdateBegin(),
          VecCreateGhostWithArray(), VecCreateMPIWithArray(), VecGhostUpdateEnd(),
          VecCreateGhostBlock(), VecCreateGhostBlockWithArray(), VecMPISetGhost()

@*/
PetscErrorCode  VecCreateGhost(MPI_Comm comm,PetscInt n,PetscInt N,PetscInt nghost,const PetscInt ghosts[],Vec *vv)
{
  PetscErrorCode ierr;

  PetscFunctionBegin;
  ierr = VecCreateGhostWithArray(comm,n,N,nghost,ghosts,0,vv);CHKERRQ(ierr);
  PetscFunctionReturn(0);
}
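
/*
   A minimal usage sketch of a ghosted vector: two ghost entries per process are assumed,
   with illustrative global indices in ghosts[], and n is the assumed local size.  After
   assembling the global form, the ghost values are gathered and can be read through the
   local form.

     PetscInt ghosts[2] = {0,1};
     Vec      x,xlocal;
     ierr = VecCreateGhost(PETSC_COMM_WORLD,n,PETSC_DECIDE,2,ghosts,&x);CHKERRQ(ierr);
     ierr = VecSet(x,1.0);CHKERRQ(ierr);
     ierr = VecGhostUpdateBegin(x,INSERT_VALUES,SCATTER_FORWARD);CHKERRQ(ierr);
     ierr = VecGhostUpdateEnd(x,INSERT_VALUES,SCATTER_FORWARD);CHKERRQ(ierr);
     ierr = VecGhostGetLocalForm(x,&xlocal);CHKERRQ(ierr);
     ierr = VecGhostRestoreLocalForm(x,&xlocal);CHKERRQ(ierr);
     ierr = VecDestroy(&x);CHKERRQ(ierr);
*/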

#undef __FUNCT__
#define __FUNCT__ "VecMPISetGhost"
/*@
   VecMPISetGhost - Sets the ghost points for an MPI ghost vector

   Collective on Vec

   Input Parameters:
+  vv - the MPI vector
.  nghost - number of local ghost points
-  ghosts - global indices of ghost points, these do not need to be in increasing order (sorted)

   Notes:
   Use VecGhostGetLocalForm() to access the local, ghosted representation
   of the vector.

   This also automatically sets the ISLocalToGlobalMapping() for this vector.

   You must call this AFTER you have set the type of the vector (with VecSetType()) and the size (with VecSetSizes()).

   Level: advanced

   Concepts: vectors^ghosted

.seealso: VecCreateSeq(), VecCreate(), VecDuplicate(), VecDuplicateVecs(), VecCreateMPI(),
          VecGhostGetLocalForm(), VecGhostRestoreLocalForm(), VecGhostUpdateBegin(),
          VecCreateGhostWithArray(), VecCreateMPIWithArray(), VecGhostUpdateEnd(),
          VecCreateGhostBlock(), VecCreateGhostBlockWithArray()

@*/
PetscErrorCode  VecMPISetGhost(Vec vv,PetscInt nghost,const PetscInt ghosts[])
{
  PetscErrorCode ierr;
  PetscBool      flg;

  PetscFunctionBegin;
  ierr = PetscObjectTypeCompare((PetscObject)vv,VECMPI,&flg);CHKERRQ(ierr);
  /* if this is already a fully existent VECMPI then destroy it and rebuild with ghosting */
  if (flg) {
    PetscInt               n,N;
    Vec_MPI                *w;
    PetscScalar            *larray;
    IS                     from,to;
    ISLocalToGlobalMapping ltog;
    PetscInt               rstart,i,*indices;
    MPI_Comm               comm;

    ierr = PetscObjectGetComm((PetscObject)vv,&comm);CHKERRQ(ierr);
    n    = vv->map->n;
    N    = vv->map->N;
    ierr = (*vv->ops->destroy)(vv);CHKERRQ(ierr);
    ierr = VecSetSizes(vv,n,N);CHKERRQ(ierr);
    ierr = VecCreate_MPI_Private(vv,PETSC_TRUE,nghost,NULL);CHKERRQ(ierr);
    w    = (Vec_MPI*)(vv)->data;
    /* Create local representation */
    ierr = VecGetArray(vv,&larray);CHKERRQ(ierr);
    ierr = VecCreateSeqWithArray(PETSC_COMM_SELF,1,n+nghost,larray,&w->localrep);CHKERRQ(ierr);
    ierr = PetscLogObjectParent((PetscObject)vv,(PetscObject)w->localrep);CHKERRQ(ierr);
    ierr = VecRestoreArray(vv,&larray);CHKERRQ(ierr);

    /*
     Create scatter context for scattering (updating) ghost values
     */
    ierr = ISCreateGeneral(comm,nghost,ghosts,PETSC_COPY_VALUES,&from);CHKERRQ(ierr);
    ierr = ISCreateStride(PETSC_COMM_SELF,nghost,n,1,&to);CHKERRQ(ierr);
    ierr = VecScatterCreate(vv,from,w->localrep,to,&w->localupdate);CHKERRQ(ierr);
    ierr = PetscLogObjectParent((PetscObject)vv,(PetscObject)w->localupdate);CHKERRQ(ierr);
    ierr = ISDestroy(&to);CHKERRQ(ierr);
    ierr = ISDestroy(&from);CHKERRQ(ierr);

    /* set local to global mapping for ghosted vector */
    ierr = PetscMalloc1((n+nghost),&indices);CHKERRQ(ierr);
    ierr = VecGetOwnershipRange(vv,&rstart,NULL);CHKERRQ(ierr);

    for (i=0; i<n; i++)      indices[i]   = rstart + i;
    for (i=0; i<nghost; i++) indices[n+i] = ghosts[i];

    ierr = ISLocalToGlobalMappingCreate(comm,n+nghost,indices,PETSC_OWN_POINTER,&ltog);CHKERRQ(ierr);
    ierr = VecSetLocalToGlobalMapping(vv,ltog);CHKERRQ(ierr);
    ierr = ISLocalToGlobalMappingDestroy(&ltog);CHKERRQ(ierr);
  } else if (vv->ops->create == VecCreate_MPI) SETERRQ(PetscObjectComm((PetscObject)vv),PETSC_ERR_ARG_WRONGSTATE,"Must set local or global size before setting ghosting");
  else if (!((PetscObject)vv)->type_name) SETERRQ(PetscObjectComm((PetscObject)vv),PETSC_ERR_ARG_WRONGSTATE,"Must set type to VECMPI before ghosting");
  PetscFunctionReturn(0);
}
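
/*
   A minimal sketch of adding ghosting to an existing VECMPI vector; n and the ghosts[]
   indices are assumed, and the type and sizes must be set before VecMPISetGhost() as
   noted above.

     PetscInt ghosts[2] = {0,1};
     Vec      x;
     ierr = VecCreate(PETSC_COMM_WORLD,&x);CHKERRQ(ierr);
     ierr = VecSetSizes(x,n,PETSC_DECIDE);CHKERRQ(ierr);
     ierr = VecSetType(x,VECMPI);CHKERRQ(ierr);
     ierr = VecMPISetGhost(x,2,ghosts);CHKERRQ(ierr);
*/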

/* ------------------------------------------------------------------------------------------*/
#undef __FUNCT__
#define __FUNCT__ "VecCreateGhostBlockWithArray"
/*@C
   VecCreateGhostBlockWithArray - Creates a parallel vector with ghost padding on each processor;
   the caller allocates the array space. Indices in the ghost region are based on blocks.

   Collective on MPI_Comm

   Input Parameters:
+  comm - the MPI communicator to use
.  bs - block size
.  n - local vector length
.  N - global vector length (or PETSC_DECIDE to have calculated if n is given)
.  nghost - number of local ghost blocks
.  ghosts - global indices of ghost blocks (or NULL if not needed), counts are by block not by index, these do not need to be in increasing order (sorted)
-  array - the space to store the vector values (must be of length at least n + nghost*bs)

   Output Parameter:
.  vv - the global vector representation (without ghost points as part of vector)

   Notes:
   Use VecGhostGetLocalForm() to access the local, ghosted representation
   of the vector.

   n is the local vector size (total local size not the number of blocks) while nghost
   is the number of blocks in the ghost portion, i.e. the number of elements in the ghost
   portion is bs*nghost

   Level: advanced

   Concepts: vectors^creating ghosted
   Concepts: vectors^creating with array

.seealso: VecCreate(), VecGhostGetLocalForm(), VecGhostRestoreLocalForm(),
          VecCreateGhost(), VecCreateSeqWithArray(), VecCreateMPIWithArray(),
          VecCreateGhostWithArray(), VecCreateGhostBlock()

@*/
PetscErrorCode  VecCreateGhostBlockWithArray(MPI_Comm comm,PetscInt bs,PetscInt n,PetscInt N,PetscInt nghost,const PetscInt ghosts[],const PetscScalar array[],Vec *vv)
{
  PetscErrorCode         ierr;
  Vec_MPI                *w;
  PetscScalar            *larray;
  IS                     from,to;
  ISLocalToGlobalMapping ltog;
  PetscInt               rstart,i,nb,*indices;

  PetscFunctionBegin;
  *vv = 0;

  if (n == PETSC_DECIDE)      SETERRQ(PETSC_COMM_SELF,PETSC_ERR_ARG_OUTOFRANGE,"Must set local size");
  if (nghost == PETSC_DECIDE) SETERRQ(PETSC_COMM_SELF,PETSC_ERR_ARG_OUTOFRANGE,"Must set local ghost size");
  if (nghost < 0)             SETERRQ(PETSC_COMM_SELF,PETSC_ERR_ARG_OUTOFRANGE,"Ghost length must be >= 0");
  if (n % bs)                 SETERRQ(PETSC_COMM_SELF,PETSC_ERR_ARG_INCOMP,"Local size must be a multiple of block size");
  ierr = PetscSplitOwnership(comm,&n,&N);CHKERRQ(ierr);
  /* Create global representation */
  ierr = VecCreate(comm,vv);CHKERRQ(ierr);
  ierr = VecSetSizes(*vv,n,N);CHKERRQ(ierr);
  ierr = VecSetBlockSize(*vv,bs);CHKERRQ(ierr);
  ierr = VecCreate_MPI_Private(*vv,PETSC_TRUE,nghost*bs,array);CHKERRQ(ierr);
  w    = (Vec_MPI*)(*vv)->data;
  /* Create local representation */
  ierr = VecGetArray(*vv,&larray);CHKERRQ(ierr);
  ierr = VecCreateSeqWithArray(PETSC_COMM_SELF,bs,n+bs*nghost,larray,&w->localrep);CHKERRQ(ierr);
  ierr = PetscLogObjectParent((PetscObject)*vv,(PetscObject)w->localrep);CHKERRQ(ierr);
  ierr = VecRestoreArray(*vv,&larray);CHKERRQ(ierr);

  /*
       Create scatter context for scattering (updating) ghost values
  */
  ierr = ISCreateBlock(comm,bs,nghost,ghosts,PETSC_COPY_VALUES,&from);CHKERRQ(ierr);
  ierr = ISCreateStride(PETSC_COMM_SELF,bs*nghost,n,1,&to);CHKERRQ(ierr);
  ierr = VecScatterCreate(*vv,from,w->localrep,to,&w->localupdate);CHKERRQ(ierr);
  ierr = PetscLogObjectParent((PetscObject)*vv,(PetscObject)w->localupdate);CHKERRQ(ierr);
  ierr = ISDestroy(&to);CHKERRQ(ierr);
  ierr = ISDestroy(&from);CHKERRQ(ierr);

  /* set local to global mapping for ghosted vector */
  nb   = n/bs;
  ierr = PetscMalloc1((nb+nghost),&indices);CHKERRQ(ierr);
  ierr = VecGetOwnershipRange(*vv,&rstart,NULL);CHKERRQ(ierr);

  for (i=0; i<nb; i++)      indices[i]    = rstart + i*bs;
  for (i=0; i<nghost; i++)  indices[nb+i] = ghosts[i];

  ierr = ISLocalToGlobalMappingCreate(comm,nb+nghost,indices,PETSC_OWN_POINTER,&ltog);CHKERRQ(ierr);
  ierr = VecSetLocalToGlobalMappingBlock(*vv,ltog);CHKERRQ(ierr);
  ierr = ISLocalToGlobalMappingDestroy(&ltog);CHKERRQ(ierr);
  PetscFunctionReturn(0);
}

#undef __FUNCT__
#define __FUNCT__ "VecCreateGhostBlock"
/*@
   VecCreateGhostBlock - Creates a parallel vector with ghost padding on each processor.
        The indexing of the ghost points is done with blocks.

   Collective on MPI_Comm

   Input Parameters:
+  comm - the MPI communicator to use
.  bs - the block size
.  n - local vector length
.  N - global vector length (or PETSC_DECIDE to have calculated if n is given)
.  nghost - number of local ghost blocks
-  ghosts - global indices of ghost blocks, counts are by block, not by individual index, these do not need to be in increasing order (sorted)

   Output Parameter:
.  vv - the global vector representation (without ghost points as part of vector)

   Notes:
   Use VecGhostGetLocalForm() to access the local, ghosted representation
   of the vector.

   n is the local vector size (total local size not the number of blocks) while nghost
   is the number of blocks in the ghost portion, i.e. the number of elements in the ghost
   portion is bs*nghost

   Level: advanced

   Concepts: vectors^ghosted

.seealso: VecCreateSeq(), VecCreate(), VecDuplicate(), VecDuplicateVecs(), VecCreateMPI(),
          VecGhostGetLocalForm(), VecGhostRestoreLocalForm(),
          VecCreateGhostWithArray(), VecCreateMPIWithArray(), VecCreateGhostBlockWithArray()

@*/
PetscErrorCode  VecCreateGhostBlock(MPI_Comm comm,PetscInt bs,PetscInt n,PetscInt N,PetscInt nghost,const PetscInt ghosts[],Vec *vv)
{
  PetscErrorCode ierr;

  PetscFunctionBegin;
  ierr = VecCreateGhostBlockWithArray(comm,bs,n,N,nghost,ghosts,0,vv);CHKERRQ(ierr);
  PetscFunctionReturn(0);
}
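
/*
   A minimal sketch of a block-ghosted vector with block size 2: one ghost block per process
   is assumed, the entry in ghosts[] is an illustrative block index, and the assumed local
   size n must be a multiple of the block size.

     PetscInt ghosts[1] = {0};
     Vec      x;
     ierr = VecCreateGhostBlock(PETSC_COMM_WORLD,2,n,PETSC_DECIDE,1,ghosts,&x);CHKERRQ(ierr);
     ierr = VecGhostUpdateBegin(x,INSERT_VALUES,SCATTER_FORWARD);CHKERRQ(ierr);
     ierr = VecGhostUpdateEnd(x,INSERT_VALUES,SCATTER_FORWARD);CHKERRQ(ierr);
     ierr = VecDestroy(&x);CHKERRQ(ierr);
*/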
941