/*
   This file contains routines for parallel vector operations.
 */
#include <../src/vec/vec/impls/mpi/pvecimpl.h>   /*I  "petscvec.h"   I*/

#undef __FUNCT__
#define __FUNCT__ "VecPointwiseMax_Seq"
static PetscErrorCode VecPointwiseMax_Seq(Vec win,Vec xin,Vec yin)
{
  PetscErrorCode ierr;
  PetscInt       n = win->map->n,i;
  PetscScalar    *ww,*xx,*yy; /* cannot make xx or yy const since might be ww */

  PetscFunctionBegin;
  ierr = VecGetArrayRead(xin,(const PetscScalar**)&xx);CHKERRQ(ierr);
  ierr = VecGetArrayRead(yin,(const PetscScalar**)&yy);CHKERRQ(ierr);
  ierr = VecGetArray(win,&ww);CHKERRQ(ierr);
  for (i=0; i<n; i++) {
    ww[i] = PetscMax(PetscRealPart(xx[i]),PetscRealPart(yy[i]));
  }
  ierr = VecRestoreArrayRead(xin,(const PetscScalar**)&xx);CHKERRQ(ierr);
  ierr = VecRestoreArrayRead(yin,(const PetscScalar**)&yy);CHKERRQ(ierr);
  ierr = VecRestoreArray(win,&ww);CHKERRQ(ierr);
  ierr = PetscLogFlops(n);CHKERRQ(ierr);
  PetscFunctionReturn(0);
}

#undef __FUNCT__
#define __FUNCT__ "VecPointwiseMin_Seq"
static PetscErrorCode VecPointwiseMin_Seq(Vec win,Vec xin,Vec yin)
{
  PetscErrorCode ierr;
  PetscInt       n = win->map->n,i;
  PetscScalar    *ww,*xx,*yy; /* cannot make xx or yy const since might be ww */

  PetscFunctionBegin;
  ierr = VecGetArrayRead(xin,(const PetscScalar**)&xx);CHKERRQ(ierr);
  ierr = VecGetArrayRead(yin,(const PetscScalar**)&yy);CHKERRQ(ierr);
  ierr = VecGetArray(win,&ww);CHKERRQ(ierr);
  for (i=0; i<n; i++) {
    ww[i] = PetscMin(PetscRealPart(xx[i]),PetscRealPart(yy[i]));
  }
  ierr = VecRestoreArrayRead(xin,(const PetscScalar**)&xx);CHKERRQ(ierr);
  ierr = VecRestoreArrayRead(yin,(const PetscScalar**)&yy);CHKERRQ(ierr);
  ierr = VecRestoreArray(win,&ww);CHKERRQ(ierr);
  ierr = PetscLogFlops(n);CHKERRQ(ierr);
  PetscFunctionReturn(0);
}

#undef __FUNCT__
#define __FUNCT__ "VecPointwiseMaxAbs_Seq"
static PetscErrorCode VecPointwiseMaxAbs_Seq(Vec win,Vec xin,Vec yin)
{
  PetscErrorCode ierr;
  PetscInt       n = win->map->n,i;
  PetscScalar    *ww,*xx,*yy; /* cannot make xx or yy const since might be ww */

  PetscFunctionBegin;
  ierr = VecGetArrayRead(xin,(const PetscScalar**)&xx);CHKERRQ(ierr);
  ierr = VecGetArrayRead(yin,(const PetscScalar**)&yy);CHKERRQ(ierr);
  ierr = VecGetArray(win,&ww);CHKERRQ(ierr);
  for (i=0; i<n; i++) {
    ww[i] = PetscMax(PetscAbsScalar(xx[i]),PetscAbsScalar(yy[i]));
  }
  ierr = PetscLogFlops(n);CHKERRQ(ierr);
  ierr = VecRestoreArrayRead(xin,(const PetscScalar**)&xx);CHKERRQ(ierr);
  ierr = VecRestoreArrayRead(yin,(const PetscScalar**)&yy);CHKERRQ(ierr);
  ierr = VecRestoreArray(win,&ww);CHKERRQ(ierr);
  PetscFunctionReturn(0);
}

#include <../src/vec/vec/impls/seq/ftn-kernels/fxtimesy.h>
#undef __FUNCT__
#define __FUNCT__ "VecPointwiseMult_Seq"
static PetscErrorCode VecPointwiseMult_Seq(Vec win,Vec xin,Vec yin)
{
  PetscErrorCode ierr;
  PetscInt       n = win->map->n,i;
  PetscScalar    *ww,*xx,*yy; /* cannot make xx or yy const since might be ww */

  PetscFunctionBegin;
  ierr = VecGetArrayRead(xin,(const PetscScalar**)&xx);CHKERRQ(ierr);
  ierr = VecGetArrayRead(yin,(const PetscScalar**)&yy);CHKERRQ(ierr);
  ierr = VecGetArray(win,&ww);CHKERRQ(ierr);
  if (ww == xx) {
    for (i=0; i<n; i++) ww[i] *= yy[i];
  } else if (ww == yy) {
    for (i=0; i<n; i++) ww[i] *= xx[i];
  } else {
#if defined(PETSC_USE_FORTRAN_KERNEL_XTIMESY)
    fortranxtimesy_(xx,yy,ww,&n);
#else
    for (i=0; i<n; i++) ww[i] = xx[i] * yy[i];
#endif
  }
  ierr = VecRestoreArrayRead(xin,(const PetscScalar**)&xx);CHKERRQ(ierr);
  ierr = VecRestoreArrayRead(yin,(const PetscScalar**)&yy);CHKERRQ(ierr);
  ierr = VecRestoreArray(win,&ww);CHKERRQ(ierr);
  ierr = PetscLogFlops(n);CHKERRQ(ierr);
  PetscFunctionReturn(0);
}

#undef __FUNCT__
#define __FUNCT__ "VecPointwiseDivide_Seq"
static PetscErrorCode VecPointwiseDivide_Seq(Vec win,Vec xin,Vec yin)
{
  PetscErrorCode ierr;
  PetscInt       n = win->map->n,i;
  PetscScalar    *ww,*xx,*yy; /* cannot make xx or yy const since might be ww */

  PetscFunctionBegin;
  ierr = VecGetArrayRead(xin,(const PetscScalar**)&xx);CHKERRQ(ierr);
  ierr = VecGetArrayRead(yin,(const PetscScalar**)&yy);CHKERRQ(ierr);
  ierr = VecGetArray(win,&ww);CHKERRQ(ierr);
  for (i=0; i<n; i++) {
    ww[i] = xx[i] / yy[i];
  }
  ierr = PetscLogFlops(n);CHKERRQ(ierr);
  ierr = VecRestoreArrayRead(xin,(const PetscScalar**)&xx);CHKERRQ(ierr);
  ierr = VecRestoreArrayRead(yin,(const PetscScalar**)&yy);CHKERRQ(ierr);
  ierr = VecRestoreArray(win,&ww);CHKERRQ(ierr);
  PetscFunctionReturn(0);
}

#undef __FUNCT__
#define __FUNCT__ "VecDot_MPI"
PetscErrorCode VecDot_MPI(Vec xin,Vec yin,PetscScalar *z)
{
  PetscScalar    sum,work;
  PetscErrorCode ierr;

  PetscFunctionBegin;
  ierr = VecDot_Seq(xin,yin,&work);CHKERRQ(ierr);
  ierr = MPI_Allreduce(&work,&sum,1,MPIU_SCALAR,MPIU_SUM,((PetscObject)xin)->comm);CHKERRQ(ierr);
  *z = sum;
  PetscFunctionReturn(0);
}

#undef __FUNCT__
#define __FUNCT__ "VecTDot_MPI"
PetscErrorCode VecTDot_MPI(Vec xin,Vec yin,PetscScalar *z)
{
  PetscScalar    sum,work;
  PetscErrorCode ierr;

  PetscFunctionBegin;
  ierr = VecTDot_Seq(xin,yin,&work);CHKERRQ(ierr);
  ierr = MPI_Allreduce(&work,&sum,1,MPIU_SCALAR,MPIU_SUM,((PetscObject)xin)->comm);CHKERRQ(ierr);
  *z   = sum;
  PetscFunctionReturn(0);
}

EXTERN_C_BEGIN
extern PetscErrorCode VecView_MPI_Draw(Vec,PetscViewer);
EXTERN_C_END

#undef __FUNCT__
#define __FUNCT__ "VecPlaceArray_MPI"
PetscErrorCode VecPlaceArray_MPI(Vec vin,const PetscScalar *a)
{
  PetscErrorCode ierr;
  Vec_MPI        *v = (Vec_MPI *)vin->data;

  PetscFunctionBegin;
  if (v->unplacedarray) SETERRQ(PETSC_COMM_SELF,PETSC_ERR_ARG_WRONGSTATE,"VecPlaceArray() was already called on this vector, without a call to VecResetArray()");
  v->unplacedarray = v->array;  /* save previous array so reset can bring it back */
  v->array = (PetscScalar *)a;
  if (v->localrep) {
    ierr = VecPlaceArray(v->localrep,a);CHKERRQ(ierr);
  }
  PetscFunctionReturn(0);
}

extern PetscErrorCode VecGetValues_MPI(Vec,PetscInt,const PetscInt [],PetscScalar []);

#undef __FUNCT__
#define __FUNCT__ "VecDuplicate_MPI"
static PetscErrorCode VecDuplicate_MPI(Vec win,Vec *v)
{
  PetscErrorCode ierr;
  Vec_MPI        *vw,*w = (Vec_MPI *)win->data;
  PetscScalar    *array;

  PetscFunctionBegin;
  ierr = VecCreate(((PetscObject)win)->comm,v);CHKERRQ(ierr);

  /* use the map that already exists in win */
  ierr = PetscLayoutDestroy(&(*v)->map);CHKERRQ(ierr);
  (*v)->map = win->map;
  win->map->refcnt++;

  ierr = VecCreate_MPI_Private(*v,PETSC_TRUE,w->nghost,0);CHKERRQ(ierr);
  vw   = (Vec_MPI *)(*v)->data;
  ierr = PetscMemcpy((*v)->ops,win->ops,sizeof(struct _VecOps));CHKERRQ(ierr);

  /* save local representation of the parallel vector (and scatter) if it exists */
  if (w->localrep) {
    ierr = VecGetArray(*v,&array);CHKERRQ(ierr);
    ierr = VecCreateSeqWithArray(PETSC_COMM_SELF,win->map->n+w->nghost,array,&vw->localrep);CHKERRQ(ierr);
    ierr = PetscMemcpy(vw->localrep->ops,w->localrep->ops,sizeof(struct _VecOps));CHKERRQ(ierr);
    ierr = VecRestoreArray(*v,&array);CHKERRQ(ierr);
    ierr = PetscLogObjectParent(*v,vw->localrep);CHKERRQ(ierr);
    vw->localupdate = w->localupdate;
    if (vw->localupdate) {
      ierr = PetscObjectReference((PetscObject)vw->localupdate);CHKERRQ(ierr);
    }
  }

  /* New vector should inherit stashing property of parent */
  (*v)->stash.donotstash   = win->stash.donotstash;
  (*v)->stash.ignorenegidx = win->stash.ignorenegidx;

  ierr = PetscOListDuplicate(((PetscObject)win)->olist,&((PetscObject)(*v))->olist);CHKERRQ(ierr);
  ierr = PetscFListDuplicate(((PetscObject)win)->qlist,&((PetscObject)(*v))->qlist);CHKERRQ(ierr);
  if (win->mapping) {
    ierr = PetscObjectReference((PetscObject)win->mapping);CHKERRQ(ierr);
    (*v)->mapping = win->mapping;
  }
  if (win->bmapping) {
    ierr = PetscObjectReference((PetscObject)win->bmapping);CHKERRQ(ierr);
    (*v)->bmapping = win->bmapping;
  }
  (*v)->map->bs   = win->map->bs;
  (*v)->bstash.bs = win->bstash.bs;

  PetscFunctionReturn(0);
}

extern PetscErrorCode VecSetOption_MPI(Vec,VecOption,PetscBool);
extern PetscErrorCode VecResetArray_MPI(Vec);

#undef __FUNCT__
#define __FUNCT__ "VecSetRandom_Seq"
static PetscErrorCode VecSetRandom_Seq(Vec xin,PetscRandom r)
{
  PetscErrorCode ierr;
  PetscInt       n = xin->map->n,i;
  PetscScalar    *xx;

  PetscFunctionBegin;
  ierr = VecGetArray(xin,&xx);CHKERRQ(ierr);
  for (i=0; i<n; i++) {ierr = PetscRandomGetValue(r,&xx[i]);CHKERRQ(ierr);}
  ierr = VecRestoreArray(xin,&xx);CHKERRQ(ierr);
  PetscFunctionReturn(0);
}

#undef __FUNCT__
#define __FUNCT__ "VecGetSize_Seq"
static PetscErrorCode VecGetSize_Seq(Vec vin,PetscInt *size)
{
  PetscFunctionBegin;
  *size = vin->map->n;
  PetscFunctionReturn(0);
}

#undef __FUNCT__
#define __FUNCT__ "VecConjugate_Seq"
static PetscErrorCode VecConjugate_Seq(Vec xin)
{
  PetscScalar    *x;
  PetscInt       n = xin->map->n;
  PetscErrorCode ierr;

  PetscFunctionBegin;
  ierr = VecGetArray(xin,&x);CHKERRQ(ierr);
  while (n-->0) {
    *x = PetscConj(*x);
    x++;
  }
  ierr = VecRestoreArray(xin,&x);CHKERRQ(ierr);
  PetscFunctionReturn(0);
}

#undef __FUNCT__
#define __FUNCT__ "VecCopy_Seq"
static PetscErrorCode VecCopy_Seq(Vec xin,Vec yin)
{
  PetscScalar       *ya;
  const PetscScalar *xa;
  PetscErrorCode    ierr;

  PetscFunctionBegin;
  if (xin != yin) {
    ierr = VecGetArrayRead(xin,&xa);CHKERRQ(ierr);
    ierr = VecGetArray(yin,&ya);CHKERRQ(ierr);
    ierr = PetscMemcpy(ya,xa,xin->map->n*sizeof(PetscScalar));CHKERRQ(ierr);
    ierr = VecRestoreArrayRead(xin,&xa);CHKERRQ(ierr);
    ierr = VecRestoreArray(yin,&ya);CHKERRQ(ierr);
  }
  PetscFunctionReturn(0);
}

#include <petscblaslapack.h>
#undef __FUNCT__
#define __FUNCT__ "VecSwap_Seq"
static PetscErrorCode VecSwap_Seq(Vec xin,Vec yin)
{
  PetscScalar    *ya, *xa;
  PetscErrorCode ierr;
  PetscBLASInt   one = 1,bn = PetscBLASIntCast(xin->map->n);

  PetscFunctionBegin;
  if (xin != yin) {
    ierr = VecGetArray(xin,&xa);CHKERRQ(ierr);
    ierr = VecGetArray(yin,&ya);CHKERRQ(ierr);
    BLASswap_(&bn,xa,&one,ya,&one);
    ierr = VecRestoreArray(xin,&xa);CHKERRQ(ierr);
    ierr = VecRestoreArray(yin,&ya);CHKERRQ(ierr);
  }
  PetscFunctionReturn(0);
}

static struct _VecOps DvOps = { VecDuplicate_MPI, /* 1 */
            VecDuplicateVecs_Default,
            VecDestroyVecs_Default,
            VecDot_MPI,
            VecMDot_MPI,
            VecNorm_MPI,
            VecTDot_MPI,
            VecMTDot_MPI,
            VecScale_Seq,
            VecCopy_Seq, /* 10 */
            VecSet_Seq,
            VecSwap_Seq,
            VecAXPY_Seq,
            VecAXPBY_Seq,
            VecMAXPY_Seq,
            VecAYPX_Seq,
            VecWAXPY_Seq,
            VecAXPBYPCZ_Seq,
            VecPointwiseMult_Seq,
            VecPointwiseDivide_Seq,
            VecSetValues_MPI, /* 20 */
            VecAssemblyBegin_MPI,
            VecAssemblyEnd_MPI,
            0,
            VecGetSize_MPI,
            VecGetSize_Seq,
            0,
            VecMax_MPI,
            VecMin_MPI,
            VecSetRandom_Seq,
            VecSetOption_MPI,
            VecSetValuesBlocked_MPI,
            VecDestroy_MPI,
            VecView_MPI,
            VecPlaceArray_MPI,
            VecReplaceArray_Seq,
            VecDot_Seq,
            VecTDot_Seq,
            VecNorm_Seq,
            VecMDot_Seq,
            VecMTDot_Seq,
            VecLoad_Default,
            VecReciprocal_Default,
            VecConjugate_Seq,
            0,
            0,
            VecResetArray_MPI,
            0,
            VecMaxPointwiseDivide_Seq,
            VecPointwiseMax_Seq,
            VecPointwiseMaxAbs_Seq,
            VecPointwiseMin_Seq,
            VecGetValues_MPI,
            0,
            0,
            0,
            0,
            0,
            0,
            VecStrideGather_Default,
            VecStrideScatter_Default
};

#undef __FUNCT__
#define __FUNCT__ "VecCreate_MPI_Private"
/*
    VecCreate_MPI_Private - Basic create routine called by VecCreate_MPI() (i.e. VecCreateMPI()),
    VecCreateMPIWithArray(), VecCreate_Shared() (i.e. VecCreateShared()), VecCreateGhost(),
    VecCreateGhostWithArray(), VecDuplicate_MPI(), and VecDuplicate_Shared()

    If alloc is true and array is PETSC_NULL then this routine allocates the space, otherwise
    no space is allocated.
*/
PetscErrorCode VecCreate_MPI_Private(Vec v,PetscBool  alloc,PetscInt nghost,const PetscScalar array[])
{
  Vec_MPI        *s;
  PetscErrorCode ierr;

  PetscFunctionBegin;

  ierr           = PetscNewLog(v,Vec_MPI,&s);CHKERRQ(ierr);
  v->data        = (void*)s;
  ierr           = PetscMemcpy(v->ops,&DvOps,sizeof(DvOps));CHKERRQ(ierr);
  s->nghost      = nghost;
  v->mapping     = 0;
  v->bmapping    = 0;
  v->petscnative = PETSC_TRUE;

  if (v->map->bs == -1) v->map->bs = 1;
  ierr = PetscLayoutSetUp(v->map);CHKERRQ(ierr);
  s->array           = (PetscScalar *)array;
  s->array_allocated = 0;
  if (alloc && !array) {
    PetscInt n         = v->map->n+nghost;
    ierr               = PetscMalloc(n*sizeof(PetscScalar),&s->array);CHKERRQ(ierr);
    ierr               = PetscLogObjectMemory(v,n*sizeof(PetscScalar));CHKERRQ(ierr);
    ierr               = PetscMemzero(s->array,v->map->n*sizeof(PetscScalar));CHKERRQ(ierr);
    s->array_allocated = s->array;
  }

  /* By default parallel vectors do not have local representation */
  s->localrep    = 0;
  s->localupdate = 0;

  v->stash.insertmode = NOT_SET_VALUES;
  /* create the stashes. The block-size for bstash is set later when
     VecSetValuesBlocked is called.
  */
  ierr = VecStashCreate_Private(((PetscObject)v)->comm,1,&v->stash);CHKERRQ(ierr);
  ierr = VecStashCreate_Private(((PetscObject)v)->comm,v->map->bs,&v->bstash);CHKERRQ(ierr);

#if defined(PETSC_HAVE_MATLAB_ENGINE)
  ierr = PetscObjectComposeFunctionDynamic((PetscObject)v,"PetscMatlabEnginePut_C","VecMatlabEnginePut_Default",VecMatlabEnginePut_Default);CHKERRQ(ierr);
  ierr = PetscObjectComposeFunctionDynamic((PetscObject)v,"PetscMatlabEngineGet_C","VecMatlabEngineGet_Default",VecMatlabEngineGet_Default);CHKERRQ(ierr);
#endif
  ierr = PetscObjectChangeTypeName((PetscObject)v,VECMPI);CHKERRQ(ierr);
  PetscFunctionReturn(0);
}

/*MC
   VECMPI - VECMPI = "mpi" - The basic parallel vector

   Options Database Keys:
. -vec_type mpi - sets the vector type to VECMPI during a call to VecSetFromOptions()
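
   Sample usage (a minimal sketch; error checking is omitted, and v, n, and comm are user variables):
.vb
   VecCreate(comm,&v);
   VecSetSizes(v,n,PETSC_DECIDE);
   VecSetType(v,VECMPI);
.ve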

  Level: beginner

.seealso: VecCreate(), VecSetType(), VecSetFromOptions(), VecCreateMPIWithArray(), VecType, VecCreateMPI()
M*/

EXTERN_C_BEGIN
#undef __FUNCT__
#define __FUNCT__ "VecCreate_MPI"
PetscErrorCode  VecCreate_MPI(Vec vv)
{
  PetscErrorCode ierr;

  PetscFunctionBegin;
  ierr = VecCreate_MPI_Private(vv,PETSC_TRUE,0,0);CHKERRQ(ierr);
  PetscFunctionReturn(0);
}
EXTERN_C_END

/*MC
   VECSTANDARD = "standard" - A VECSEQ on one process and VECMPI on more than one process

   Options Database Keys:
. -vec_type standard - sets the vector type to VECSTANDARD during a call to VecSetFromOptions()
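
   Sample usage (a minimal sketch; VecSetFromOptions() then selects VECSEQ or VECMPI based on the size of the communicator):
.vb
   VecCreate(comm,&v);
   VecSetSizes(v,n,PETSC_DECIDE);
   VecSetFromOptions(v);
.ve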

  Level: beginner

.seealso: VecCreateSeq(), VecCreateMPI()
M*/

EXTERN_C_BEGIN
#undef __FUNCT__
#define __FUNCT__ "VecCreate_Standard"
PetscErrorCode  VecCreate_Standard(Vec v)
{
  PetscErrorCode ierr;
  PetscMPIInt    size;

  PetscFunctionBegin;
  ierr = MPI_Comm_size(((PetscObject)v)->comm,&size);CHKERRQ(ierr);
  if (size == 1) {
    ierr = VecSetType(v,VECSEQ);CHKERRQ(ierr);
  } else {
    ierr = VecSetType(v,VECMPI);CHKERRQ(ierr);
  }
  PetscFunctionReturn(0);
}
EXTERN_C_END

#undef __FUNCT__
#define __FUNCT__ "VecCreateMPIWithArray"
/*@C
   VecCreateMPIWithArray - Creates a parallel, array-style vector,
   where the user provides the array space to store the vector values.

   Collective on MPI_Comm

   Input Parameters:
+  comm  - the MPI communicator to use
.  n     - local vector length, cannot be PETSC_DECIDE
.  N     - global vector length (or PETSC_DECIDE to have it calculated)
-  array - the user provided array to store the vector values

   Output Parameter:
.  vv - the vector

   Notes:
   Use VecDuplicate() or VecDuplicateVecs() to form additional vectors of the
   same type as an existing vector.

   If the user-provided array is PETSC_NULL, then VecPlaceArray() can be used
   at a later stage to SET the array for storing the vector values.

   PETSc does NOT free the array when the vector is destroyed via VecDestroy().
   The user should not free the array until the vector is destroyed.
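
   Sample usage (a minimal sketch; assumes each process supplies a user-managed array of local length 100):
.vb
   PetscScalar array[100];
   Vec         v;
   VecCreateMPIWithArray(PETSC_COMM_WORLD,100,PETSC_DECIDE,array,&v);
   /* ... use v; array must remain valid until after VecDestroy() ... */
   VecDestroy(&v);
.ve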

   Level: intermediate

   Concepts: vectors^creating with array

.seealso: VecCreateSeqWithArray(), VecCreate(), VecDuplicate(), VecDuplicateVecs(), VecCreateGhost(),
          VecCreateMPI(), VecCreateGhostWithArray(), VecPlaceArray()

@*/
PetscErrorCode  VecCreateMPIWithArray(MPI_Comm comm,PetscInt n,PetscInt N,const PetscScalar array[],Vec *vv)
{
  PetscErrorCode ierr;

  PetscFunctionBegin;
  if (n == PETSC_DECIDE) SETERRQ(PETSC_COMM_SELF,PETSC_ERR_ARG_OUTOFRANGE,"Must set local size of vector");
  ierr = PetscSplitOwnership(comm,&n,&N);CHKERRQ(ierr);
  ierr = VecCreate(comm,vv);CHKERRQ(ierr);
  ierr = VecSetSizes(*vv,n,N);CHKERRQ(ierr);
  ierr = VecCreate_MPI_Private(*vv,PETSC_FALSE,0,array);CHKERRQ(ierr);
  PetscFunctionReturn(0);
}

#undef __FUNCT__
#define __FUNCT__ "VecCreateGhostWithArray"
/*@C
   VecCreateGhostWithArray - Creates a parallel vector with ghost padding on each processor;
   the caller allocates the array space.

   Collective on MPI_Comm

   Input Parameters:
+  comm - the MPI communicator to use
.  n - local vector length
.  N - global vector length (or PETSC_DECIDE to have it calculated if n is given)
.  nghost - number of local ghost points
.  ghosts - global indices of ghost points (or PETSC_NULL if not needed)
-  array - the space to store the vector values (must be of length at least n + nghost)

   Output Parameter:
.  vv - the global vector representation (without ghost points as part of vector)

   Notes:
   Use VecGhostGetLocalForm() to access the local, ghosted representation
   of the vector.

   This also automatically sets the ISLocalToGlobalMapping() for this vector.
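
   Sample usage (a minimal sketch; updates the ghost values, then reads them through the local form lv):
.vb
   VecCreateGhostWithArray(comm,n,PETSC_DECIDE,nghost,ghosts,array,&vv);
   /* ... assemble vv ... */
   VecGhostUpdateBegin(vv,INSERT_VALUES,SCATTER_FORWARD);
   VecGhostUpdateEnd(vv,INSERT_VALUES,SCATTER_FORWARD);
   VecGhostGetLocalForm(vv,&lv);
   /* lv holds the n owned entries followed by the nghost ghost entries */
   VecGhostRestoreLocalForm(vv,&lv);
.ve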

   Level: advanced

   Concepts: vectors^creating with array
   Concepts: vectors^ghosted

.seealso: VecCreate(), VecGhostGetLocalForm(), VecGhostRestoreLocalForm(),
          VecCreateGhost(), VecCreateSeqWithArray(), VecCreateMPIWithArray(),
          VecCreateGhostBlock(), VecCreateGhostBlockWithArray()

@*/
PetscErrorCode  VecCreateGhostWithArray(MPI_Comm comm,PetscInt n,PetscInt N,PetscInt nghost,const PetscInt ghosts[],const PetscScalar array[],Vec *vv)
{
  PetscErrorCode         ierr;
  Vec_MPI                *w;
  PetscScalar            *larray;
  IS                     from,to;
  ISLocalToGlobalMapping ltog;
  PetscInt               rstart,i,*indices;

  PetscFunctionBegin;
  *vv = 0;

  if (n == PETSC_DECIDE)      SETERRQ(PETSC_COMM_SELF,PETSC_ERR_ARG_OUTOFRANGE,"Must set local size");
  if (nghost == PETSC_DECIDE) SETERRQ(PETSC_COMM_SELF,PETSC_ERR_ARG_OUTOFRANGE,"Must set local ghost size");
  if (nghost < 0)             SETERRQ(PETSC_COMM_SELF,PETSC_ERR_ARG_OUTOFRANGE,"Ghost length must be >= 0");
  ierr = PetscSplitOwnership(comm,&n,&N);CHKERRQ(ierr);
  /* Create global representation */
  ierr = VecCreate(comm,vv);CHKERRQ(ierr);
  ierr = VecSetSizes(*vv,n,N);CHKERRQ(ierr);
  ierr = VecCreate_MPI_Private(*vv,PETSC_TRUE,nghost,array);CHKERRQ(ierr);
  w    = (Vec_MPI *)(*vv)->data;
  /* Create local representation */
  ierr = VecGetArray(*vv,&larray);CHKERRQ(ierr);
  ierr = VecCreateSeqWithArray(PETSC_COMM_SELF,n+nghost,larray,&w->localrep);CHKERRQ(ierr);
  ierr = PetscLogObjectParent(*vv,w->localrep);CHKERRQ(ierr);
  ierr = VecRestoreArray(*vv,&larray);CHKERRQ(ierr);

  /*
       Create scatter context for scattering (updating) ghost values
  */
  ierr = ISCreateGeneral(comm,nghost,ghosts,PETSC_COPY_VALUES,&from);CHKERRQ(ierr);
  ierr = ISCreateStride(PETSC_COMM_SELF,nghost,n,1,&to);CHKERRQ(ierr);
  ierr = VecScatterCreate(*vv,from,w->localrep,to,&w->localupdate);CHKERRQ(ierr);
  ierr = PetscLogObjectParent(*vv,w->localupdate);CHKERRQ(ierr);
  ierr = ISDestroy(&to);CHKERRQ(ierr);
  ierr = ISDestroy(&from);CHKERRQ(ierr);

  /* set local to global mapping for ghosted vector */
  ierr = PetscMalloc((n+nghost)*sizeof(PetscInt),&indices);CHKERRQ(ierr);
  ierr = VecGetOwnershipRange(*vv,&rstart,PETSC_NULL);CHKERRQ(ierr);
  for (i=0; i<n; i++) {
    indices[i] = rstart + i;
  }
  for (i=0; i<nghost; i++) {
    indices[n+i] = ghosts[i];
  }
  ierr = ISLocalToGlobalMappingCreate(comm,n+nghost,indices,PETSC_OWN_POINTER,&ltog);CHKERRQ(ierr);
  ierr = VecSetLocalToGlobalMapping(*vv,ltog);CHKERRQ(ierr);
  ierr = ISLocalToGlobalMappingDestroy(&ltog);CHKERRQ(ierr);
  PetscFunctionReturn(0);
}

#undef __FUNCT__
#define __FUNCT__ "VecCreateGhost"
/*@
   VecCreateGhost - Creates a parallel vector with ghost padding on each processor.

   Collective on MPI_Comm

   Input Parameters:
+  comm - the MPI communicator to use
.  n - local vector length
.  N - global vector length (or PETSC_DECIDE to have it calculated if n is given)
.  nghost - number of local ghost points
-  ghosts - global indices of ghost points

   Output Parameter:
.  vv - the global vector representation (without ghost points as part of vector)

   Notes:
   Use VecGhostGetLocalForm() to access the local, ghosted representation
   of the vector.

   This also automatically sets the ISLocalToGlobalMapping() for this vector.
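
   Sample usage (a minimal sketch; two ghost entries whose global indices g0 and g1 are owned by other processes):
.vb
   PetscInt ghosts[2] = {g0,g1};
   VecCreateGhost(PETSC_COMM_WORLD,n,PETSC_DECIDE,2,ghosts,&vv);
   /* ... assemble vv ... */
   VecGhostUpdateBegin(vv,INSERT_VALUES,SCATTER_FORWARD);
   VecGhostUpdateEnd(vv,INSERT_VALUES,SCATTER_FORWARD);
.ve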

   Level: advanced

   Concepts: vectors^ghosted

.seealso: VecCreateSeq(), VecCreate(), VecDuplicate(), VecDuplicateVecs(), VecCreateMPI(),
          VecGhostGetLocalForm(), VecGhostRestoreLocalForm(), VecGhostUpdateBegin(),
          VecCreateGhostWithArray(), VecCreateMPIWithArray(), VecGhostUpdateEnd(),
          VecCreateGhostBlock(), VecCreateGhostBlockWithArray()

@*/
PetscErrorCode  VecCreateGhost(MPI_Comm comm,PetscInt n,PetscInt N,PetscInt nghost,const PetscInt ghosts[],Vec *vv)
{
  PetscErrorCode ierr;

  PetscFunctionBegin;
  ierr = VecCreateGhostWithArray(comm,n,N,nghost,ghosts,0,vv);CHKERRQ(ierr);
  PetscFunctionReturn(0);
}

/* ------------------------------------------------------------------------------------------*/
#undef __FUNCT__
#define __FUNCT__ "VecCreateGhostBlockWithArray"
/*@C
   VecCreateGhostBlockWithArray - Creates a parallel vector with ghost padding on each processor;
   the caller allocates the array space. Indices in the ghost region are based on blocks.

   Collective on MPI_Comm

   Input Parameters:
+  comm - the MPI communicator to use
.  bs - block size
.  n - local vector length
.  N - global vector length (or PETSC_DECIDE to have it calculated if n is given)
.  nghost - number of local ghost blocks
.  ghosts - global indices of ghost blocks (or PETSC_NULL if not needed), counts are by block, not by individual index
-  array - the space to store the vector values (must be of length at least n + bs*nghost)

   Output Parameter:
.  vv - the global vector representation (without ghost points as part of vector)

   Notes:
   Use VecGhostGetLocalForm() to access the local, ghosted representation
   of the vector.

   n is the local vector size (the total local size, not the number of blocks) while nghost
   is the number of blocks in the ghost portion, i.e. the number of elements in the ghost
   portion is bs*nghost
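
   Sample usage (a minimal sketch; block size 2 and one ghost block, so array must hold at least n+2 entries):
.vb
   VecCreateGhostBlockWithArray(comm,2,n,PETSC_DECIDE,1,ghosts,array,&vv);
   /* ... assemble vv ... */
   VecGhostUpdateBegin(vv,INSERT_VALUES,SCATTER_FORWARD);
   VecGhostUpdateEnd(vv,INSERT_VALUES,SCATTER_FORWARD);
.ve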

   Level: advanced

   Concepts: vectors^creating ghosted
   Concepts: vectors^creating with array

.seealso: VecCreate(), VecGhostGetLocalForm(), VecGhostRestoreLocalForm(),
          VecCreateGhost(), VecCreateSeqWithArray(), VecCreateMPIWithArray(),
          VecCreateGhostWithArray(), VecCreateGhostBlock()

@*/
PetscErrorCode  VecCreateGhostBlockWithArray(MPI_Comm comm,PetscInt bs,PetscInt n,PetscInt N,PetscInt nghost,const PetscInt ghosts[],const PetscScalar array[],Vec *vv)
{
  PetscErrorCode         ierr;
  Vec_MPI                *w;
  PetscScalar            *larray;
  IS                     from,to;
  ISLocalToGlobalMapping ltog;
  PetscInt               rstart,i,nb,*indices;

  PetscFunctionBegin;
  *vv = 0;

  if (n == PETSC_DECIDE)      SETERRQ(PETSC_COMM_SELF,PETSC_ERR_ARG_OUTOFRANGE,"Must set local size");
  if (nghost == PETSC_DECIDE) SETERRQ(PETSC_COMM_SELF,PETSC_ERR_ARG_OUTOFRANGE,"Must set local ghost size");
  if (nghost < 0)             SETERRQ(PETSC_COMM_SELF,PETSC_ERR_ARG_OUTOFRANGE,"Ghost length must be >= 0");
  if (n % bs)                 SETERRQ(PETSC_COMM_SELF,PETSC_ERR_ARG_INCOMP,"Local size must be a multiple of block size");
  ierr = PetscSplitOwnership(comm,&n,&N);CHKERRQ(ierr);
  /* Create global representation */
  ierr = VecCreate(comm,vv);CHKERRQ(ierr);
  ierr = VecSetSizes(*vv,n,N);CHKERRQ(ierr);
  ierr = VecCreate_MPI_Private(*vv,PETSC_TRUE,nghost*bs,array);CHKERRQ(ierr);
  ierr = VecSetBlockSize(*vv,bs);CHKERRQ(ierr);
  w    = (Vec_MPI *)(*vv)->data;
  /* Create local representation */
  ierr = VecGetArray(*vv,&larray);CHKERRQ(ierr);
  ierr = VecCreateSeqWithArray(PETSC_COMM_SELF,n+bs*nghost,larray,&w->localrep);CHKERRQ(ierr);
  ierr = VecSetBlockSize(w->localrep,bs);CHKERRQ(ierr);
  ierr = PetscLogObjectParent(*vv,w->localrep);CHKERRQ(ierr);
  ierr = VecRestoreArray(*vv,&larray);CHKERRQ(ierr);

  /*
       Create scatter context for scattering (updating) ghost values
  */
  ierr = ISCreateBlock(comm,bs,nghost,ghosts,PETSC_COPY_VALUES,&from);CHKERRQ(ierr);
  ierr = ISCreateStride(PETSC_COMM_SELF,bs*nghost,n,1,&to);CHKERRQ(ierr);
  ierr = VecScatterCreate(*vv,from,w->localrep,to,&w->localupdate);CHKERRQ(ierr);
  ierr = PetscLogObjectParent(*vv,w->localupdate);CHKERRQ(ierr);
  ierr = ISDestroy(&to);CHKERRQ(ierr);
  ierr = ISDestroy(&from);CHKERRQ(ierr);

  /* set local to global mapping for ghosted vector */
  nb = n/bs;
  ierr = PetscMalloc((nb+nghost)*sizeof(PetscInt),&indices);CHKERRQ(ierr);
  ierr = VecGetOwnershipRange(*vv,&rstart,PETSC_NULL);CHKERRQ(ierr);
  for (i=0; i<nb; i++) {
    indices[i] = rstart + i*bs;
  }
  for (i=0; i<nghost; i++) {
    indices[nb+i] = ghosts[i];
  }
  ierr = ISLocalToGlobalMappingCreate(comm,nb+nghost,indices,PETSC_OWN_POINTER,&ltog);CHKERRQ(ierr);
  ierr = VecSetLocalToGlobalMappingBlock(*vv,ltog);CHKERRQ(ierr);
  ierr = ISLocalToGlobalMappingDestroy(&ltog);CHKERRQ(ierr);
  PetscFunctionReturn(0);
}

#undef __FUNCT__
#define __FUNCT__ "VecCreateGhostBlock"
/*@
   VecCreateGhostBlock - Creates a parallel vector with ghost padding on each processor.
        The indexing of the ghost points is done with blocks.

   Collective on MPI_Comm

   Input Parameters:
+  comm - the MPI communicator to use
.  bs - the block size
.  n - local vector length
.  N - global vector length (or PETSC_DECIDE to have it calculated if n is given)
.  nghost - number of local ghost blocks
-  ghosts - global indices of ghost blocks, counts are by block, not by individual index

   Output Parameter:
.  vv - the global vector representation (without ghost points as part of vector)

   Notes:
   Use VecGhostGetLocalForm() to access the local, ghosted representation
   of the vector.

   n is the local vector size (the total local size, not the number of blocks) while nghost
   is the number of blocks in the ghost portion, i.e. the number of elements in the ghost
   portion is bs*nghost
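
   Sample usage (a minimal sketch; nghost ghost blocks of size bs, where ghosts[] lists global block indices):
.vb
   VecCreateGhostBlock(PETSC_COMM_WORLD,bs,n,PETSC_DECIDE,nghost,ghosts,&vv);
   /* ... assemble vv ... */
   VecGhostUpdateBegin(vv,INSERT_VALUES,SCATTER_FORWARD);
   VecGhostUpdateEnd(vv,INSERT_VALUES,SCATTER_FORWARD);
.ve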

   Level: advanced

   Concepts: vectors^ghosted

.seealso: VecCreateSeq(), VecCreate(), VecDuplicate(), VecDuplicateVecs(), VecCreateMPI(),
          VecGhostGetLocalForm(), VecGhostRestoreLocalForm(),
          VecCreateGhostWithArray(), VecCreateMPIWithArray(), VecCreateGhostBlockWithArray()

@*/
PetscErrorCode  VecCreateGhostBlock(MPI_Comm comm,PetscInt bs,PetscInt n,PetscInt N,PetscInt nghost,const PetscInt ghosts[],Vec *vv)
{
  PetscErrorCode ierr;

  PetscFunctionBegin;
  ierr = VecCreateGhostBlockWithArray(comm,bs,n,N,nghost,ghosts,0,vv);CHKERRQ(ierr);
  PetscFunctionReturn(0);
}
806