xref: /petsc/src/vec/vec/impls/mpi/pbvec.c (revision 812af9f3ac2ea7d5a8e4b00e36af681f20625899)
1 #define PETSCVEC_DLL
2 /*
3    This file contains routines for Parallel vector operations.
4  */
5 #include "../src/vec/vec/impls/mpi/pvecimpl.h"   /*I  "petscvec.h"   I*/
6 
#if 0
/* VecPublish_MPI - Disabled stub; publishing of MPI vectors is currently
   compiled out.  Kept under "#if 0" so it can be re-enabled later. */
#undef __FUNCT__
#define __FUNCT__ "VecPublish_MPI"
static PetscErrorCode VecPublish_MPI(PetscObject obj)
{
  PetscFunctionBegin;
  PetscFunctionReturn(0);
}
#endif
16 
17 #undef __FUNCT__
18 #define __FUNCT__ "VecDot_MPI"
19 PetscErrorCode VecDot_MPI(Vec xin,Vec yin,PetscScalar *z)
20 {
21   PetscScalar    sum,work;
22   PetscErrorCode ierr;
23 
24   PetscFunctionBegin;
25   ierr = VecDot_Seq(xin,yin,&work);CHKERRQ(ierr);
26   ierr = MPI_Allreduce(&work,&sum,1,MPIU_SCALAR,MPIU_SUM,((PetscObject)xin)->comm);CHKERRQ(ierr);
27   *z = sum;
28   PetscFunctionReturn(0);
29 }
30 
31 #undef __FUNCT__
32 #define __FUNCT__ "VecTDot_MPI"
33 PetscErrorCode VecTDot_MPI(Vec xin,Vec yin,PetscScalar *z)
34 {
35   PetscScalar    sum,work;
36   PetscErrorCode ierr;
37 
38   PetscFunctionBegin;
39   ierr = VecTDot_Seq(xin,yin,&work);CHKERRQ(ierr);
40   ierr = MPI_Allreduce(&work,&sum,1,MPIU_SCALAR,MPIU_SUM,((PetscObject)xin)->comm);CHKERRQ(ierr);
41   *z   = sum;
42   PetscFunctionReturn(0);
43 }
44 
45 #undef __FUNCT__
46 #define __FUNCT__ "VecSetOption_MPI"
47 PetscErrorCode VecSetOption_MPI(Vec v,VecOption op,PetscTruth flag)
48 {
49   PetscFunctionBegin;
50   if (op == VEC_IGNORE_OFF_PROC_ENTRIES) {
51     v->stash.donotstash = flag;
52   } else if (op == VEC_IGNORE_NEGATIVE_INDICES) {
53     v->stash.ignorenegidx = flag;
54   }
55   PetscFunctionReturn(0);
56 }
57 
58 EXTERN PetscErrorCode VecDuplicate_MPI(Vec,Vec *);
59 EXTERN_C_BEGIN
60 EXTERN PetscErrorCode VecView_MPI_Draw(Vec,PetscViewer);
61 EXTERN_C_END
62 
63 #undef __FUNCT__
64 #define __FUNCT__ "VecPlaceArray_MPI"
65 PetscErrorCode VecPlaceArray_MPI(Vec vin,const PetscScalar *a)
66 {
67   PetscErrorCode ierr;
68   Vec_MPI        *v = (Vec_MPI *)vin->data;
69 
70   PetscFunctionBegin;
71   if (v->unplacedarray) SETERRQ(PETSC_COMM_SELF,PETSC_ERR_ARG_WRONGSTATE,"VecPlaceArray() was already called on this vector, without a call to VecResetArray()");
72   v->unplacedarray = v->array;  /* save previous array so reset can bring it back */
73   v->array = (PetscScalar *)a;
74   if (v->localrep) {
75     ierr = VecPlaceArray(v->localrep,a);CHKERRQ(ierr);
76   }
77   PetscFunctionReturn(0);
78 }
79 
80 #undef __FUNCT__
81 #define __FUNCT__ "VecResetArray_MPI"
82 PetscErrorCode VecResetArray_MPI(Vec vin)
83 {
84   Vec_MPI        *v = (Vec_MPI *)vin->data;
85   PetscErrorCode ierr;
86 
87   PetscFunctionBegin;
88   v->array         = v->unplacedarray;
89   v->unplacedarray = 0;
90   if (v->localrep) {
91     ierr = VecResetArray(v->localrep);CHKERRQ(ierr);
92   }
93   PetscFunctionReturn(0);
94 }
95 
96 EXTERN PetscErrorCode VecGetValues_MPI(Vec,PetscInt,const PetscInt [],PetscScalar []);
97 
/*
   Function table for VECMPI vectors.  Operations that require communication
   use the _MPI variants defined in this directory; purely local operations
   reuse the _Seq kernels, which act on the locally owned portion of the
   parallel vector.  The entry order must match struct _VecOps exactly;
   the numeric comments mark every tenth slot.  Zero entries are operations
   with no MPI-specific implementation.
*/
static struct _VecOps DvOps = { VecDuplicate_MPI, /* 1 */
            VecDuplicateVecs_Default,
            VecDestroyVecs_Default,
            VecDot_MPI,
            VecMDot_MPI,
            VecNorm_MPI,
            VecTDot_MPI,
            VecMTDot_MPI,
            VecScale_Seq,
            VecCopy_Seq, /* 10 */
            VecSet_Seq,
            VecSwap_Seq,
            VecAXPY_Seq,
            VecAXPBY_Seq,
            VecMAXPY_Seq,
            VecAYPX_Seq,
            VecWAXPY_Seq,
            VecAXPBYPCZ_Seq,
            VecPointwiseMult_Seq,
            VecPointwiseDivide_Seq,
            VecSetValues_MPI, /* 20 */
            VecAssemblyBegin_MPI,
            VecAssemblyEnd_MPI,
            VecGetArray_Seq,
            VecGetSize_MPI,
            VecGetSize_Seq,
            VecRestoreArray_Seq,
            VecMax_MPI,
            VecMin_MPI,
            VecSetRandom_Seq,
            VecSetOption_MPI,
            VecSetValuesBlocked_MPI,
            VecDestroy_MPI,
            VecView_MPI,
            VecPlaceArray_MPI,
            VecReplaceArray_Seq,
            VecDot_Seq,
            VecTDot_Seq,
            VecNorm_Seq,
            VecMDot_Seq,
            VecMTDot_Seq,
            VecLoad_Default,
            VecReciprocal_Default,
            VecConjugate_Seq,
            0,
            0,
            VecResetArray_MPI,
            0,
            VecMaxPointwiseDivide_Seq,
            VecPointwiseMax_Seq,
            VecPointwiseMaxAbs_Seq,
            VecPointwiseMin_Seq,
            VecGetValues_MPI,
            0,
            0,
            0,
            0,
            0,
            0,
            VecStrideGather_Default,
            VecStrideScatter_Default
};
160 
#undef __FUNCT__
#define __FUNCT__ "VecCreate_MPI_Private"
/*
    VecCreate_MPI_Private - Basic create routine called by VecCreate_MPI() (i.e. VecCreateMPI()),
    VecCreateMPIWithArray(), VecCreate_Shared() (i.e. VecCreateShared()), VecCreateGhost(),
    VecDuplicate_MPI(), VecCreateGhostWithArray(), and VecDuplicate_Shared()

    If alloc is true and array is PETSC_NULL then this routine allocates the space, otherwise
    no space is allocated.
*/
PetscErrorCode VecCreate_MPI_Private(Vec v,PetscTruth alloc,PetscInt nghost,const PetscScalar array[])
{
  Vec_MPI        *s;
  PetscErrorCode ierr;

  PetscFunctionBegin;

  /* allocate the MPI-specific data and install the MPI function table */
  ierr           = PetscNewLog(v,Vec_MPI,&s);CHKERRQ(ierr);
  v->data        = (void*)s;
  ierr           = PetscMemcpy(v->ops,&DvOps,sizeof(DvOps));CHKERRQ(ierr);
  s->nghost      = nghost;
  v->mapping     = 0;
  v->bmapping    = 0;
  v->petscnative = PETSC_TRUE;

  /* -1 means the block size was never set; default to 1 before layout setup */
  if (v->map->bs == -1) v->map->bs = 1;
  ierr = PetscLayoutSetUp(v->map);CHKERRQ(ierr);
  s->array           = (PetscScalar *)array;
  s->array_allocated = 0;
  if (alloc && !array) {
    /* allocate owned entries plus ghost slots; remember the pointer so
       VecDestroy_MPI() knows this storage is ours to free */
    PetscInt n         = v->map->n+nghost;
    ierr               = PetscMalloc(n*sizeof(PetscScalar),&s->array);CHKERRQ(ierr);
    ierr               = PetscLogObjectMemory(v,n*sizeof(PetscScalar));CHKERRQ(ierr);
    /* note: only the owned part (map->n entries) is zeroed here; the ghost
       slots are left uninitialized until a ghost update fills them */
    ierr               = PetscMemzero(s->array,v->map->n*sizeof(PetscScalar));CHKERRQ(ierr);
    s->array_allocated = s->array;
  }

  /* By default parallel vectors do not have local representation */
  s->localrep    = 0;
  s->localupdate = 0;

  v->stash.insertmode  = NOT_SET_VALUES;
  /* create the stashes. The block-size for bstash is set later when
     VecSetValuesBlocked is called.
  */
  ierr = VecStashCreate_Private(((PetscObject)v)->comm,1,&v->stash);CHKERRQ(ierr);
  ierr = VecStashCreate_Private(((PetscObject)v)->comm,v->map->bs,&v->bstash);CHKERRQ(ierr);

#if defined(PETSC_HAVE_MATLAB_ENGINE)
  ierr = PetscObjectComposeFunctionDynamic((PetscObject)v,"PetscMatlabEnginePut_C","VecMatlabEnginePut_Default",VecMatlabEnginePut_Default);CHKERRQ(ierr);
  ierr = PetscObjectComposeFunctionDynamic((PetscObject)v,"PetscMatlabEngineGet_C","VecMatlabEngineGet_Default",VecMatlabEngineGet_Default);CHKERRQ(ierr);
#endif
  ierr = PetscObjectChangeTypeName((PetscObject)v,VECMPI);CHKERRQ(ierr);
  ierr = PetscPublishAll(v);CHKERRQ(ierr);
  PetscFunctionReturn(0);
}
217 
218 /*MC
219    VECMPI - VECMPI = "mpi" - The basic parallel vector
220 
221    Options Database Keys:
222 . -vec_type mpi - sets the vector type to VECMPI during a call to VecSetFromOptions()
223 
224   Level: beginner
225 
.seealso: VecCreate(), VecSetType(), VecSetFromOptions(), VecCreateMPIWithArray(), VECMPI, VecType, VecCreateMPI()
227 M*/
228 
229 EXTERN_C_BEGIN
230 #undef __FUNCT__
231 #define __FUNCT__ "VecCreate_MPI"
232 PetscErrorCode PETSCVEC_DLLEXPORT VecCreate_MPI(Vec vv)
233 {
234   PetscErrorCode ierr;
235 
236   PetscFunctionBegin;
237   ierr = VecCreate_MPI_Private(vv,PETSC_TRUE,0,0);CHKERRQ(ierr);
238   PetscFunctionReturn(0);
239 }
240 EXTERN_C_END
241 
242 /*MC
243    VECSTANDARD = "standard" - A VECSEQ on one process and VECMPI on more than one process
244 
245    Options Database Keys:
246 . -vec_type standard - sets a vector type to standard on calls to VecSetFromOptions
247 
248   Level: beginner
249 
250 .seealso: VecCreateSeq(), VecCreateMPI()
251 M*/
252 
253 EXTERN_C_BEGIN
254 #undef __FUNCT__
255 #define __FUNCT__ "VecCreate_Standard"
256 PetscErrorCode PETSCMAT_DLLEXPORT VecCreate_Standard(Vec v)
257 {
258   PetscErrorCode ierr;
259   PetscMPIInt    size;
260 
261   PetscFunctionBegin;
262   ierr = MPI_Comm_size(((PetscObject)v)->comm,&size);CHKERRQ(ierr);
263   if (size == 1) {
264     ierr = VecSetType(v,VECSEQ);CHKERRQ(ierr);
265   } else {
266     ierr = VecSetType(v,VECMPI);CHKERRQ(ierr);
267   }
268   PetscFunctionReturn(0);
269 }
270 EXTERN_C_END
271 
272 
#undef __FUNCT__
#define __FUNCT__ "VecCreateMPIWithArray"
/*@C
   VecCreateMPIWithArray - Creates a parallel, array-style vector,
   where the user provides the array space to store the vector values.

   Collective on MPI_Comm

   Input Parameters:
+  comm  - the MPI communicator to use
.  n     - local vector length, cannot be PETSC_DECIDE
.  N     - global vector length (or PETSC_DECIDE to have calculated)
-  array - the user provided array to store the vector values

   Output Parameter:
.  vv - the vector

   Notes:
   Use VecDuplicate() or VecDuplicateVecs() to form additional vectors of the
   same type as an existing vector.

   If the user-provided array is PETSC_NULL, then VecPlaceArray() can be used
   at a later stage to SET the array for storing the vector values.

   PETSc does NOT free the array when the vector is destroyed via VecDestroy().
   The user should not free the array until the vector is destroyed.

   Level: intermediate

   Concepts: vectors^creating with array

.seealso: VecCreateSeqWithArray(), VecCreate(), VecDuplicate(), VecDuplicateVecs(), VecCreateGhost(),
          VecCreateMPI(), VecCreateGhostWithArray(), VecPlaceArray()

@*/
PetscErrorCode PETSCVEC_DLLEXPORT VecCreateMPIWithArray(MPI_Comm comm,PetscInt n,PetscInt N,const PetscScalar array[],Vec *vv)
{
  PetscErrorCode ierr;

  PetscFunctionBegin;
  /* the local size is mandatory because the caller sized the array */
  if (n == PETSC_DECIDE) SETERRQ(PETSC_COMM_SELF,PETSC_ERR_ARG_OUTOFRANGE,"Must set local size of vector");
  ierr = PetscSplitOwnership(comm,&n,&N);CHKERRQ(ierr);
  ierr = VecCreate(comm,vv);CHKERRQ(ierr);
  ierr = VecSetSizes(*vv,n,N);CHKERRQ(ierr);
  /* alloc = PETSC_FALSE: the vector adopts (but never frees) the user array */
  ierr = VecCreate_MPI_Private(*vv,PETSC_FALSE,0,array);CHKERRQ(ierr);
  PetscFunctionReturn(0);
}
320 
321 #undef __FUNCT__
322 #define __FUNCT__ "VecGhostStateSync_Private"
323 /*
324   This is used in VecGhostGetLocalForm and VecGhostRestoreLocalForm to ensure
325   that the state is updated if either vector has changed since the last time
326   one of these functions was called.  It could apply to any PetscObject, but
327   VecGhost is quite different from other objects in that two separate vectors
328   look at the same memory.
329 
330   In principle, we could only propagate state to the local vector on
331   GetLocalForm and to the global vector on RestoreLocalForm, but this version is
332   more conservative (i.e. robust against misuse) and simpler.
333 
334   Note that this function is correct and changes nothing if both arguments are the
335   same, which is the case in serial.
336 */
337 static PetscErrorCode VecGhostStateSync_Private(Vec g,Vec l)
338 {
339   PetscErrorCode ierr;
340   PetscInt       gstate,lstate;
341 
342   PetscFunctionBegin;
343   ierr = PetscObjectStateQuery((PetscObject)g,&gstate);CHKERRQ(ierr);
344   ierr = PetscObjectStateQuery((PetscObject)l,&lstate);CHKERRQ(ierr);
345   ierr = PetscObjectSetState((PetscObject)g,PetscMax(gstate,lstate));CHKERRQ(ierr);
346   ierr = PetscObjectSetState((PetscObject)l,PetscMax(gstate,lstate));CHKERRQ(ierr);
347   PetscFunctionReturn(0);
348 }
349 
350 #undef __FUNCT__
351 #define __FUNCT__ "VecGhostGetLocalForm"
352 /*@
353     VecGhostGetLocalForm - Obtains the local ghosted representation of
354     a parallel vector created with VecCreateGhost().
355 
356     Not Collective
357 
358     Input Parameter:
359 .   g - the global vector. Vector must be have been obtained with either
360         VecCreateGhost(), VecCreateGhostWithArray() or VecCreateSeq().
361 
362     Output Parameter:
363 .   l - the local (ghosted) representation
364 
365     Notes:
366     This routine does not actually update the ghost values, but rather it
367     returns a sequential vector that includes the locations for the ghost
368     values and their current values. The returned vector and the original
369     vector passed in share the same array that contains the actual vector data.
370 
371     One should call VecGhostRestoreLocalForm() or VecDestroy() once one is
372     finished using the object.
373 
374     Level: advanced
375 
376    Concepts: vectors^ghost point access
377 
378 .seealso: VecCreateGhost(), VecGhostRestoreLocalForm(), VecCreateGhostWithArray()
379 
380 @*/
381 PetscErrorCode PETSCVEC_DLLEXPORT VecGhostGetLocalForm(Vec g,Vec *l)
382 {
383   PetscErrorCode ierr;
384   PetscTruth     isseq,ismpi;
385 
386   PetscFunctionBegin;
387   PetscValidHeaderSpecific(g,VEC_CLASSID,1);
388   PetscValidPointer(l,2);
389 
390   ierr = PetscTypeCompare((PetscObject)g,VECSEQ,&isseq);CHKERRQ(ierr);
391   ierr = PetscTypeCompare((PetscObject)g,VECMPI,&ismpi);CHKERRQ(ierr);
392   if (ismpi) {
393     Vec_MPI *v  = (Vec_MPI*)g->data;
394     if (!v->localrep) SETERRQ(PETSC_COMM_SELF,PETSC_ERR_ARG_WRONG,"Vector is not ghosted");
395     *l = v->localrep;
396   } else if (isseq) {
397     *l = g;
398   } else {
399     SETERRQ1(PETSC_COMM_SELF,PETSC_ERR_ARG_WRONG,"Vector type %s does not have local representation",((PetscObject)g)->type_name);
400   }
401   ierr = VecGhostStateSync_Private(g,*l);CHKERRQ(ierr);
402   ierr = PetscObjectReference((PetscObject)*l);CHKERRQ(ierr);
403   PetscFunctionReturn(0);
404 }
405 
406 #undef __FUNCT__
407 #define __FUNCT__ "VecGhostRestoreLocalForm"
408 /*@
409     VecGhostRestoreLocalForm - Restores the local ghosted representation of
410     a parallel vector obtained with VecGhostGetLocalForm().
411 
412     Not Collective
413 
414     Input Parameter:
415 +   g - the global vector
416 -   l - the local (ghosted) representation
417 
418     Notes:
419     This routine does not actually update the ghost values, but rather it
420     returns a sequential vector that includes the locations for the ghost values
421     and their current values.
422 
423     Level: advanced
424 
425 .seealso: VecCreateGhost(), VecGhostGetLocalForm(), VecCreateGhostWithArray()
426 @*/
427 PetscErrorCode PETSCVEC_DLLEXPORT VecGhostRestoreLocalForm(Vec g,Vec *l)
428 {
429   PetscErrorCode ierr;
430 
431   PetscFunctionBegin;
432   ierr = VecGhostStateSync_Private(g,*l);CHKERRQ(ierr);
433   ierr = PetscObjectDereference((PetscObject)*l);CHKERRQ(ierr);
434   PetscFunctionReturn(0);
435 }
436 
437 #undef __FUNCT__
438 #define __FUNCT__ "VecGhostUpdateBegin"
439 /*@
440    VecGhostUpdateBegin - Begins the vector scatter to update the vector from
441    local representation to global or global representation to local.
442 
443    Neighbor-wise Collective on Vec
444 
445    Input Parameters:
446 +  g - the vector (obtained with VecCreateGhost() or VecDuplicate())
447 .  insertmode - one of ADD_VALUES or INSERT_VALUES
448 -  scattermode - one of SCATTER_FORWARD or SCATTER_REVERSE
449 
450    Notes:
451    Use the following to update the ghost regions with correct values from the owning process
452 .vb
453        VecGhostUpdateBegin(v,INSERT_VALUES,SCATTER_FORWARD);
454        VecGhostUpdateEnd(v,INSERT_VALUES,SCATTER_FORWARD);
455 .ve
456 
457    Use the following to accumulate the ghost region values onto the owning processors
458 .vb
459        VecGhostUpdateBegin(v,ADD_VALUES,SCATTER_REVERSE);
460        VecGhostUpdateEnd(v,ADD_VALUES,SCATTER_REVERSE);
461 .ve
462 
463    To accumulate the ghost region values onto the owning processors and then update
464    the ghost regions correctly, call the later followed by the former, i.e.,
465 .vb
466        VecGhostUpdateBegin(v,ADD_VALUES,SCATTER_REVERSE);
467        VecGhostUpdateEnd(v,ADD_VALUES,SCATTER_REVERSE);
468        VecGhostUpdateBegin(v,INSERT_VALUES,SCATTER_FORWARD);
469        VecGhostUpdateEnd(v,INSERT_VALUES,SCATTER_FORWARD);
470 .ve
471 
472    Level: advanced
473 
474 .seealso: VecCreateGhost(), VecGhostUpdateEnd(), VecGhostGetLocalForm(),
475           VecGhostRestoreLocalForm(),VecCreateGhostWithArray()
476 
477 @*/
478 PetscErrorCode PETSCVEC_DLLEXPORT VecGhostUpdateBegin(Vec g,InsertMode insertmode,ScatterMode scattermode)
479 {
480   Vec_MPI        *v;
481   PetscErrorCode ierr;
482 
483   PetscFunctionBegin;
484   PetscValidHeaderSpecific(g,VEC_CLASSID,1);
485 
486   v  = (Vec_MPI*)g->data;
487   if (!v->localrep) SETERRQ(PETSC_COMM_SELF,PETSC_ERR_ARG_WRONG,"Vector is not ghosted");
488   if (!v->localupdate) PetscFunctionReturn(0);
489 
490   if (scattermode == SCATTER_REVERSE) {
491     ierr = VecScatterBegin(v->localupdate,v->localrep,g,insertmode,scattermode);CHKERRQ(ierr);
492   } else {
493     ierr = VecScatterBegin(v->localupdate,g,v->localrep,insertmode,scattermode);CHKERRQ(ierr);
494   }
495   PetscFunctionReturn(0);
496 }
497 
498 #undef __FUNCT__
499 #define __FUNCT__ "VecGhostUpdateEnd"
500 /*@
501    VecGhostUpdateEnd - End the vector scatter to update the vector from
502    local representation to global or global representation to local.
503 
504    Neighbor-wise Collective on Vec
505 
506    Input Parameters:
507 +  g - the vector (obtained with VecCreateGhost() or VecDuplicate())
508 .  insertmode - one of ADD_VALUES or INSERT_VALUES
509 -  scattermode - one of SCATTER_FORWARD or SCATTER_REVERSE
510 
511    Notes:
512 
513    Use the following to update the ghost regions with correct values from the owning process
514 .vb
515        VecGhostUpdateBegin(v,INSERT_VALUES,SCATTER_FORWARD);
516        VecGhostUpdateEnd(v,INSERT_VALUES,SCATTER_FORWARD);
517 .ve
518 
519    Use the following to accumulate the ghost region values onto the owning processors
520 .vb
521        VecGhostUpdateBegin(v,ADD_VALUES,SCATTER_REVERSE);
522        VecGhostUpdateEnd(v,ADD_VALUES,SCATTER_REVERSE);
523 .ve
524 
525    To accumulate the ghost region values onto the owning processors and then update
526    the ghost regions correctly, call the later followed by the former, i.e.,
527 .vb
528        VecGhostUpdateBegin(v,ADD_VALUES,SCATTER_REVERSE);
529        VecGhostUpdateEnd(v,ADD_VALUES,SCATTER_REVERSE);
530        VecGhostUpdateBegin(v,INSERT_VALUES,SCATTER_FORWARD);
531        VecGhostUpdateEnd(v,INSERT_VALUES,SCATTER_FORWARD);
532 .ve
533 
534    Level: advanced
535 
536 .seealso: VecCreateGhost(), VecGhostUpdateBegin(), VecGhostGetLocalForm(),
537           VecGhostRestoreLocalForm(),VecCreateGhostWithArray()
538 
539 @*/
540 PetscErrorCode PETSCVEC_DLLEXPORT VecGhostUpdateEnd(Vec g,InsertMode insertmode,ScatterMode scattermode)
541 {
542   Vec_MPI        *v;
543   PetscErrorCode ierr;
544 
545   PetscFunctionBegin;
546   PetscValidHeaderSpecific(g,VEC_CLASSID,1);
547 
548   v  = (Vec_MPI*)g->data;
549   if (!v->localrep) SETERRQ(PETSC_COMM_SELF,PETSC_ERR_ARG_WRONG,"Vector is not ghosted");
550   if (!v->localupdate) PetscFunctionReturn(0);
551 
552   if (scattermode == SCATTER_REVERSE) {
553     ierr = VecScatterEnd(v->localupdate,v->localrep,g,insertmode,scattermode);CHKERRQ(ierr);
554   } else {
555     ierr = VecScatterEnd(v->localupdate,g,v->localrep,insertmode,scattermode);CHKERRQ(ierr);
556   }
557   PetscFunctionReturn(0);
558 }
559 
560 #undef __FUNCT__
561 #define __FUNCT__ "VecCreateGhostWithArray"
562 /*@C
563    VecCreateGhostWithArray - Creates a parallel vector with ghost padding on each processor;
564    the caller allocates the array space.
565 
566    Collective on MPI_Comm
567 
568    Input Parameters:
569 +  comm - the MPI communicator to use
570 .  n - local vector length
571 .  N - global vector length (or PETSC_DECIDE to have calculated if n is given)
572 .  nghost - number of local ghost points
573 .  ghosts - global indices of ghost points (or PETSC_NULL if not needed)
574 -  array - the space to store the vector values (as long as n + nghost)
575 
576    Output Parameter:
577 .  vv - the global vector representation (without ghost points as part of vector)
578 
579    Notes:
580    Use VecGhostGetLocalForm() to access the local, ghosted representation
581    of the vector.
582 
583    This also automatically sets the ISLocalToGlobalMapping() for this vector.
584 
585    Level: advanced
586 
587    Concepts: vectors^creating with array
588    Concepts: vectors^ghosted
589 
590 .seealso: VecCreate(), VecGhostGetLocalForm(), VecGhostRestoreLocalForm(),
591           VecCreateGhost(), VecCreateSeqWithArray(), VecCreateMPIWithArray(),
592           VecCreateGhostBlock(), VecCreateGhostBlockWithArray()
593 
594 @*/
595 PetscErrorCode PETSCVEC_DLLEXPORT VecCreateGhostWithArray(MPI_Comm comm,PetscInt n,PetscInt N,PetscInt nghost,const PetscInt ghosts[],const PetscScalar array[],Vec *vv)
596 {
597   PetscErrorCode         ierr;
598   Vec_MPI                *w;
599   PetscScalar            *larray;
600   IS                     from,to;
601   ISLocalToGlobalMapping ltog;
602   PetscInt               rstart,i,*indices;
603 
604   PetscFunctionBegin;
605   *vv = 0;
606 
607   if (n == PETSC_DECIDE)      SETERRQ(PETSC_COMM_SELF,PETSC_ERR_ARG_OUTOFRANGE,"Must set local size");
608   if (nghost == PETSC_DECIDE) SETERRQ(PETSC_COMM_SELF,PETSC_ERR_ARG_OUTOFRANGE,"Must set local ghost size");
609   if (nghost < 0)             SETERRQ(PETSC_COMM_SELF,PETSC_ERR_ARG_OUTOFRANGE,"Ghost length must be >= 0");
610   ierr = PetscSplitOwnership(comm,&n,&N);CHKERRQ(ierr);
611   /* Create global representation */
612   ierr = VecCreate(comm,vv);CHKERRQ(ierr);
613   ierr = VecSetSizes(*vv,n,N);CHKERRQ(ierr);
614   ierr = VecCreate_MPI_Private(*vv,PETSC_TRUE,nghost,array);CHKERRQ(ierr);
615   w    = (Vec_MPI *)(*vv)->data;
616   /* Create local representation */
617   ierr = VecGetArrayPrivate(*vv,&larray);CHKERRQ(ierr);
618   ierr = VecCreateSeqWithArray(PETSC_COMM_SELF,n+nghost,larray,&w->localrep);CHKERRQ(ierr);
619   ierr = PetscLogObjectParent(*vv,w->localrep);CHKERRQ(ierr);
620   ierr = VecRestoreArrayPrivate(*vv,&larray);CHKERRQ(ierr);
621 
622   /*
623        Create scatter context for scattering (updating) ghost values
624   */
625   ierr = ISCreateGeneral(comm,nghost,ghosts,&from);CHKERRQ(ierr);
626   ierr = ISCreateStride(PETSC_COMM_SELF,nghost,n,1,&to);CHKERRQ(ierr);
627   ierr = VecScatterCreate(*vv,from,w->localrep,to,&w->localupdate);CHKERRQ(ierr);
628   ierr = PetscLogObjectParent(*vv,w->localupdate);CHKERRQ(ierr);
629   ierr = ISDestroy(to);CHKERRQ(ierr);
630   ierr = ISDestroy(from);CHKERRQ(ierr);
631 
632   /* set local to global mapping for ghosted vector */
633   ierr = PetscMalloc((n+nghost)*sizeof(PetscInt),&indices);CHKERRQ(ierr);
634   ierr = VecGetOwnershipRange(*vv,&rstart,PETSC_NULL);CHKERRQ(ierr);
635   for (i=0; i<n; i++) {
636     indices[i] = rstart + i;
637   }
638   for (i=0; i<nghost; i++) {
639     indices[n+i] = ghosts[i];
640   }
641   ierr = ISLocalToGlobalMappingCreate(comm,n+nghost,indices,&ltog);CHKERRQ(ierr);
642   ierr = PetscFree(indices);CHKERRQ(ierr);
643   ierr = VecSetLocalToGlobalMapping(*vv,ltog);CHKERRQ(ierr);
644   ierr = ISLocalToGlobalMappingDestroy(ltog);CHKERRQ(ierr);
645   ierr = PetscFree(indices);CHKERRQ(ierr);
646   PetscFunctionReturn(0);
647 }
648 
649 #undef __FUNCT__
650 #define __FUNCT__ "VecCreateGhost"
651 /*@
652    VecCreateGhost - Creates a parallel vector with ghost padding on each processor.
653 
654    Collective on MPI_Comm
655 
656    Input Parameters:
657 +  comm - the MPI communicator to use
658 .  n - local vector length
659 .  N - global vector length (or PETSC_DECIDE to have calculated if n is given)
660 .  nghost - number of local ghost points
661 -  ghosts - global indices of ghost points
662 
663    Output Parameter:
664 .  vv - the global vector representation (without ghost points as part of vector)
665 
666    Notes:
667    Use VecGhostGetLocalForm() to access the local, ghosted representation
668    of the vector.
669 
670    This also automatically sets the ISLocalToGlobalMapping() for this vector.
671 
672    Level: advanced
673 
674    Concepts: vectors^ghosted
675 
676 .seealso: VecCreateSeq(), VecCreate(), VecDuplicate(), VecDuplicateVecs(), VecCreateMPI(),
677           VecGhostGetLocalForm(), VecGhostRestoreLocalForm(), VecGhostUpdateBegin(),
678           VecCreateGhostWithArray(), VecCreateMPIWithArray(), VecGhostUpdateEnd(),
679           VecCreateGhostBlock(), VecCreateGhostBlockWithArray()
680 
681 @*/
682 PetscErrorCode PETSCVEC_DLLEXPORT VecCreateGhost(MPI_Comm comm,PetscInt n,PetscInt N,PetscInt nghost,const PetscInt ghosts[],Vec *vv)
683 {
684   PetscErrorCode ierr;
685 
686   PetscFunctionBegin;
687   ierr = VecCreateGhostWithArray(comm,n,N,nghost,ghosts,0,vv);CHKERRQ(ierr);
688   PetscFunctionReturn(0);
689 }
690 
#undef __FUNCT__
#define __FUNCT__ "VecDuplicate_MPI"
/*
   VecDuplicate_MPI - Creates a new MPI vector with the same layout, type,
   ghosting, stash settings, and mappings as win (values are NOT copied).
*/
PetscErrorCode VecDuplicate_MPI(Vec win,Vec *v)
{
  PetscErrorCode ierr;
  Vec_MPI        *vw,*w = (Vec_MPI *)win->data;
  PetscScalar    *array;

  PetscFunctionBegin;
  ierr = VecCreate(((PetscObject)win)->comm,v);CHKERRQ(ierr);

  /* use the map that exists already in win: drop the freshly created map and
     share win's layout by reference counting */
  ierr = PetscLayoutDestroy((*v)->map);CHKERRQ(ierr);
  (*v)->map = win->map;
  win->map->refcnt++;

  ierr = VecCreate_MPI_Private(*v,PETSC_TRUE,w->nghost,0);CHKERRQ(ierr);
  vw   = (Vec_MPI *)(*v)->data;
  /* copy the (possibly type-specialized) function table from win */
  ierr = PetscMemcpy((*v)->ops,win->ops,sizeof(struct _VecOps));CHKERRQ(ierr);

  /* save local representation of the parallel vector (and scatter) if it exists */
  if (w->localrep) {
    /* the local form aliases the new vector's storage (owned + ghost slots) */
    ierr = VecGetArrayPrivate(*v,&array);CHKERRQ(ierr);
    ierr = VecCreateSeqWithArray(PETSC_COMM_SELF,win->map->n+w->nghost,array,&vw->localrep);CHKERRQ(ierr);
    ierr = PetscMemcpy(vw->localrep->ops,w->localrep->ops,sizeof(struct _VecOps));CHKERRQ(ierr);
    ierr = VecRestoreArrayPrivate(*v,&array);CHKERRQ(ierr);
    ierr = PetscLogObjectParent(*v,vw->localrep);CHKERRQ(ierr);
    /* the ghost-update scatter is shared, not copied */
    vw->localupdate = w->localupdate;
    if (vw->localupdate) {
      ierr = PetscObjectReference((PetscObject)vw->localupdate);CHKERRQ(ierr);
    }
  }

  /* New vector should inherit stashing property of parent */
  (*v)->stash.donotstash = win->stash.donotstash;
  (*v)->stash.ignorenegidx = win->stash.ignorenegidx;

  /* duplicate attached objects and composed functions */
  ierr = PetscOListDuplicate(((PetscObject)win)->olist,&((PetscObject)(*v))->olist);CHKERRQ(ierr);
  ierr = PetscFListDuplicate(((PetscObject)win)->qlist,&((PetscObject)(*v))->qlist);CHKERRQ(ierr);
  /* share local-to-global mappings by reference, if present */
  if (win->mapping) {
    ierr = PetscObjectReference((PetscObject)win->mapping);CHKERRQ(ierr);
    (*v)->mapping = win->mapping;
  }
  if (win->bmapping) {
    ierr = PetscObjectReference((PetscObject)win->bmapping);CHKERRQ(ierr);
    (*v)->bmapping = win->bmapping;
  }
  (*v)->map->bs    = win->map->bs;
  (*v)->bstash.bs = win->bstash.bs;

  PetscFunctionReturn(0);
}
743 
744 /* ------------------------------------------------------------------------------------------*/
745 #undef __FUNCT__
746 #define __FUNCT__ "VecCreateGhostBlockWithArray"
747 /*@C
748    VecCreateGhostBlockWithArray - Creates a parallel vector with ghost padding on each processor;
749    the caller allocates the array space. Indices in the ghost region are based on blocks.
750 
751    Collective on MPI_Comm
752 
753    Input Parameters:
754 +  comm - the MPI communicator to use
755 .  bs - block size
756 .  n - local vector length
757 .  N - global vector length (or PETSC_DECIDE to have calculated if n is given)
758 .  nghost - number of local ghost blocks
759 .  ghosts - global indices of ghost blocks (or PETSC_NULL if not needed)
760 -  array - the space to store the vector values (as long as n + nghost*bs)
761 
762    Output Parameter:
763 .  vv - the global vector representation (without ghost points as part of vector)
764 
765    Notes:
766    Use VecGhostGetLocalForm() to access the local, ghosted representation
767    of the vector.
768 
769    n is the local vector size (total local size not the number of blocks) while nghost
770    is the number of blocks in the ghost portion, i.e. the number of elements in the ghost
771    portion is bs*nghost
772 
773    Level: advanced
774 
775    Concepts: vectors^creating ghosted
776    Concepts: vectors^creating with array
777 
778 .seealso: VecCreate(), VecGhostGetLocalForm(), VecGhostRestoreLocalForm(),
779           VecCreateGhost(), VecCreateSeqWithArray(), VecCreateMPIWithArray(),
780           VecCreateGhostWithArray(), VecCreateGhostBlocked()
781 
782 @*/
783 PetscErrorCode PETSCVEC_DLLEXPORT VecCreateGhostBlockWithArray(MPI_Comm comm,PetscInt bs,PetscInt n,PetscInt N,PetscInt nghost,const PetscInt ghosts[],const PetscScalar array[],Vec *vv)
784 {
785   PetscErrorCode ierr;
786   Vec_MPI        *w;
787   PetscScalar    *larray;
788   IS             from,to;
789   ISLocalToGlobalMapping ltog;
790   PetscInt       rstart,i,nb,*indices;
791 
792   PetscFunctionBegin;
793   *vv = 0;
794 
795   if (n == PETSC_DECIDE)      SETERRQ(PETSC_COMM_SELF,PETSC_ERR_ARG_OUTOFRANGE,"Must set local size");
796   if (nghost == PETSC_DECIDE) SETERRQ(PETSC_COMM_SELF,PETSC_ERR_ARG_OUTOFRANGE,"Must set local ghost size");
797   if (nghost < 0)             SETERRQ(PETSC_COMM_SELF,PETSC_ERR_ARG_OUTOFRANGE,"Ghost length must be >= 0");
798   if (n % bs)                 SETERRQ(PETSC_COMM_SELF,PETSC_ERR_ARG_INCOMP,"Local size must be a multiple of block size");
799   ierr = PetscSplitOwnership(comm,&n,&N);CHKERRQ(ierr);
800   /* Create global representation */
801   ierr = VecCreate(comm,vv);CHKERRQ(ierr);
802   ierr = VecSetSizes(*vv,n,N);CHKERRQ(ierr);
803   ierr = VecCreate_MPI_Private(*vv,PETSC_TRUE,nghost*bs,array);CHKERRQ(ierr);
804   ierr = VecSetBlockSize(*vv,bs);CHKERRQ(ierr);
805   w    = (Vec_MPI *)(*vv)->data;
806   /* Create local representation */
807   ierr = VecGetArrayPrivate(*vv,&larray);CHKERRQ(ierr);
808   ierr = VecCreateSeqWithArray(PETSC_COMM_SELF,n+bs*nghost,larray,&w->localrep);CHKERRQ(ierr);
809   ierr = VecSetBlockSize(w->localrep,bs);CHKERRQ(ierr);
810   ierr = PetscLogObjectParent(*vv,w->localrep);CHKERRQ(ierr);
811   ierr = VecRestoreArrayPrivate(*vv,&larray);CHKERRQ(ierr);
812 
813   /*
814        Create scatter context for scattering (updating) ghost values
815   */
816   ierr = ISCreateBlock(comm,bs,nghost,ghosts,&from);CHKERRQ(ierr);
817   ierr = ISCreateStride(PETSC_COMM_SELF,bs*nghost,n,1,&to);CHKERRQ(ierr);
818   ierr = VecScatterCreate(*vv,from,w->localrep,to,&w->localupdate);CHKERRQ(ierr);
819   ierr = PetscLogObjectParent(*vv,w->localupdate);CHKERRQ(ierr);
820   ierr = ISDestroy(to);CHKERRQ(ierr);
821   ierr = ISDestroy(from);CHKERRQ(ierr);
822 
823   /* set local to global mapping for ghosted vector */
824   nb = n/bs;
825   ierr = PetscMalloc((nb+nghost)*sizeof(PetscInt),&indices);CHKERRQ(ierr);
826   ierr = VecGetOwnershipRange(*vv,&rstart,PETSC_NULL);CHKERRQ(ierr);
827   for (i=0; i<nb; i++) {
828     indices[i] = rstart + i*bs;
829   }
830   for (i=0; i<nghost; i++) {
831     indices[nb+i] = ghosts[i];
832   }
833   ierr = ISLocalToGlobalMappingCreate(comm,nb+nghost,indices,&ltog);CHKERRQ(ierr);
834   ierr = PetscFree(indices);CHKERRQ(ierr);
835   ierr = VecSetLocalToGlobalMappingBlock(*vv,ltog);CHKERRQ(ierr);
836   ierr = ISLocalToGlobalMappingDestroy(ltog);CHKERRQ(ierr);
837   ierr = PetscFree(indices);CHKERRQ(ierr);
838 
839   PetscFunctionReturn(0);
840 }
841 
842 #undef __FUNCT__
843 #define __FUNCT__ "VecCreateGhostBlock"
844 /*@
845    VecCreateGhostBlock - Creates a parallel vector with ghost padding on each processor.
846         The indicing of the ghost points is done with blocks.
847 
848    Collective on MPI_Comm
849 
850    Input Parameters:
851 +  comm - the MPI communicator to use
852 .  bs - the block size
853 .  n - local vector length
854 .  N - global vector length (or PETSC_DECIDE to have calculated if n is given)
855 .  nghost - number of local ghost blocks
856 -  ghosts - global indices of ghost blocks
857 
858    Output Parameter:
859 .  vv - the global vector representation (without ghost points as part of vector)
860 
861    Notes:
862    Use VecGhostGetLocalForm() to access the local, ghosted representation
863    of the vector.
864 
865    n is the local vector size (total local size not the number of blocks) while nghost
866    is the number of blocks in the ghost portion, i.e. the number of elements in the ghost
867    portion is bs*nghost
868 
869    Level: advanced
870 
871    Concepts: vectors^ghosted
872 
873 .seealso: VecCreateSeq(), VecCreate(), VecDuplicate(), VecDuplicateVecs(), VecCreateMPI(),
874           VecGhostGetLocalForm(), VecGhostRestoreLocalForm(),
875           VecCreateGhostWithArray(), VecCreateMPIWithArray(), VecCreateGhostBlockWithArray()
876 
877 @*/
878 PetscErrorCode PETSCVEC_DLLEXPORT VecCreateGhostBlock(MPI_Comm comm,PetscInt bs,PetscInt n,PetscInt N,PetscInt nghost,const PetscInt ghosts[],Vec *vv)
879 {
880   PetscErrorCode ierr;
881 
882   PetscFunctionBegin;
883   ierr = VecCreateGhostBlockWithArray(comm,bs,n,N,nghost,ghosts,0,vv);CHKERRQ(ierr);
884   PetscFunctionReturn(0);
885 }
886 
887 /*
888     These introduce a ghosted vector where the ghosting is determined by the call to
889   VecSetLocalToGlobalMapping()
890 */
891 
#undef __FUNCT__
#define __FUNCT__ "VecSetLocalToGlobalMapping_FETI"
/*
   VecSetLocalToGlobalMapping_FETI - Installs a local-to-global mapping on a FETI
   vector and enlarges the local storage so it can also hold the ghost entries,
   then builds the sequential local representation over that storage.

   The ghost count is the difference between the mapping's local size (map->n)
   and the vector's owned local size (vv->map->n).

   NOTE(review): the previously allocated array is freed and replaced by a larger,
   uninitialized one, so any values already stored in the vector are discarded --
   confirm callers always set the mapping before filling the vector.
*/
PetscErrorCode VecSetLocalToGlobalMapping_FETI(Vec vv,ISLocalToGlobalMapping map)
{
  PetscErrorCode ierr;
  Vec_MPI        *v = (Vec_MPI *)vv->data;

  PetscFunctionBegin;
  /* ghosts = mapped local size - owned local size */
  v->nghost = map->n - vv->map->n;

  /* we need to make longer the array space that was allocated when the vector was created */
  ierr     = PetscFree(v->array_allocated);CHKERRQ(ierr);
  ierr     = PetscMalloc(map->n*sizeof(PetscScalar),&v->array_allocated);CHKERRQ(ierr);
  v->array = v->array_allocated;

  /* Create local representation: a sequential vector sharing the (owned + ghost) storage */
  ierr = VecCreateSeqWithArray(PETSC_COMM_SELF,map->n,v->array,&v->localrep);CHKERRQ(ierr);
  ierr = PetscLogObjectParent(vv,v->localrep);CHKERRQ(ierr);
  PetscFunctionReturn(0);
}
912 
913 
914 #undef __FUNCT__
915 #define __FUNCT__ "VecSetValuesLocal_FETI"
916 PetscErrorCode VecSetValuesLocal_FETI(Vec vv,PetscInt n,const PetscInt *ix,const PetscScalar *values,InsertMode mode)
917 {
918   PetscErrorCode ierr;
919   Vec_MPI        *v = (Vec_MPI *)vv->data;
920 
921   PetscFunctionBegin;
922   ierr = VecSetValues(v->localrep,n,ix,values,mode);CHKERRQ(ierr);
923   PetscFunctionReturn(0);
924 }
925 
926 EXTERN_C_BEGIN
927 #undef __FUNCT__
928 #define __FUNCT__ "VecCreate_FETI"
929 PetscErrorCode PETSCVEC_DLLEXPORT VecCreate_FETI(Vec vv)
930 {
931   PetscErrorCode ierr;
932 
933   PetscFunctionBegin;
934   ierr = VecSetType(vv,VECMPI);CHKERRQ(ierr);
935 
936   /* overwrite the functions to handle setting values locally */
937   vv->ops->setlocaltoglobalmapping = VecSetLocalToGlobalMapping_FETI;
938   vv->ops->setvalueslocal          = VecSetValuesLocal_FETI;
939   vv->ops->assemblybegin           = 0;
940   vv->ops->assemblyend             = 0;
941   vv->ops->setvaluesblocked        = 0;
942   vv->ops->setvaluesblocked        = 0;
943 
944   PetscFunctionReturn(0);
945 }
946 EXTERN_C_END
947 
948 
949 
950 
951 
952 
953 
954 
955 
956