xref: /petsc/include/petscsf.h (revision 8fa9e22e65c6f47173f479ecf868ef7fe40cdcbd)
1936c5a86SJed Brown /*
2936c5a86SJed Brown    A star forest (SF) describes a communication pattern
3936c5a86SJed Brown */
426bd1501SBarry Smith #if !defined(PETSCSF_H)
526bd1501SBarry Smith #define PETSCSF_H
62c8e378dSBarry Smith #include <petscsys.h>
70c312b8eSJed Brown #include <petscsftypes.h>
897929ea7SJunchao Zhang #include <petscvec.h> /* for Vec, VecScatter etc */
9936c5a86SJed Brown 
10014dd563SJed Brown PETSC_EXTERN PetscClassId PETSCSF_CLASSID;
11936c5a86SJed Brown 
12ac762476SJed Brown #define PETSCSFBASIC      "basic"
13027eff8aSJunchao Zhang #define PETSCSFNEIGHBOR   "neighbor"
14dd5b3ca6SJunchao Zhang #define PETSCSFALLGATHERV "allgatherv"
15dd5b3ca6SJunchao Zhang #define PETSCSFALLGATHER  "allgather"
16dd5b3ca6SJunchao Zhang #define PETSCSFGATHERV    "gatherv"
17dd5b3ca6SJunchao Zhang #define PETSCSFGATHER     "gather"
18dd5b3ca6SJunchao Zhang #define PETSCSFALLTOALL   "alltoall"
19ed658588SBarry Smith #define PETSCSFWINDOW     "window"
205af141bcSJed Brown 
21936c5a86SJed Brown /*E
22dd5b3ca6SJunchao Zhang    PetscSFPattern - Pattern of the PetscSF graph
23dd5b3ca6SJunchao Zhang 
24dd5b3ca6SJunchao Zhang $  PETSCSF_PATTERN_GENERAL   - A general graph. One sets the graph with PetscSFSetGraph() and usually does not use this enum directly.
25dd5b3ca6SJunchao Zhang $  PETSCSF_PATTERN_ALLGATHER - A graph that every rank gathers all roots from all ranks (like MPI_Allgather/v). One sets the graph with PetscSFSetGraphWithPattern().
26dd5b3ca6SJunchao Zhang $  PETSCSF_PATTERN_GATHER    - A graph that rank 0 gathers all roots from all ranks (like MPI_Gather/v with root=0). One sets the graph with PetscSFSetGraphWithPattern().
27dd5b3ca6SJunchao Zhang $  PETSCSF_PATTERN_ALLTOALL  - A graph that every rank gathers different roots from all ranks (like MPI_Alltoall). One sets the graph with PetscSFSetGraphWithPattern().
28dd5b3ca6SJunchao Zhang                                In an ALLTOALL graph, we assume each process has <size> leaves and <size> roots, with each leaf connecting to a remote root. Here <size> is
29dd5b3ca6SJunchao Zhang                                the size of the communicator. This does not mean one cannot communicate multiple data items between a pair of processes. One just needs to
30dd5b3ca6SJunchao Zhang                                create a new MPI datatype for the multiple data items, e.g., by MPI_Type_contiguous.
31dd5b3ca6SJunchao Zhang    Level: beginner
32dd5b3ca6SJunchao Zhang 
33dd5b3ca6SJunchao Zhang .seealso: PetscSFSetGraph(), PetscSFSetGraphWithPattern()
34dd5b3ca6SJunchao Zhang E*/
/* Predefined communication patterns for a PetscSF graph; see the PetscSFPattern man page above. */
typedef enum {
  PETSCSF_PATTERN_GENERAL = 0, /* arbitrary graph, set with PetscSFSetGraph() */
  PETSCSF_PATTERN_ALLGATHER,   /* every rank gathers all roots (MPI_Allgather/v-like) */
  PETSCSF_PATTERN_GATHER,      /* rank 0 gathers all roots (MPI_Gather/v-like) */
  PETSCSF_PATTERN_ALLTOALL     /* each rank exchanges one root with every rank (MPI_Alltoall-like) */
} PetscSFPattern;
36dd5b3ca6SJunchao Zhang 
37dd5b3ca6SJunchao Zhang /*E
385af141bcSJed Brown     PetscSFWindowSyncType - Type of synchronization for PETSCSFWINDOW
39936c5a86SJed Brown 
405af141bcSJed Brown $  PETSCSF_WINDOW_SYNC_FENCE - simplest model, synchronizing across communicator
415af141bcSJed Brown $  PETSCSF_WINDOW_SYNC_LOCK - passive model, less synchronous, requires less setup than PETSCSF_WINDOW_SYNC_ACTIVE, but may require more handshakes
425af141bcSJed Brown $  PETSCSF_WINDOW_SYNC_ACTIVE - active model, provides most information to MPI implementation, needs to construct 2-way process groups (more setup than PETSCSF_WINDOW_SYNC_LOCK)
43936c5a86SJed Brown 
44e84a5f06SJed Brown    Level: advanced
45936c5a86SJed Brown 
46e84a5f06SJed Brown .seealso: PetscSFWindowSetSyncType(), PetscSFWindowGetSyncType()
47936c5a86SJed Brown E*/
/* Synchronization models for the PETSCSFWINDOW implementation; see the man page above. */
typedef enum {
  PETSCSF_WINDOW_SYNC_FENCE, /* MPI_Win_fence: simplest, synchronizes whole communicator */
  PETSCSF_WINDOW_SYNC_LOCK,  /* passive target (lock/unlock): less setup, possibly more handshakes */
  PETSCSF_WINDOW_SYNC_ACTIVE /* post/start/complete/wait: most info to MPI, needs 2-way groups */
} PetscSFWindowSyncType;
495af141bcSJed Brown PETSC_EXTERN const char *const PetscSFWindowSyncTypes[];
50936c5a86SJed Brown 
51e84a5f06SJed Brown /*E
525b0d146aSStefano Zampini     PetscSFWindowFlavorType - Flavor for the creation of MPI windows for PETSCSFWINDOW
535b0d146aSStefano Zampini 
545b0d146aSStefano Zampini $  PETSCSF_WINDOW_FLAVOR_CREATE - Use MPI_Win_create; the window is not reused
555b0d146aSStefano Zampini $  PETSCSF_WINDOW_FLAVOR_DYNAMIC - Use MPI_Win_create_dynamic and dynamically attach pointers
565b0d146aSStefano Zampini $  PETSCSF_WINDOW_FLAVOR_ALLOCATE - Use MPI_Win_allocate
575b0d146aSStefano Zampini $  PETSCSF_WINDOW_FLAVOR_SHARED - Use MPI_Win_allocate_shared
585b0d146aSStefano Zampini 
595b0d146aSStefano Zampini    Level: advanced
605b0d146aSStefano Zampini 
615b0d146aSStefano Zampini .seealso: PetscSFWindowSetFlavorType(), PetscSFWindowGetFlavorType()
625b0d146aSStefano Zampini E*/
/* Which MPI window-creation routine PETSCSFWINDOW uses; see the man page above. */
typedef enum {
  PETSCSF_WINDOW_FLAVOR_CREATE,   /* MPI_Win_create */
  PETSCSF_WINDOW_FLAVOR_DYNAMIC,  /* MPI_Win_create_dynamic + MPI_Win_attach */
  PETSCSF_WINDOW_FLAVOR_ALLOCATE, /* MPI_Win_allocate */
  PETSCSF_WINDOW_FLAVOR_SHARED    /* MPI_Win_allocate_shared */
} PetscSFWindowFlavorType;
645b0d146aSStefano Zampini PETSC_EXTERN const char *const PetscSFWindowFlavorTypes[];
655b0d146aSStefano Zampini 
665b0d146aSStefano Zampini /*E
67e84a5f06SJed Brown     PetscSFDuplicateOption - Aspects to preserve when duplicating a PetscSF
68e84a5f06SJed Brown 
69e84a5f06SJed Brown $  PETSCSF_DUPLICATE_CONFONLY - configuration only, user must call PetscSFSetGraph()
70e84a5f06SJed Brown $  PETSCSF_DUPLICATE_RANKS - communication ranks preserved, but different graph (allows simpler setup after calling PetscSFSetGraph())
71e84a5f06SJed Brown $  PETSCSF_DUPLICATE_GRAPH - entire graph duplicated
72e84a5f06SJed Brown 
73e84a5f06SJed Brown    Level: beginner
74e84a5f06SJed Brown 
75e84a5f06SJed Brown .seealso: PetscSFDuplicate()
76e84a5f06SJed Brown E*/
/* How much of a PetscSF is preserved by PetscSFDuplicate(); see the man page above. */
typedef enum {
  PETSCSF_DUPLICATE_CONFONLY, /* configuration only; caller must call PetscSFSetGraph() */
  PETSCSF_DUPLICATE_RANKS,    /* keep communication ranks, graph set later */
  PETSCSF_DUPLICATE_GRAPH     /* duplicate the entire graph */
} PetscSFDuplicateOption;
78e84a5f06SJed Brown PETSC_EXTERN const char *const PetscSFDuplicateOptions[];
79090c6444SJed Brown 
80adc40e5bSBarry Smith PETSC_EXTERN PetscFunctionList PetscSFList;
81bdf89e91SBarry Smith PETSC_EXTERN PetscErrorCode PetscSFRegister(const char[],PetscErrorCode (*)(PetscSF));
825af141bcSJed Brown 
83607a6623SBarry Smith PETSC_EXTERN PetscErrorCode PetscSFInitializePackage(void);
84014dd563SJed Brown PETSC_EXTERN PetscErrorCode PetscSFFinalizePackage(void);
8579c40355SBarry Smith PETSC_EXTERN PetscErrorCode PetscSFCreate(MPI_Comm,PetscSF*);
86014dd563SJed Brown PETSC_EXTERN PetscErrorCode PetscSFDestroy(PetscSF*);
875af141bcSJed Brown PETSC_EXTERN PetscErrorCode PetscSFSetType(PetscSF,PetscSFType);
8829046d53SLisandro Dalcin PETSC_EXTERN PetscErrorCode PetscSFGetType(PetscSF,PetscSFType*);
89014dd563SJed Brown PETSC_EXTERN PetscErrorCode PetscSFView(PetscSF,PetscViewer);
90fe2efc57SMark PETSC_EXTERN PetscErrorCode PetscSFViewFromOptions(PetscSF,PetscObject,const char[]);
915af141bcSJed Brown PETSC_EXTERN PetscErrorCode PetscSFSetUp(PetscSF);
92014dd563SJed Brown PETSC_EXTERN PetscErrorCode PetscSFSetFromOptions(PetscSF);
93e84a5f06SJed Brown PETSC_EXTERN PetscErrorCode PetscSFDuplicate(PetscSF,PetscSFDuplicateOption,PetscSF*);
945af141bcSJed Brown PETSC_EXTERN PetscErrorCode PetscSFWindowSetSyncType(PetscSF,PetscSFWindowSyncType);
955af141bcSJed Brown PETSC_EXTERN PetscErrorCode PetscSFWindowGetSyncType(PetscSF,PetscSFWindowSyncType*);
965b0d146aSStefano Zampini PETSC_EXTERN PetscErrorCode PetscSFWindowSetFlavorType(PetscSF,PetscSFWindowFlavorType);
975b0d146aSStefano Zampini PETSC_EXTERN PetscErrorCode PetscSFWindowGetFlavorType(PetscSF,PetscSFWindowFlavorType*);
982eb0eadbSSatish Balay PETSC_EXTERN PetscErrorCode PetscSFWindowSetInfo(PetscSF,MPI_Info);
992eb0eadbSSatish Balay PETSC_EXTERN PetscErrorCode PetscSFWindowGetInfo(PetscSF,MPI_Info*);
100014dd563SJed Brown PETSC_EXTERN PetscErrorCode PetscSFSetRankOrder(PetscSF,PetscBool);
10163f4a732SJed Brown PETSC_EXTERN PetscErrorCode PetscSFSetGraph(PetscSF,PetscInt,PetscInt,const PetscInt*,PetscCopyMode,const PetscSFNode*,PetscCopyMode);
102dd5b3ca6SJunchao Zhang PETSC_EXTERN PetscErrorCode PetscSFSetGraphWithPattern(PetscSF,PetscLayout,PetscSFPattern);
10379c40355SBarry Smith PETSC_EXTERN PetscErrorCode PetscSFGetGraph(PetscSF,PetscInt*,PetscInt*,const PetscInt**,const PetscSFNode**);
104f723732fSJed Brown PETSC_EXTERN PetscErrorCode PetscSFGetLeafRange(PetscSF,PetscInt*,PetscInt*);
10579c40355SBarry Smith PETSC_EXTERN PetscErrorCode PetscSFCreateEmbeddedSF(PetscSF,PetscInt,const PetscInt*,PetscSF*);
1062f5fb4c2SMatthew G. Knepley PETSC_EXTERN PetscErrorCode PetscSFCreateEmbeddedLeafSF(PetscSF,PetscInt,const PetscInt *, PetscSF *);
107014dd563SJed Brown PETSC_EXTERN PetscErrorCode PetscSFReset(PetscSF);
108b5a8e515SJed Brown PETSC_EXTERN PetscErrorCode PetscSFSetUpRanks(PetscSF,MPI_Group);
109dec1416fSJunchao Zhang PETSC_EXTERN PetscErrorCode PetscSFGetRootRanks(PetscSF,PetscInt*,const PetscMPIInt**,const PetscInt**,const PetscInt**,const PetscInt**);
1108750ddebSJunchao Zhang PETSC_EXTERN PetscErrorCode PetscSFGetLeafRanks(PetscSF,PetscInt*,const PetscMPIInt**,const PetscInt**,const PetscInt**);
111014dd563SJed Brown PETSC_EXTERN PetscErrorCode PetscSFGetGroups(PetscSF,MPI_Group*,MPI_Group*);
112014dd563SJed Brown PETSC_EXTERN PetscErrorCode PetscSFGetMultiSF(PetscSF,PetscSF*);
113014dd563SJed Brown PETSC_EXTERN PetscErrorCode PetscSFCreateInverseSF(PetscSF,PetscSF*);
114936c5a86SJed Brown 
115*8fa9e22eSVaclav Hapla /* Build PetscSF from PetscLayout */
116b0c7db22SLisandro Dalcin PETSC_EXTERN PetscErrorCode PetscSFSetGraphLayout(PetscSF,PetscLayout,PetscInt,const PetscInt*,PetscCopyMode,const PetscInt*);
117*8fa9e22eSVaclav Hapla PETSC_EXTERN PetscErrorCode PetscSFCreateFromLayouts(PetscLayout,PetscLayout,PetscSF*);
118*8fa9e22eSVaclav Hapla PETSC_DEPRECATED_FUNCTION("Use PetscSFCreateFromLayouts (since v3.15)")
119*8fa9e22eSVaclav Hapla PETSC_STATIC_INLINE PetscErrorCode PetscLayoutsCreateSF(PetscLayout rmap, PetscLayout lmap, PetscSF* sf) {
120*8fa9e22eSVaclav Hapla   return PetscSFCreateFromLayouts(rmap, lmap, sf);
121*8fa9e22eSVaclav Hapla }
122*8fa9e22eSVaclav Hapla 
123*8fa9e22eSVaclav Hapla /* PetscSection interoperability */
124b0c7db22SLisandro Dalcin PETSC_EXTERN PetscErrorCode PetscSFSetGraphSection(PetscSF,PetscSection,PetscSection);
125b0c7db22SLisandro Dalcin PETSC_EXTERN PetscErrorCode PetscSFCreateRemoteOffsets(PetscSF, PetscSection, PetscSection, PetscInt **);
126b0c7db22SLisandro Dalcin PETSC_EXTERN PetscErrorCode PetscSFDistributeSection(PetscSF, PetscSection, PetscInt **, PetscSection);
127b0c7db22SLisandro Dalcin PETSC_EXTERN PetscErrorCode PetscSFCreateSectionSF(PetscSF, PetscSection, PetscInt [], PetscSection, PetscSF *);
128b0c7db22SLisandro Dalcin 
1293482bfa8SJunchao Zhang /* Reduce rootdata to leafdata using provided operation */
1303482bfa8SJunchao Zhang PETSC_EXTERN PetscErrorCode PetscSFBcastAndOpBegin(PetscSF,MPI_Datatype,const void*,void*,MPI_Op)
1313482bfa8SJunchao Zhang   PetscAttrMPIPointerWithType(3,2) PetscAttrMPIPointerWithType(4,2);
1323482bfa8SJunchao Zhang PETSC_EXTERN PetscErrorCode PetscSFBcastAndOpEnd(PetscSF,MPI_Datatype,const void*,void*,MPI_Op)
1333482bfa8SJunchao Zhang   PetscAttrMPIPointerWithType(3,2) PetscAttrMPIPointerWithType(4,2);
134d0295fc0SJunchao Zhang PETSC_EXTERN PetscErrorCode PetscSFBcastAndOpWithMemTypeBegin(PetscSF,MPI_Datatype,PetscMemType,const void*,PetscMemType,void*,MPI_Op)
135d0295fc0SJunchao Zhang   PetscAttrMPIPointerWithType(4,2) PetscAttrMPIPointerWithType(6,2);
136d0295fc0SJunchao Zhang 
137936c5a86SJed Brown /* Reduce leafdata into rootdata using provided operation */
13879c40355SBarry Smith PETSC_EXTERN PetscErrorCode PetscSFReduceBegin(PetscSF,MPI_Datatype,const void*,void *,MPI_Op)
13919436ca2SJed Brown   PetscAttrMPIPointerWithType(3,2) PetscAttrMPIPointerWithType(4,2);
14079c40355SBarry Smith PETSC_EXTERN PetscErrorCode PetscSFReduceEnd(PetscSF,MPI_Datatype,const void*,void*,MPI_Op)
14119436ca2SJed Brown   PetscAttrMPIPointerWithType(3,2) PetscAttrMPIPointerWithType(4,2);
142d0295fc0SJunchao Zhang PETSC_EXTERN PetscErrorCode PetscSFReduceWithMemTypeBegin(PetscSF,MPI_Datatype,PetscMemType,const void*,PetscMemType,void *,MPI_Op)
143d0295fc0SJunchao Zhang   PetscAttrMPIPointerWithType(4,2) PetscAttrMPIPointerWithType(6,2);
144936c5a86SJed Brown /* Atomically modifies (using provided operation) rootdata using leafdata from each leaf, value at root at time of modification is returned in leafupdate. */
14579c40355SBarry Smith PETSC_EXTERN PetscErrorCode PetscSFFetchAndOpBegin(PetscSF,MPI_Datatype,void*,const void*,void*,MPI_Op)
146894dd566SJed Brown   PetscAttrMPIPointerWithType(3,2) PetscAttrMPIPointerWithType(4,2) PetscAttrMPIPointerWithType(5,2);
14779c40355SBarry Smith PETSC_EXTERN PetscErrorCode PetscSFFetchAndOpEnd(PetscSF,MPI_Datatype,void*,const void*,void*,MPI_Op)
148894dd566SJed Brown   PetscAttrMPIPointerWithType(3,2) PetscAttrMPIPointerWithType(4,2) PetscAttrMPIPointerWithType(5,2);
149936c5a86SJed Brown /* Compute the degree of every root vertex (number of leaves in its star) */
15079c40355SBarry Smith PETSC_EXTERN PetscErrorCode PetscSFComputeDegreeBegin(PetscSF,const PetscInt**);
15179c40355SBarry Smith PETSC_EXTERN PetscErrorCode PetscSFComputeDegreeEnd(PetscSF,const PetscInt**);
15266dfcd1aSVaclav Hapla PETSC_EXTERN PetscErrorCode PetscSFComputeMultiRootOriginalNumbering(PetscSF,const PetscInt[],PetscInt*,PetscInt*[]);
153936c5a86SJed Brown /* Concatenate data from all leaves into roots */
15479c40355SBarry Smith PETSC_EXTERN PetscErrorCode PetscSFGatherBegin(PetscSF,MPI_Datatype,const void*,void*)
155894dd566SJed Brown   PetscAttrMPIPointerWithType(3,2) PetscAttrMPIPointerWithType(4,2);
15679c40355SBarry Smith PETSC_EXTERN PetscErrorCode PetscSFGatherEnd(PetscSF,MPI_Datatype,const void*,void*)
157894dd566SJed Brown   PetscAttrMPIPointerWithType(3,2) PetscAttrMPIPointerWithType(4,2);
158936c5a86SJed Brown /* Distribute distinct values to each leaf from roots */
15979c40355SBarry Smith PETSC_EXTERN PetscErrorCode PetscSFScatterBegin(PetscSF,MPI_Datatype,const void*,void*)
160894dd566SJed Brown   PetscAttrMPIPointerWithType(3,2) PetscAttrMPIPointerWithType(4,2);
16179c40355SBarry Smith PETSC_EXTERN PetscErrorCode PetscSFScatterEnd(PetscSF,MPI_Datatype,const void*,void*)
162894dd566SJed Brown   PetscAttrMPIPointerWithType(3,2) PetscAttrMPIPointerWithType(4,2);
163936c5a86SJed Brown 
164a7b3aa13SAta Mesgarnejad PETSC_EXTERN PetscErrorCode PetscSFCompose(PetscSF,PetscSF,PetscSF*);
16504c0ada0SJunchao Zhang PETSC_EXTERN PetscErrorCode PetscSFComposeInverse(PetscSF,PetscSF,PetscSF*);
166a7b3aa13SAta Mesgarnejad 
1678bfbc91cSJed Brown #if defined(MPI_REPLACE)
1688bfbc91cSJed Brown #  define MPIU_REPLACE MPI_REPLACE
1698bfbc91cSJed Brown #else
1708bfbc91cSJed Brown /* When using an old MPI such that MPI_REPLACE is not defined, we do not pass MPI_REPLACE to MPI at all.  Instead, we
1718bfbc91cSJed Brown  * use it as a flag for our own reducer in the PETSCSFBASIC implementation.  This could be any unique value unlikely to
1728bfbc91cSJed Brown  * collide with another MPI_Op so we'll just use the value that has been used by every version of MPICH since
1738bfbc91cSJed Brown  * MPICH2-1.0.6. */
1748bfbc91cSJed Brown #  define MPIU_REPLACE (MPI_Op)(0x5800000d)
1758bfbc91cSJed Brown #endif
1768bfbc91cSJed Brown 
177dec1416fSJunchao Zhang PETSC_DEPRECATED_FUNCTION("Use PetscSFGetRootRanks (since v3.12)")
178dec1416fSJunchao Zhang PETSC_STATIC_INLINE PetscErrorCode PetscSFGetRanks(PetscSF sf,PetscInt *nranks,const PetscMPIInt **ranks,const PetscInt **roffset,const PetscInt **rmine,const PetscInt **rremote) {
179dec1416fSJunchao Zhang   return PetscSFGetRootRanks(sf,nranks,ranks,roffset,rmine,rremote);
180dec1416fSJunchao Zhang }
181e2652d4cSJunchao Zhang 
182c73c85a0SVaclav Hapla /*@C
183c73c85a0SVaclav Hapla    PetscSFBcastBegin - begin pointwise broadcast to be concluded with call to PetscSFBcastEnd()
184c73c85a0SVaclav Hapla 
185c73c85a0SVaclav Hapla    Collective on PetscSF
186c73c85a0SVaclav Hapla 
187c73c85a0SVaclav Hapla    Input Arguments:
188c73c85a0SVaclav Hapla +  sf - star forest on which to communicate
189c73c85a0SVaclav Hapla .  unit - data type associated with each node
190c73c85a0SVaclav Hapla -  rootdata - buffer to broadcast
191c73c85a0SVaclav Hapla 
192c73c85a0SVaclav Hapla    Output Arguments:
193c73c85a0SVaclav Hapla .  leafdata - buffer to update with values from each leaf's respective root
194c73c85a0SVaclav Hapla 
195c73c85a0SVaclav Hapla    Level: intermediate
196c73c85a0SVaclav Hapla 
1978eeec653SVaclav Hapla .seealso: PetscSFCreate(), PetscSFSetGraph(), PetscSFView(), PetscSFBcastEnd(), PetscSFReduceBegin(), PetscSFBcastAndOpBegin()
198c73c85a0SVaclav Hapla @*/
199e2652d4cSJunchao Zhang PETSC_STATIC_INLINE PetscErrorCode PetscSFBcastBegin(PetscSF sf,MPI_Datatype unit,const void* rootdata,void* leafdata) {
200e2652d4cSJunchao Zhang   return PetscSFBcastAndOpBegin(sf,unit,rootdata,leafdata,MPIU_REPLACE);
201e2652d4cSJunchao Zhang }
202c73c85a0SVaclav Hapla 
203d0295fc0SJunchao Zhang PETSC_STATIC_INLINE PetscErrorCode PetscSFBcastWithMemTypeBegin(PetscSF sf,MPI_Datatype unit,PetscMemType rootmtype,const void* rootdata,PetscMemType leafmtype,void* leafdata) {
204d0295fc0SJunchao Zhang   return PetscSFBcastAndOpWithMemTypeBegin(sf,unit,rootmtype,rootdata,leafmtype,leafdata,MPIU_REPLACE);
205d0295fc0SJunchao Zhang }
206d0295fc0SJunchao Zhang 
207c73c85a0SVaclav Hapla /*@C
208c73c85a0SVaclav Hapla    PetscSFBcastEnd - end a broadcast operation started with PetscSFBcastBegin()
209c73c85a0SVaclav Hapla 
210c73c85a0SVaclav Hapla    Collective
211c73c85a0SVaclav Hapla 
212c73c85a0SVaclav Hapla    Input Arguments:
213c73c85a0SVaclav Hapla +  sf - star forest
214c73c85a0SVaclav Hapla .  unit - data type
215c73c85a0SVaclav Hapla -  rootdata - buffer to broadcast
216c73c85a0SVaclav Hapla 
217c73c85a0SVaclav Hapla    Output Arguments:
218c73c85a0SVaclav Hapla .  leafdata - buffer to update with values from each leaf's respective root
219c73c85a0SVaclav Hapla 
220c73c85a0SVaclav Hapla    Level: intermediate
221c73c85a0SVaclav Hapla 
222c73c85a0SVaclav Hapla .seealso: PetscSFSetGraph(), PetscSFReduceEnd()
223c73c85a0SVaclav Hapla @*/
224e2652d4cSJunchao Zhang PETSC_STATIC_INLINE PetscErrorCode PetscSFBcastEnd(PetscSF sf,MPI_Datatype unit,const void* rootdata,void* leafdata) {
225e2652d4cSJunchao Zhang   return PetscSFBcastAndOpEnd(sf,unit,rootdata,leafdata,MPIU_REPLACE);
226e2652d4cSJunchao Zhang }
227e2652d4cSJunchao Zhang 
228936c5a86SJed Brown #endif
229