xref: /petsc/include/petscsf.h (revision 2f5fb4c2c059e0d44be1a3804a401471a450aa2c)
1936c5a86SJed Brown /*
2936c5a86SJed Brown    A star forest (SF) describes a communication pattern
3936c5a86SJed Brown */
4936c5a86SJed Brown #if !defined(__PETSCSF_H)
5936c5a86SJed Brown #define __PETSCSF_H
62c8e378dSBarry Smith #include <petscsys.h>
70c312b8eSJed Brown #include <petscsftypes.h>
8936c5a86SJed Brown 
9014dd563SJed Brown PETSC_EXTERN PetscClassId PETSCSF_CLASSID;
10936c5a86SJed Brown 
115af141bcSJed Brown /*J
125af141bcSJed Brown     PetscSFType - String with the name of a PetscSF method or the creation function
135af141bcSJed Brown        with an optional dynamic library name, for example
145af141bcSJed Brown        http://www.mcs.anl.gov/petsc/lib.so:mysfcreate()
155af141bcSJed Brown 
165af141bcSJed Brown    Level: beginner
175af141bcSJed Brown 
18ed658588SBarry Smith    Notes: The two approaches provided are
$     PETSCSFBASIC  which uses MPI-1 message passing to perform the communication, and
$     PETSCSFWINDOW which uses MPI-2 one-sided operations to perform the communication; this may be more efficient,
$                   but may not be available for all MPI distributions. In particular, Open MPI has bugs in its one-sided
$                   operations that prevent the use of PETSCSFWINDOW with that distribution.
23ed658588SBarry Smith 
245af141bcSJed Brown .seealso: PetscSFSetType(), PetscSF
255af141bcSJed Brown J*/
/* Runtime-selectable implementation name; see the PetscSFType manual text above */
typedef const char *PetscSFType;
#define PETSCSFBASIC  "basic"   /* MPI-1 message passing (see Notes above) */
#define PETSCSFWINDOW "window"  /* MPI-2 one-sided operations (see Notes above) */
295af141bcSJed Brown 
30936c5a86SJed Brown /*E
315af141bcSJed Brown     PetscSFWindowSyncType - Type of synchronization for PETSCSFWINDOW
32936c5a86SJed Brown 
335af141bcSJed Brown $  PETSCSF_WINDOW_SYNC_FENCE - simplest model, synchronizing across communicator
345af141bcSJed Brown $  PETSCSF_WINDOW_SYNC_LOCK - passive model, less synchronous, requires less setup than PETSCSF_WINDOW_SYNC_ACTIVE, but may require more handshakes
355af141bcSJed Brown $  PETSCSF_WINDOW_SYNC_ACTIVE - active model, provides most information to MPI implementation, needs to construct 2-way process groups (more setup than PETSCSF_WINDOW_SYNC_LOCK)
36936c5a86SJed Brown 
37e84a5f06SJed Brown    Level: advanced
38936c5a86SJed Brown 
39e84a5f06SJed Brown .seealso: PetscSFWindowSetSyncType(), PetscSFWindowGetSyncType()
40936c5a86SJed Brown E*/
/* Synchronization model used by the PETSCSFWINDOW implementation; the descriptions
   below mirror the PetscSFWindowSyncType manual text above. */
typedef enum {
  PETSCSF_WINDOW_SYNC_FENCE,  /* simplest model, synchronizing across the communicator */
  PETSCSF_WINDOW_SYNC_LOCK,   /* passive model, less synchronous, less setup but possibly more handshakes */
  PETSCSF_WINDOW_SYNC_ACTIVE  /* active model, constructs 2-way process groups (more setup than LOCK) */
} PetscSFWindowSyncType;
/* String names corresponding to the PetscSFWindowSyncType enumerators (indexed by enum value) */
PETSC_EXTERN const char *const PetscSFWindowSyncTypes[];
43936c5a86SJed Brown 
44e84a5f06SJed Brown /*E
45e84a5f06SJed Brown     PetscSFDuplicateOption - Aspects to preserve when duplicating a PetscSF
46e84a5f06SJed Brown 
47e84a5f06SJed Brown $  PETSCSF_DUPLICATE_CONFONLY - configuration only, user must call PetscSFSetGraph()
48e84a5f06SJed Brown $  PETSCSF_DUPLICATE_RANKS - communication ranks preserved, but different graph (allows simpler setup after calling PetscSFSetGraph())
49e84a5f06SJed Brown $  PETSCSF_DUPLICATE_GRAPH - entire graph duplicated
50e84a5f06SJed Brown 
51e84a5f06SJed Brown    Level: beginner
52e84a5f06SJed Brown 
53e84a5f06SJed Brown .seealso: PetscSFDuplicate()
54e84a5f06SJed Brown E*/
/* How much of an existing PetscSF to preserve in PetscSFDuplicate(); the descriptions
   below mirror the PetscSFDuplicateOption manual text above. */
typedef enum {
  PETSCSF_DUPLICATE_CONFONLY, /* configuration only; user must call PetscSFSetGraph() */
  PETSCSF_DUPLICATE_RANKS,    /* communication ranks preserved, graph supplied later via PetscSFSetGraph() */
  PETSCSF_DUPLICATE_GRAPH     /* entire graph duplicated */
} PetscSFDuplicateOption;
/* String names corresponding to the PetscSFDuplicateOption enumerators (indexed by enum value) */
PETSC_EXTERN const char *const PetscSFDuplicateOptions[];
57090c6444SJed Brown 
/* Dynamic registration of PetscSF implementations (the names become PetscSFType values) */
PETSC_EXTERN PetscFunctionList PetscSFList;
PETSC_EXTERN PetscErrorCode PetscSFRegister(const char[],PetscErrorCode (*)(PetscSF));

/* Package lifetime and basic object lifecycle */
PETSC_EXTERN PetscErrorCode PetscSFInitializePackage(void);
PETSC_EXTERN PetscErrorCode PetscSFFinalizePackage(void);
PETSC_EXTERN PetscErrorCode PetscSFCreate(MPI_Comm comm,PetscSF*);
PETSC_EXTERN PetscErrorCode PetscSFDestroy(PetscSF*);
PETSC_EXTERN PetscErrorCode PetscSFSetType(PetscSF,PetscSFType);
PETSC_EXTERN PetscErrorCode PetscSFView(PetscSF,PetscViewer);
PETSC_EXTERN PetscErrorCode PetscSFSetUp(PetscSF);
PETSC_EXTERN PetscErrorCode PetscSFSetFromOptions(PetscSF);
PETSC_EXTERN PetscErrorCode PetscSFDuplicate(PetscSF,PetscSFDuplicateOption,PetscSF*);
/* Synchronization model for the PETSCSFWINDOW implementation only */
PETSC_EXTERN PetscErrorCode PetscSFWindowSetSyncType(PetscSF,PetscSFWindowSyncType);
PETSC_EXTERN PetscErrorCode PetscSFWindowGetSyncType(PetscSF,PetscSFWindowSyncType*);
PETSC_EXTERN PetscErrorCode PetscSFSetRankOrder(PetscSF,PetscBool);
/* Graph specification: nroots, nleaves, leaf indices (ilocal) and remote root locations
   (iremote); PetscCopyMode controls ownership of the passed arrays */
PETSC_EXTERN PetscErrorCode PetscSFSetGraph(PetscSF,PetscInt,PetscInt,const PetscInt*,PetscCopyMode,const PetscSFNode*,PetscCopyMode);
PETSC_EXTERN PetscErrorCode PetscSFGetGraph(PetscSF,PetscInt *nroots,PetscInt *nleaves,const PetscInt **ilocal,const PetscSFNode **iremote);
PETSC_EXTERN PetscErrorCode PetscSFGetLeafRange(PetscSF,PetscInt*,PetscInt*);
/* Create new star forests restricted to a selected subset of roots or leaves */
PETSC_EXTERN PetscErrorCode PetscSFCreateEmbeddedSF(PetscSF,PetscInt nroots,const PetscInt *selected,PetscSF *newsf);
PETSC_EXTERN PetscErrorCode PetscSFCreateEmbeddedLeafSF(PetscSF,PetscInt,const PetscInt *, PetscSF *);
/* NOTE(review): presumably returns the SF to its pre-PetscSFSetGraph() state -- confirm in the implementation */
PETSC_EXTERN PetscErrorCode PetscSFReset(PetscSF);
/* Access to the communication structure (neighbor ranks, process groups, the multi-SF
   used for gather/scatter, and the inverse mapping from leaves back to roots) */
PETSC_EXTERN PetscErrorCode PetscSFGetRanks(PetscSF,PetscInt*,const PetscMPIInt**,const PetscInt**,const PetscInt**,const PetscInt**);
PETSC_EXTERN PetscErrorCode PetscSFGetGroups(PetscSF,MPI_Group*,MPI_Group*);
PETSC_EXTERN PetscErrorCode PetscSFGetMultiSF(PetscSF,PetscSF*);
PETSC_EXTERN PetscErrorCode PetscSFCreateInverseSF(PetscSF,PetscSF*);
83936c5a86SJed Brown 
/* Communication is split-phase: each XxxBegin() posts the operation and the matching
   XxxEnd() completes it, so independent local work can be done between the two calls.
   The MPI_Datatype argument describes the unit type of the data arrays; the
   PetscAttrMPIPointerWithType annotations tie each pointer argument to that datatype. */

/* broadcasts rootdata to leafdata */
PETSC_EXTERN PetscErrorCode PetscSFBcastBegin(PetscSF,MPI_Datatype,const void *rootdata,void *leafdata)
  PetscAttrMPIPointerWithType(3,2) PetscAttrMPIPointerWithType(4,2);
PETSC_EXTERN PetscErrorCode PetscSFBcastEnd(PetscSF,MPI_Datatype,const void *rootdata,void *leafdata)
  PetscAttrMPIPointerWithType(3,2) PetscAttrMPIPointerWithType(4,2);
/* Reduce leafdata into rootdata using provided operation */
PETSC_EXTERN PetscErrorCode PetscSFReduceBegin(PetscSF,MPI_Datatype,const void *leafdata,void *rootdata,MPI_Op)
  PetscAttrMPIPointerWithType(3,2) PetscAttrMPIPointerWithType(4,2);
PETSC_EXTERN PetscErrorCode PetscSFReduceEnd(PetscSF,MPI_Datatype,const void *leafdata,void *rootdata,MPI_Op)
  PetscAttrMPIPointerWithType(3,2) PetscAttrMPIPointerWithType(4,2);
/* Atomically modifies (using provided operation) rootdata using leafdata from each leaf, value at root at time of modification is returned in leafupdate. */
PETSC_EXTERN PetscErrorCode PetscSFFetchAndOpBegin(PetscSF,MPI_Datatype,void *rootdata,const void *leafdata,void *leafupdate,MPI_Op)
  PetscAttrMPIPointerWithType(3,2) PetscAttrMPIPointerWithType(4,2) PetscAttrMPIPointerWithType(5,2);
PETSC_EXTERN PetscErrorCode PetscSFFetchAndOpEnd(PetscSF,MPI_Datatype,void *rootdata,const void *leafdata,void *leafupdate,MPI_Op)
  PetscAttrMPIPointerWithType(3,2) PetscAttrMPIPointerWithType(4,2) PetscAttrMPIPointerWithType(5,2);
/* Compute the degree of every root vertex (number of leaves in its star) */
PETSC_EXTERN PetscErrorCode PetscSFComputeDegreeBegin(PetscSF,const PetscInt **degree);
PETSC_EXTERN PetscErrorCode PetscSFComputeDegreeEnd(PetscSF,const PetscInt **degree);
/* Concatenate data from all leaves into roots (multirootdata has one slot per leaf of each root) */
PETSC_EXTERN PetscErrorCode PetscSFGatherBegin(PetscSF,MPI_Datatype,const void *leafdata,void *multirootdata)
  PetscAttrMPIPointerWithType(3,2) PetscAttrMPIPointerWithType(4,2);
PETSC_EXTERN PetscErrorCode PetscSFGatherEnd(PetscSF,MPI_Datatype,const void *leafdata,void *multirootdata)
  PetscAttrMPIPointerWithType(3,2) PetscAttrMPIPointerWithType(4,2);
/* Distribute distinct values to each leaf from roots */
PETSC_EXTERN PetscErrorCode PetscSFScatterBegin(PetscSF,MPI_Datatype,const void *multirootdata,void *leafdata)
  PetscAttrMPIPointerWithType(3,2) PetscAttrMPIPointerWithType(4,2);
PETSC_EXTERN PetscErrorCode PetscSFScatterEnd(PetscSF,MPI_Datatype,const void *multirootdata,void *leafdata)
  PetscAttrMPIPointerWithType(3,2) PetscAttrMPIPointerWithType(4,2);

/* NOTE(review): presumably composes the two graphs, mapping the first SF's roots through
   the second SF; confirm against the implementation before relying on orientation */
PETSC_EXTERN PetscErrorCode PetscSFCompose(PetscSF,PetscSF,PetscSF*);
114a7b3aa13SAta Mesgarnejad 
#ifdef MPI_REPLACE
#define MPIU_REPLACE MPI_REPLACE
#else
/* Old MPI implementations do not define MPI_REPLACE.  In that case MPIU_REPLACE is never
 * handed to MPI itself; it only serves as a flag recognized by our own reducer in the
 * PETSCSFBASIC implementation.  Any value unlikely to collide with a real MPI_Op would
 * do, so we reuse the constant that every version of MPICH since MPICH2-1.0.6 has
 * assigned to MPI_REPLACE. */
#define MPIU_REPLACE (MPI_Op)(0x5800000d)
#endif
1248bfbc91cSJed Brown 
125936c5a86SJed Brown #endif
126