#include <petsc/private/dmnetworkimpl.h> /*I  "petscdmnetwork.h"  I*/

static PetscErrorCode DMView_Network_CSV(DM dm, PetscViewer viewer)
{
  DM              dmcoords;
  PetscInt        nsubnets, i, subnet, nvertices, nedges, vertex, edge;
  PetscInt        vertexOffsets[2], globalEdgeVertices[2];
  PetscScalar     vertexCoords[2];
  const PetscInt *vertices, *edges, *edgeVertices;
  Vec             allVertexCoords;
  PetscMPIInt     rank;
  MPI_Comm        comm;

  PetscFunctionBegin;
  // Get the network containing coordinate information
  PetscCall(DMGetCoordinateDM(dm, &dmcoords));
  // Get the coordinate vector for the network
  PetscCall(DMGetCoordinatesLocal(dm, &allVertexCoords));
  // Get the MPI communicator and this process' rank
  PetscCall(PetscObjectGetComm((PetscObject)dm, &comm));
  PetscCallMPI(MPI_Comm_rank(comm, &rank));
  // Start synchronized printing
  PetscCall(PetscViewerASCIIPushSynchronized(viewer));

  // Write the header
  PetscCall(PetscViewerASCIISynchronizedPrintf(viewer, "Type,Rank,ID,X,Y,Z,Name,Color\n"));
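  // With illustrative values, a vertex row produced below looks like
  //   Node,0,5,1.000000,2.000000,0,5
  // i.e. the Name field currently repeats the global vertex ID and the Color
  // column is not yet emitted (see the TODOs below)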

  // Iterate over each subnetwork (Note: we need the global number of subnetworks here)
  PetscCall(DMNetworkGetNumSubNetworks(dm, NULL, &nsubnets));
  for (subnet = 0; subnet < nsubnets; subnet++) {
    // Get the subnetwork's vertices and edges
    PetscCall(DMNetworkGetSubnetwork(dm, subnet, &nvertices, &nedges, &vertices, &edges));

    // Write out each vertex
    for (i = 0; i < nvertices; i++) {
      vertex = vertices[i];
      // Get the offset into the coordinate vector for the vertex
      PetscCall(DMNetworkGetLocalVecOffset(dmcoords, vertex, ALL_COMPONENTS, vertexOffsets));
      vertexOffsets[1] = vertexOffsets[0] + 1;
      // Remap vertex to the global value
      PetscCall(DMNetworkGetGlobalVertexIndex(dm, vertex, &vertex));
      // Get the vertex position from the coordinate vector
      PetscCall(VecGetValues(allVertexCoords, 2, vertexOffsets, vertexCoords));

      // TODO: Determine vertex color/name
      PetscCall(PetscViewerASCIISynchronizedPrintf(viewer, "Node,%" PetscInt_FMT ",%" PetscInt_FMT ",%lf,%lf,0,%" PetscInt_FMT "\n", (PetscInt)rank, vertex, (double)PetscRealPart(vertexCoords[0]), (double)PetscRealPart(vertexCoords[1]), vertex));
    }

    // Write out each edge
    for (i = 0; i < nedges; i++) {
      edge = edges[i];
      PetscCall(DMNetworkGetConnectedVertices(dm, edge, &edgeVertices));
      PetscCall(DMNetworkGetGlobalVertexIndex(dm, edgeVertices[0], &globalEdgeVertices[0]));
      PetscCall(DMNetworkGetGlobalVertexIndex(dm, edgeVertices[1], &globalEdgeVertices[1]));
      PetscCall(DMNetworkGetGlobalEdgeIndex(dm, edge, &edge));

      // TODO: Determine edge color/name
      PetscCall(PetscViewerASCIISynchronizedPrintf(viewer, "Edge,%" PetscInt_FMT ",%" PetscInt_FMT ",%" PetscInt_FMT ",%" PetscInt_FMT ",0,%" PetscInt_FMT "\n", (PetscInt)rank, edge, globalEdgeVertices[0], globalEdgeVertices[1], edge));
    }
  }
  // End synchronized printing
  PetscCall(PetscViewerFlush(viewer));
  PetscCall(PetscViewerASCIIPopSynchronized(viewer));
  PetscFunctionReturn(PETSC_SUCCESS);
}
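
/*
  Minimal usage sketch (hypothetical caller, not part of this file): assuming dm is a
  fully set-up DMNetwork with coordinates attached, the CSV writer above is reached by
  viewing with an ASCII viewer pushed to the CSV format:

    PetscViewer viewer;
    PetscCall(PetscViewerASCIIGetStdout(PETSC_COMM_WORLD, &viewer));
    PetscCall(PetscViewerPushFormat(viewer, PETSC_VIEWER_ASCII_CSV));
    PetscCall(DMView(dm, viewer));
    PetscCall(PetscViewerPopFormat(viewer));
*/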

#include <petscdraw.h>
static PetscErrorCode DMView_Network_Matplotlib(DM dm, PetscViewer viewer)
{
  PetscMPIInt rank, size, rank2;
  MPI_Comm    comm;
  char        filename[PETSC_MAX_PATH_LEN + 1], proccall[PETSC_MAX_PATH_LEN + 500], scriptFile[PETSC_MAX_PATH_LEN + 1], streamBuffer[256];
  PetscViewer csvViewer;
  FILE       *processFile = NULL;
  PetscBool   isnull;
  PetscDraw   draw;

  PetscFunctionBegin;
  // Deal with the PetscDraw we are given
  PetscCall(PetscViewerDrawGetDraw(viewer, 1, &draw));
  PetscCall(PetscDrawIsNull(draw, &isnull));
  PetscCall(PetscDrawSetVisible(draw, PETSC_FALSE));

  // Clear the file name buffer so all communicated bytes are well-defined
  PetscCall(PetscMemzero(filename, sizeof(filename)));

  // Get the MPI communicator and this process' rank
  PetscCall(PetscObjectGetComm((PetscObject)dm, &comm));
  PetscCallMPI(MPI_Comm_rank(comm, &rank));
  PetscCallMPI(MPI_Comm_size(comm, &size));

  // Generate and broadcast the temporary file name from rank 0
  if (rank == 0) {
#if defined(PETSC_HAVE_TMPNAM_S)
    // Acquire a temporary file to write to and open an ASCII/CSV viewer
    PetscCheck(tmpnam_s(filename, sizeof(filename)) == 0, comm, PETSC_ERR_SYS, "Could not acquire temporary file");
#elif defined(PETSC_HAVE_MKSTEMP) && __STDC_VERSION__ > 199901L
    size_t numChars;
    // Same thing, but for POSIX systems on which tmpnam is deprecated
    // Note: Configure may detect mkstemp but it will not be defined if compiling for C99, so check additional defines to see if we can use it
    PetscCall(PetscStrncpy(filename, "/tmp/", sizeof(filename)));
    // mkstemp requires us to explicitly specify part of the path, but some systems may not allow putting files in /tmp/, so provide an option to override it
    PetscCall(PetscOptionsGetString(NULL, NULL, "-dmnetwork_view_tmpdir", filename, sizeof(filename), NULL));
    // Make sure the filename ends with a '/'
    PetscCall(PetscStrlen(filename, &numChars));
    if (filename[numChars - 1] != '/') {
      filename[numChars]     = '/';
      filename[numChars + 1] = 0;
    }
    // Perform the actual temporary file creation
    PetscCall(PetscStrlcat(filename, "XXXXXX", sizeof(filename)));
    PetscCheck(mkstemp(filename) != -1, comm, PETSC_ERR_SYS, "Could not acquire temporary file");
#else
    // Same thing, but for older C versions which don't have the safe form
    PetscCheck(tmpnam(filename) != NULL, comm, PETSC_ERR_SYS, "Could not acquire temporary file");
#endif
    // Broadcast the filename to all other MPI ranks
    for (rank2 = 1; rank2 < size; rank2++) PetscCallMPI(MPI_Send(filename, PETSC_MAX_PATH_LEN, MPI_BYTE, rank2, 0, comm));
  } else {
    // Receive the file name
    PetscCallMPI(MPI_Recv(filename, PETSC_MAX_PATH_LEN, MPI_BYTE, 0, 0, comm, MPI_STATUS_IGNORE));
  }
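  // Design note: the point-to-point loop above is equivalent to one collective call;
  // a minimal alternative sketch (not used here) would be
  //   PetscCallMPI(MPI_Bcast(filename, PETSC_MAX_PATH_LEN, MPI_BYTE, 0, comm));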

  PetscCall(PetscViewerASCIIOpen(PETSC_COMM_WORLD, filename, &csvViewer));
  PetscCall(PetscViewerPushFormat(csvViewer, PETSC_VIEWER_ASCII_CSV));

  // Use the CSV viewer to write out the local network
  PetscCall(DMView_Network_CSV(dm, csvViewer));

  // Close the viewer
  PetscCall(PetscViewerDestroy(&csvViewer));

  // Expand ${PETSC_DIR} to get the full path to the viewer script
  PetscCall(PetscStrreplace(PETSC_COMM_WORLD, "${PETSC_DIR}/share/petsc/bin/dmnetwork_view.py", scriptFile, sizeof(scriptFile)));
  PetscCall(PetscFixFilename(scriptFile, scriptFile));
  // Generate the system call 'python3 $PETSC_DIR/share/petsc/bin/dmnetwork_view.py [-tx] <file>'
  PetscCall(PetscArrayzero(proccall, sizeof(proccall)));
  PetscCall(PetscSNPrintf(proccall, sizeof(proccall), "%s %s %s %s", PETSC_PYTHON_EXE, scriptFile, (isnull ? "-tx" : ""), filename));

#if defined(PETSC_HAVE_POPEN)
  // Run the python script (Note: while this is called on all ranks, PetscPOpen will only run the command on rank 0)
  PetscCall(PetscPOpen(PETSC_COMM_WORLD, NULL, proccall, "r", &processFile));
  if (processFile != NULL) {
    while (fgets(streamBuffer, sizeof(streamBuffer), processFile) != NULL) PetscCall(PetscPrintf(PETSC_COMM_WORLD, "%s", streamBuffer));
  }
  PetscCall(PetscPClose(PETSC_COMM_WORLD, processFile));
#else
  // Same thing, but using the standard library for systems that don't have POpen/PClose (only run on rank 0)
  if (rank == 0) PetscCheck(system(proccall) == 0, comm, PETSC_ERR_SYS, "Failed to call viewer script");
  // Barrier so that all ranks wait until the call completes; every rank must enter the
  // barrier or rank 0 would deadlock waiting for the others
  PetscCallMPI(MPI_Barrier(PETSC_COMM_WORLD));
#endif
  // Clean up the temporary file, using rank 0
  if (rank == 0) PetscCheck(remove(filename) == 0, comm, PETSC_ERR_SYS, "Failed to delete temporary file");
  PetscFunctionReturn(PETSC_SUCCESS);
}
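
/*
  Minimal usage sketch (hypothetical, assuming a PETSc build with Python and the
  dependencies of dmnetwork_view.py available): the matplotlib path above is reached
  by viewing with a draw viewer, e.g.

    PetscCall(DMView(dm, PETSC_VIEWER_DRAW_WORLD));

  or, for an application that calls DMViewFromOptions()/DMSetFromOptions(), via a
  draw-type -dm_view option on the command line.
*/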

PetscErrorCode DMView_Network(DM dm, PetscViewer viewer)
{
  PetscBool         iascii, isdraw;
  PetscViewerFormat format;

  PetscFunctionBegin;
  PetscValidHeaderSpecific(dm, DM_CLASSID, 1);
  PetscValidHeaderSpecific(viewer, PETSC_VIEWER_CLASSID, 2);
  PetscCall(PetscViewerGetFormat(viewer, &format));

  PetscCall(PetscObjectTypeCompare((PetscObject)viewer, PETSCVIEWERDRAW, &isdraw));
  if (isdraw) {
    PetscCall(DMView_Network_Matplotlib(dm, viewer));
    PetscFunctionReturn(PETSC_SUCCESS);
  }

  PetscCall(PetscObjectTypeCompare((PetscObject)viewer, PETSCVIEWERASCII, &iascii));
  if (iascii) {
    const PetscInt *cone, *vtx, *edges;
    PetscInt        vfrom, vto, i, j, nv, ne, nsv, p, nsubnet;
    DM_Network     *network = (DM_Network *)dm->data;
    PetscMPIInt     rank;

    PetscCallMPI(MPI_Comm_rank(PetscObjectComm((PetscObject)dm), &rank));
    if (format == PETSC_VIEWER_ASCII_CSV) {
      PetscCall(DMView_Network_CSV(dm, viewer));
      PetscFunctionReturn(PETSC_SUCCESS);
    }

    nsubnet = network->cloneshared->Nsubnet; /* num of subnetworks */
    if (!rank) {
      PetscCall(PetscPrintf(PETSC_COMM_SELF, "  NSubnets: %" PetscInt_FMT "; NEdges: %" PetscInt_FMT "; NVertices: %" PetscInt_FMT "; NSharedVertices: %" PetscInt_FMT ".\n", nsubnet, network->cloneshared->NEdges, network->cloneshared->NVertices,
                            network->cloneshared->Nsvtx));
    }

    PetscCall(DMNetworkGetSharedVertices(dm, &nsv, NULL));
    PetscCall(PetscViewerASCIIPushSynchronized(viewer));
    PetscCall(PetscViewerASCIISynchronizedPrintf(viewer, "  [%d] nEdges: %" PetscInt_FMT "; nVertices: %" PetscInt_FMT "; nSharedVertices: %" PetscInt_FMT "\n", rank, network->cloneshared->nEdges, network->cloneshared->nVertices, nsv));

    for (i = 0; i < nsubnet; i++) {
      PetscCall(DMNetworkGetSubnetwork(dm, i, &nv, &ne, &vtx, &edges));
      if (ne) {
        PetscCall(PetscViewerASCIISynchronizedPrintf(viewer, "     Subnet %" PetscInt_FMT ": nEdges %" PetscInt_FMT ", nVertices (including shared vertices) %" PetscInt_FMT "\n", i, ne, nv));
        for (j = 0; j < ne; j++) {
          p = edges[j];
          PetscCall(DMNetworkGetConnectedVertices(dm, p, &cone));
          PetscCall(DMNetworkGetGlobalVertexIndex(dm, cone[0], &vfrom));
          PetscCall(DMNetworkGetGlobalVertexIndex(dm, cone[1], &vto));
          PetscCall(DMNetworkGetGlobalEdgeIndex(dm, edges[j], &p));
          PetscCall(PetscViewerASCIISynchronizedPrintf(viewer, "       edge %" PetscInt_FMT ": %" PetscInt_FMT " ----> %" PetscInt_FMT "\n", p, vfrom, vto));
        }
      }
    }

    /* Shared vertices */
    PetscCall(DMNetworkGetSharedVertices(dm, NULL, &vtx));
    if (nsv) {
      PetscInt        gidx;
      PetscBool       ghost;
      const PetscInt *sv = NULL;

      PetscCall(PetscViewerASCIISynchronizedPrintf(viewer, "     SharedVertices:\n"));
      for (i = 0; i < nsv; i++) {
        PetscCall(DMNetworkIsGhostVertex(dm, vtx[i], &ghost));
        if (ghost) continue;

        PetscCall(DMNetworkSharedVertexGetInfo(dm, vtx[i], &gidx, &nv, &sv));
        PetscCall(PetscViewerASCIISynchronizedPrintf(viewer, "       svtx %" PetscInt_FMT ": global index %" PetscInt_FMT ", subnet[%" PetscInt_FMT "].%" PetscInt_FMT " ---->\n", i, gidx, sv[0], sv[1]));
        for (j = 1; j < nv; j++) PetscCall(PetscViewerASCIISynchronizedPrintf(viewer, "                                           ----> subnet[%" PetscInt_FMT "].%" PetscInt_FMT "\n", sv[2 * j], sv[2 * j + 1]));
      }
    }
    PetscCall(PetscViewerFlush(viewer));
    PetscCall(PetscViewerASCIIPopSynchronized(viewer));
  } else PetscCheck(iascii, PetscObjectComm((PetscObject)dm), PETSC_ERR_SUP, "Viewer type %s not yet supported for DMNetwork writing", ((PetscObject)viewer)->type_name);
  PetscFunctionReturn(PETSC_SUCCESS);
}