xref: /petsc/src/mat/tests/ex170.c (revision 40badf4fbc550ac1f60bd080eaff6de6d55b946d)
1 static char help[] = "Scalable algorithm for Connected Components problem.\n\
2 Entails changing the MatMult() for this matrix.\n\n\n";
3 
4 #include <petscmat.h>
5 
6 PETSC_EXTERN PetscErrorCode MatMultMax_SeqAIJ(Mat,Vec,Vec);
7 PETSC_EXTERN PetscErrorCode MatMultAddMax_SeqAIJ(Mat,Vec,Vec,Vec);
8 #include <../src/mat/impls/aij/mpi/mpiaij.h>
9 
10 /*
11   Paper with Ananth: Frobenius norm of band was good proxy, but really want to know the rank outside
12 
13   LU for diagonal blocks must do shifting instead of pivoting, preferably shifting individual rows (like Pardiso)
14 
15   Draw picture of flow of reordering
16 
17   Measure Forbenius norm of the blocks being dropped by Truncated SPIKE (might be contaminated by pivoting in LU)
18 
19   Report on using Florida matrices (Maxim, Murat)
20 */
21 
22 /*
23 I have thought about how to do this. Here is a prototype algorithm. Let A be
24 the adjacency matrix (0 or 1), and let each component be identified by the
25 lowest numbered vertex in it. We initialize a vector c so that each vertex is
26 a component, c_i = i. Now we act on c with A, using a special product
27 
28   c = A * c
29 
30 where we replace addition with min. The fixed point of this operation is a vector
31 c which is the component for each vertex. The number of iterates is
32 
33   max_{components} depth of BFS tree for component
34 
35 We can accelerate this algorithm by preprocessing all local domains using the
36 same algorithm. Then the number of iterations is bounded by the depth of the BFS
37 tree for the graph on supervertices defined over local components, which is
38 bounded by p. In practice, this should be very fast.
39 */
40 
41 /* Only isolated vertices get a 1 on the diagonal */
42 PetscErrorCode CreateGraph(MPI_Comm comm, PetscInt testnum, Mat *A)
43 {
44   Mat            G;
45   PetscErrorCode ierr;
46 
47   PetscFunctionBegin;
48   CHKERRQ(MatCreate(comm, &G));
49   /* The identity matrix */
50   switch (testnum) {
51   case 0:
52   {
53     Vec D;
54 
55     CHKERRQ(MatSetSizes(G, PETSC_DETERMINE, PETSC_DETERMINE, 5, 5));
56     CHKERRQ(MatSetUp(G));
57     CHKERRQ(MatCreateVecs(G, &D, NULL));
58     CHKERRQ(VecSet(D, 1.0));
59     CHKERRQ(MatDiagonalSet(G, D, INSERT_VALUES));
60     CHKERRQ(VecDestroy(&D));
61   }
62   break;
63   case 1:
64   {
65     PetscScalar vals[3] = {1.0, 1.0, 1.0};
66     PetscInt    cols[3];
67     PetscInt    rStart, rEnd, row;
68 
69     CHKERRQ(MatSetSizes(G, PETSC_DETERMINE, PETSC_DETERMINE, 5, 5));
70     CHKERRQ(MatSetFromOptions(G));
71     CHKERRQ(MatSeqAIJSetPreallocation(G, 2, NULL));
72     CHKERRQ(MatSetUp(G));
73     CHKERRQ(MatGetOwnershipRange(G, &rStart, &rEnd));
74     row  = 0;
75     cols[0] = 0; cols[1] = 1;
76     if ((row >= rStart) && (row < rEnd)) CHKERRQ(MatSetValues(G, 1, &row, 2, cols, vals, INSERT_VALUES));
77     row  = 1;
78     cols[0] = 0; cols[1] = 1;
79     if ((row >= rStart) && (row < rEnd)) CHKERRQ(MatSetValues(G, 1, &row, 2, cols, vals, INSERT_VALUES));
80     row  = 2;
81     cols[0] = 2; cols[1] = 3;
82     if ((row >= rStart) && (row < rEnd)) CHKERRQ(MatSetValues(G, 1, &row, 2, cols, vals, INSERT_VALUES));
83     row  = 3;
84     cols[0] = 3; cols[1] = 4;
85     if ((row >= rStart) && (row < rEnd)) CHKERRQ(MatSetValues(G, 1, &row, 2, cols, vals, INSERT_VALUES));
86     row  = 4;
87     cols[0] = 4; cols[1] = 2;
88     if ((row >= rStart) && (row < rEnd)) CHKERRQ(MatSetValues(G, 1, &row, 2, cols, vals, INSERT_VALUES));
89     CHKERRQ(MatAssemblyBegin(G, MAT_FINAL_ASSEMBLY));
90     CHKERRQ(MatAssemblyEnd(G, MAT_FINAL_ASSEMBLY));
91   }
92   break;
93   case 2:
94   {
95     PetscScalar vals[3] = {1.0, 1.0, 1.0};
96     PetscInt    cols[3];
97     PetscInt    rStart, rEnd, row;
98 
99     CHKERRQ(MatSetSizes(G, PETSC_DETERMINE, PETSC_DETERMINE, 5, 5));
100     CHKERRQ(MatSetFromOptions(G));
101     CHKERRQ(MatSeqAIJSetPreallocation(G, 2, NULL));
102     CHKERRQ(MatSetUp(G));
103     CHKERRQ(MatGetOwnershipRange(G, &rStart, &rEnd));
104     row  = 0;
105     cols[0] = 0; cols[1] = 4;
106     if ((row >= rStart) && (row < rEnd)) CHKERRQ(MatSetValues(G, 1, &row, 2, cols, vals, INSERT_VALUES));
107     row  = 1;
108     cols[0] = 1; cols[1] = 2;
109     if ((row >= rStart) && (row < rEnd)) CHKERRQ(MatSetValues(G, 1, &row, 2, cols, vals, INSERT_VALUES));
110     row  = 2;
111     cols[0] = 2; cols[1] = 3;
112     if ((row >= rStart) && (row < rEnd)) CHKERRQ(MatSetValues(G, 1, &row, 2, cols, vals, INSERT_VALUES));
113     row  = 3;
114     cols[0] = 3; cols[1] = 1;
115     if ((row >= rStart) && (row < rEnd)) CHKERRQ(MatSetValues(G, 1, &row, 2, cols, vals, INSERT_VALUES));
116     row  = 4;
117     cols[0] = 0; cols[1] = 4;
118     if ((row >= rStart) && (row < rEnd)) CHKERRQ(MatSetValues(G, 1, &row, 2, cols, vals, INSERT_VALUES));
119     CHKERRQ(MatAssemblyBegin(G, MAT_FINAL_ASSEMBLY));
120     CHKERRQ(MatAssemblyEnd(G, MAT_FINAL_ASSEMBLY));
121   }
122   break;
123   default:
124     SETERRQ(comm, PETSC_ERR_PLIB, "Unknown test %d", testnum);
125   }
126   *A = G;
127   PetscFunctionReturn(0);
128 }
129 
130 int main(int argc, char **argv)
131 {
132   MPI_Comm       comm;
133   Mat            A;    /* A graph */
134   Vec            c;    /* A vector giving the component of each vertex */
135   Vec            cold; /* The vector c from the last iteration */
136   PetscScalar   *carray;
137   PetscInt       testnum = 0;
138   PetscInt       V, vStart, vEnd, v, n;
139   PetscMPIInt    size;
140   PetscErrorCode ierr;
141 
142   ierr = PetscInitialize(&argc, &argv, NULL,help);if (ierr) return ierr;
143   comm = PETSC_COMM_WORLD;
144   CHKERRMPI(MPI_Comm_size(comm, &size));
145   /* Use matrix to encode a graph */
146   CHKERRQ(PetscOptionsGetInt(NULL,NULL, "-testnum", &testnum, NULL));
147   CHKERRQ(CreateGraph(comm, testnum, &A));
148   CHKERRQ(MatGetSize(A, &V, NULL));
149   /* Replace matrix-vector multiplication with one that calculates the minimum rather than the sum */
150   if (size == 1) {
151     CHKERRQ(MatShellSetOperation(A, MATOP_MULT, (void (*)) MatMultMax_SeqAIJ));
152   } else {
153     Mat_MPIAIJ *a = (Mat_MPIAIJ *) A->data;
154 
155     CHKERRQ(MatShellSetOperation(a->A, MATOP_MULT, (void (*)) MatMultMax_SeqAIJ));
156     CHKERRQ(MatShellSetOperation(a->B, MATOP_MULT, (void (*)) MatMultMax_SeqAIJ));
157     CHKERRQ(MatShellSetOperation(a->B, MATOP_MULT_ADD, (void (*)) MatMultAddMax_SeqAIJ));
158   }
159   /* Initialize each vertex as a separate component */
160   CHKERRQ(MatCreateVecs(A, &c, NULL));
161   CHKERRQ(MatGetOwnershipRange(A, &vStart, &vEnd));
162   CHKERRQ(VecGetArray(c, &carray));
163   for (v = vStart; v < vEnd; ++v) {
164     carray[v-vStart] = v;
165   }
166   CHKERRQ(VecRestoreArray(c, &carray));
167   /* Preprocess in parallel to find local components */
168   /* Multiply until c does not change */
169   CHKERRQ(VecDuplicate(c, &cold));
170   for (v = 0; v < V; ++v) {
171     Vec       cnew = cold;
172     PetscBool stop;
173 
174     CHKERRQ(MatMult(A, c, cnew));
175     CHKERRQ(VecEqual(c, cnew, &stop));
176     if (stop) break;
177     cold = c;
178     c    = cnew;
179   }
180   /* Report */
181   CHKERRQ(VecUniqueEntries(c, &n, NULL));
182   CHKERRQ(PetscPrintf(comm, "Components: %d Iterations: %d\n", n, v));
183   CHKERRQ(VecView(c, PETSC_VIEWER_STDOUT_WORLD));
184   /* Cleanup */
185   CHKERRQ(VecDestroy(&c));
186   CHKERRQ(VecDestroy(&cold));
187   ierr = PetscFinalize();
188   return ierr;
189 }
190