/*
   Used by MPIAIJ, MPIBAIJ, and MPISBAIJ to reduce code duplication

     define TYPE as AIJ, BAIJ, or SBAIJ before including this file, plus
            TYPE_AIJ for AIJ or TYPE_SBAIJ for SBAIJ matrices;
            define SUB_TYPE_CUSPARSE to back the sequential pieces with MATSEQAIJCUSPARSE
*/

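/* A minimal sketch of how this header is intended to be consumed (the exact
   include path and the macro spelling at each include site are assumptions,
   not copied from the including sources):

       #define TYPE AIJ
       #define TYPE_AIJ
       #include "mpihashmat.h"
       #undef TYPE
       #undef TYPE_AIJ

   PetscConcat(Mat_MPI, TYPE) then expands to Mat_MPIAIJ and
   PetscConcat(MATSEQ, TYPE) to MATSEQAIJ, so the same function bodies serve
   all three matrix types. */

/*
   Hash-phase MatSetValues(): rows owned by this MPI process are inserted
   immediately, entries in the local diagonal block going into a->A (with
   local column indices) and all remaining entries into a->B (with global
   column indices); rows owned by other processes are placed in the stash and
   communicated during assembly. Negative row indices (and, except for SBAIJ,
   negative column indices) are ignored; for SBAIJ only entries in the upper
   block triangle are kept.
*/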
static PetscErrorCode MatSetValues_MPI_Hash(Mat A, PetscInt m, const PetscInt *rows, PetscInt n, const PetscInt *cols, const PetscScalar *values, InsertMode addv)
{
  PetscConcat(Mat_MPI, TYPE) *a = (PetscConcat(Mat_MPI, TYPE) *)A->data;
  PetscInt                    rStart, rEnd, cStart, cEnd;
#if defined(TYPE_SBAIJ)
  PetscInt bs;
#endif

  PetscFunctionBegin;
  PetscCall(MatGetOwnershipRange(A, &rStart, &rEnd));
  PetscCall(MatGetOwnershipRangeColumn(A, &cStart, &cEnd));
#if defined(TYPE_SBAIJ)
  PetscCall(MatGetBlockSize(A, &bs));
#endif
  for (PetscInt r = 0; r < m; ++r) {
    PetscScalar value;
    if (rows[r] < 0) continue;
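    /* rows owned by another MPI process are stashed and sent when assembly begins */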
    if (rows[r] < rStart || rows[r] >= rEnd) {
      if (a->roworiented) {
        PetscCall(MatStashValuesRow_Private(&A->stash, rows[r], n, cols, values + r * n, PETSC_FALSE));
      } else {
        PetscCall(MatStashValuesCol_Private(&A->stash, rows[r], n, cols, values + r, m, PETSC_FALSE));
      }
    } else {
      for (PetscInt c = 0; c < n; ++c) {
#if defined(TYPE_SBAIJ)
        if (cols[c] / bs < rows[r] / bs) continue;
#else
        if (cols[c] < 0) continue;
#endif
        value = values ? ((a->roworiented) ? values[r * n + c] : values[r + m * c]) : 0;
        if (cols[c] >= cStart && cols[c] < cEnd) PetscCall(MatSetValue(a->A, rows[r] - rStart, cols[c] - cStart, value, addv));
        else PetscCall(MatSetValue(a->B, rows[r] - rStart, cols[c], value, addv));
      }
    }
  }
  PetscFunctionReturn(PETSC_SUCCESS);
}

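/*
   Starts communicating the stashed entries destined for other MPI processes
*/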
static PetscErrorCode MatAssemblyBegin_MPI_Hash(Mat A, MatAssemblyType type)
{
  PetscInt nstash, reallocs;

  PetscFunctionBegin;
  PetscCall(MatStashScatterBegin_Private(A, &A->stash, A->rmap->range));
  PetscCall(MatStashGetInfo_Private(&A->stash, &nstash, &reallocs));
  PetscCall(PetscInfo(A, "Stash has %" PetscInt_FMT " entries, uses %" PetscInt_FMT " mallocs.\n", nstash, reallocs));
  PetscFunctionReturn(PETSC_SUCCESS);
}

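/*
   Receives the stashed entries and inserts them, batching consecutive values
   that belong to the same row into a single MatSetValues_MPI_Hash() call; on
   MAT_FINAL_ASSEMBLY it restores the matrix type's original function table
   and performs the real assembly of a->A, a->B, and finally A
*/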
static PetscErrorCode MatAssemblyEnd_MPI_Hash(Mat A, MatAssemblyType type)
{
  PetscConcat(Mat_MPI, TYPE) *a = (PetscConcat(Mat_MPI, TYPE) *)A->data;
  PetscMPIInt  n;
  PetscScalar *val;
  PetscInt    *row, *col;
  PetscInt     j, ncols, flg, rstart;

  PetscFunctionBegin;
  while (1) {
    PetscCall(MatStashScatterGetMesg_Private(&A->stash, &n, &row, &col, &val, &flg));
    if (!flg) break;

    for (PetscInt i = 0; i < n;) {
      /* identify the consecutive values belonging to the same row */
      for (j = i, rstart = row[j]; j < n; j++) {
        if (row[j] != rstart) break;
      }
      if (j < n) ncols = j - i;
      else ncols = n - i;
      /* assemble all these values with a single function call */
      PetscCall(MatSetValues_MPI_Hash(A, 1, row + i, ncols, col + i, val + i, A->insertmode));
      i = j;
    }
  }
  PetscCall(MatStashScatterEnd_Private(&A->stash));
  if (type != MAT_FINAL_ASSEMBLY) PetscFunctionReturn(PETSC_SUCCESS);

  A->insertmode = NOT_SET_VALUES; /* this was set by the previous calls to MatSetValues() */

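  /* the hash phase is over: restore the type's original function table so the MatAssemblyBegin/End() calls below dispatch to the real assembly routines */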
  PetscCall(PetscMemcpy(&A->ops, &a->cops, sizeof(*(A->ops))));
  A->hash_active = PETSC_FALSE;

  PetscCall(MatAssemblyBegin(a->A, MAT_FINAL_ASSEMBLY));
  PetscCall(MatAssemblyEnd(a->A, MAT_FINAL_ASSEMBLY));
  PetscCall(MatAssemblyBegin(a->B, MAT_FINAL_ASSEMBLY));
  PetscCall(MatAssemblyEnd(a->B, MAT_FINAL_ASSEMBLY));
  PetscCall(MatAssemblyBegin(A, MAT_FINAL_ASSEMBLY));
  PetscCall(MatAssemblyEnd(A, MAT_FINAL_ASSEMBLY));
  PetscFunctionReturn(PETSC_SUCCESS);
}

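/*
   Destroys the stash and the hash-phase sub-matrices, then hands off to the
   matrix type's original destroy routine cached in MatSetUp_MPI_Hash()
*/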
static PetscErrorCode MatDestroy_MPI_Hash(Mat A)
{
  PetscConcat(Mat_MPI, TYPE) *a = (PetscConcat(Mat_MPI, TYPE) *)A->data;

  PetscFunctionBegin;
  PetscCall(MatStashDestroy_Private(&A->stash));
  PetscCall(MatDestroy(&a->A));
  PetscCall(MatDestroy(&a->B));
  PetscCall((*a->cops.destroy)(A));
  PetscFunctionReturn(PETSC_SUCCESS);
}

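/*
   Deliberately a no-op during the hash assembly phase
*/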
static PetscErrorCode MatZeroEntries_MPI_Hash(Mat A)
{
  PetscFunctionBegin;
  PetscFunctionReturn(PETSC_SUCCESS);
}

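/*
   MatSetRandom() requires storage for the values, so it is an error until
   preallocation has been set
*/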
static PetscErrorCode MatSetRandom_MPI_Hash(Mat A, PetscRandom r)
{
  PetscFunctionBegin;
  SETERRQ(PetscObjectComm((PetscObject)A), PETSC_ERR_ARG_WRONGSTATE, "Must set preallocation first");
}

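/*
   Sets up the layouts, creates the sequential matrices a->A (local diagonal
   block) and a->B (all other local entries) used during the hash phase,
   records the matrix type's function table in a->cops, and installs the
   hash-based operations above in its place
*/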
static PetscErrorCode MatSetUp_MPI_Hash(Mat A)
{
  PetscConcat(Mat_MPI, TYPE) *a = (PetscConcat(Mat_MPI, TYPE) *)A->data;
  PetscMPIInt size;
#if !defined(TYPE_AIJ)
  PetscInt bs;
#endif

  PetscFunctionBegin;
  PetscCall(PetscInfo(A, "Using hash-based MatSetValues() because no preallocation was provided\n"));
  PetscCall(PetscLayoutSetUp(A->rmap));
  PetscCall(PetscLayoutSetUp(A->cmap));
  if (A->rmap->bs < 1) A->rmap->bs = 1;
  if (A->cmap->bs < 1) A->cmap->bs = 1;
  PetscCallMPI(MPI_Comm_size(PetscObjectComm((PetscObject)A), &size));

#if !defined(TYPE_AIJ)
  PetscCall(MatGetBlockSize(A, &bs));
  /* these values are normally set in MatMPIBAIJSetPreallocation() or MatMPISBAIJSetPreallocation() */
  a->bs2 = bs * bs;
  a->mbs = A->rmap->n / bs;
  a->nbs = A->cmap->n / bs;
  a->Mbs = A->rmap->N / bs;
  a->Nbs = A->cmap->N / bs;

  for (PetscInt i = 0; i <= a->size; i++) a->rangebs[i] = A->rmap->range[i] / bs;
  a->rstartbs = A->rmap->rstart / bs;
  a->rendbs   = A->rmap->rend / bs;
  a->cstartbs = A->cmap->rstart / bs;
  a->cendbs   = A->cmap->rend / bs;
  PetscCall(MatStashCreate_Private(PetscObjectComm((PetscObject)A), A->rmap->bs, &A->bstash));
#endif

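  /* the sequential matrix holding entries that fall in the local diagonal block */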
  PetscCall(MatCreate(PETSC_COMM_SELF, &a->A));
  PetscCall(MatSetSizes(a->A, A->rmap->n, A->cmap->n, A->rmap->n, A->cmap->n));
  PetscCall(MatSetBlockSizesFromMats(a->A, A, A));
#if defined(SUB_TYPE_CUSPARSE)
  PetscCall(MatSetType(a->A, MATSEQAIJCUSPARSE));
#else
  PetscCall(MatSetType(a->A, PetscConcat(MATSEQ, TYPE)));
#endif
  PetscCall(MatSetUp(a->A));

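  /* the sequential matrix holding all other local entries; in parallel it spans every global column because off-block entries keep their global column indices until assembly */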
  PetscCall(MatCreate(PETSC_COMM_SELF, &a->B));
  PetscCall(MatSetSizes(a->B, A->rmap->n, size > 1 ? A->cmap->N : 0, A->rmap->n, size > 1 ? A->cmap->N : 0));
  PetscCall(MatSetBlockSizesFromMats(a->B, A, A));
#if defined(TYPE_SBAIJ)
  PetscCall(MatSetType(a->B, MATSEQBAIJ));
#else
  #if defined(SUB_TYPE_CUSPARSE)
  PetscCall(MatSetType(a->B, MATSEQAIJCUSPARSE));
  #else
  PetscCall(MatSetType(a->B, PetscConcat(MATSEQ, TYPE)));
  #endif
#endif
  PetscCall(MatSetUp(a->B));

  /* keep a record of the operations so they can be reset when the hash handling is complete */
  PetscCall(PetscMemcpy(&a->cops, &A->ops, sizeof(*(A->ops))));

  A->ops->assemblybegin    = MatAssemblyBegin_MPI_Hash;
  A->ops->assemblyend      = MatAssemblyEnd_MPI_Hash;
  A->ops->setvalues        = MatSetValues_MPI_Hash;
  A->ops->destroy          = MatDestroy_MPI_Hash;
  A->ops->zeroentries      = MatZeroEntries_MPI_Hash;
  A->ops->setrandom        = MatSetRandom_MPI_Hash;
  A->ops->setvaluesblocked = NULL;

  A->preallocated = PETSC_TRUE;
  A->hash_active  = PETSC_TRUE;
  PetscFunctionReturn(PETSC_SUCCESS);
}
198