xref: /petsc/src/mat/impls/elemental/matelem.cxx (revision 66e17bc34c6358e26731cd86c8afc040b40d70c0)
1 #include <petsc/private/petscelemental.h>
2 
3 /*
4     The variable Petsc_Elemental_keyval is used to indicate an MPI attribute that
5   is attached to a communicator, in this case the attribute is a Mat_Elemental_Grid
6 */
7 static PetscMPIInt Petsc_Elemental_keyval = MPI_KEYVAL_INVALID;
8 
9 static PetscErrorCode MatView_Elemental(Mat A,PetscViewer viewer)
10 {
11   PetscErrorCode ierr;
12   Mat_Elemental  *a = (Mat_Elemental*)A->data;
13   PetscBool      iascii;
14 
15   PetscFunctionBegin;
16   ierr = PetscObjectTypeCompare((PetscObject)viewer,PETSCVIEWERASCII,&iascii);CHKERRQ(ierr);
17   if (iascii) {
18     PetscViewerFormat format;
19     ierr = PetscViewerGetFormat(viewer,&format);CHKERRQ(ierr);
20     if (format == PETSC_VIEWER_ASCII_INFO) {
21       /* call elemental viewing function */
22       ierr = PetscViewerASCIIPrintf(viewer,"Elemental run parameters:\n");CHKERRQ(ierr);
23       ierr = PetscViewerASCIIPrintf(viewer,"  allocated entries=%d\n",(*a->emat).AllocatedMemory());CHKERRQ(ierr);
24       ierr = PetscViewerASCIIPrintf(viewer,"  grid height=%d, grid width=%d\n",(*a->emat).Grid().Height(),(*a->emat).Grid().Width());CHKERRQ(ierr);
25       if (format == PETSC_VIEWER_ASCII_FACTOR_INFO) {
26         /* call elemental viewing function */
27         ierr = PetscPrintf(PetscObjectComm((PetscObject)viewer),"test matview_elemental 2\n");CHKERRQ(ierr);
28       }
29 
30     } else if (format == PETSC_VIEWER_DEFAULT) {
31       ierr = PetscViewerASCIIUseTabs(viewer,PETSC_FALSE);CHKERRQ(ierr);
32       El::Print( *a->emat, "Elemental matrix (cyclic ordering)");
33       ierr = PetscViewerASCIIUseTabs(viewer,PETSC_TRUE);CHKERRQ(ierr);
34       if (A->factortype == MAT_FACTOR_NONE){
35         Mat Adense;
36         ierr = MatConvert(A,MATDENSE,MAT_INITIAL_MATRIX,&Adense);CHKERRQ(ierr);
37         ierr = MatView(Adense,viewer);CHKERRQ(ierr);
38         ierr = MatDestroy(&Adense);CHKERRQ(ierr);
39       }
40     } else SETERRQ(PetscObjectComm((PetscObject)viewer),PETSC_ERR_SUP,"Format");
41   } else {
42     /* convert to dense format and call MatView() */
43     Mat Adense;
44     ierr = MatConvert(A,MATDENSE,MAT_INITIAL_MATRIX,&Adense);CHKERRQ(ierr);
45     ierr = MatView(Adense,viewer);CHKERRQ(ierr);
46     ierr = MatDestroy(&Adense);CHKERRQ(ierr);
47   }
48   PetscFunctionReturn(0);
49 }
50 
51 static PetscErrorCode MatGetInfo_Elemental(Mat A,MatInfoType flag,MatInfo *info)
52 {
53   Mat_Elemental  *a = (Mat_Elemental*)A->data;
54 
55   PetscFunctionBegin;
56   info->block_size = 1.0;
57 
58   if (flag == MAT_LOCAL) {
59     info->nz_allocated   = (*a->emat).AllocatedMemory(); /* locally allocated */
60     info->nz_used        = info->nz_allocated;
61   } else if (flag == MAT_GLOBAL_MAX) {
62     //ierr = MPIU_Allreduce(isend,irecv,5,MPIU_REAL,MPIU_MAX,PetscObjectComm((PetscObject)matin));CHKERRMPI(ierr);
63     /* see MatGetInfo_MPIAIJ() for getting global info->nz_allocated! */
64     //SETERRQ(PETSC_COMM_SELF,PETSC_ERR_SUP," MAT_GLOBAL_MAX not written yet");
65   } else if (flag == MAT_GLOBAL_SUM) {
66     //SETERRQ(PETSC_COMM_SELF,PETSC_ERR_SUP," MAT_GLOBAL_SUM not written yet");
67     info->nz_allocated   = (*a->emat).AllocatedMemory(); /* locally allocated */
68     info->nz_used        = info->nz_allocated; /* assume Elemental does accurate allocation */
69     //ierr = MPIU_Allreduce(isend,irecv,1,MPIU_REAL,MPIU_SUM,PetscObjectComm((PetscObject)A));CHKERRMPI(ierr);
70     //PetscPrintf(PETSC_COMM_SELF,"    ... [%d] locally allocated %g\n",rank,info->nz_allocated);
71   }
72 
73   info->nz_unneeded       = 0.0;
74   info->assemblies        = A->num_ass;
75   info->mallocs           = 0;
76   info->memory            = ((PetscObject)A)->mem;
77   info->fill_ratio_given  = 0; /* determined by Elemental */
78   info->fill_ratio_needed = 0;
79   info->factor_mallocs    = 0;
80   PetscFunctionReturn(0);
81 }
82 
/*
   MatSetOption_Elemental - Sets options for a MATELEMENTAL matrix.

   Structural/nonzero-pattern options are accepted as no-ops (dense Elemental
   storage makes them irrelevant); MAT_ROW_ORIENTED controls how the values
   array is interpreted by MatSetValues_Elemental(). Anything else errors.
*/
PetscErrorCode MatSetOption_Elemental(Mat A,MatOption op,PetscBool flg)
{
  Mat_Elemental  *a = (Mat_Elemental*)A->data;

  PetscFunctionBegin;
  switch (op) {
  case MAT_NEW_NONZERO_LOCATIONS:
  case MAT_NEW_NONZERO_LOCATION_ERR:
  case MAT_NEW_NONZERO_ALLOCATION_ERR:
  case MAT_SYMMETRIC:
  case MAT_SORTED_FULL:
  case MAT_HERMITIAN:
    /* accepted but ignored for dense Elemental storage */
    break;
  case MAT_ROW_ORIENTED:
    a->roworiented = flg;
    break;
  default:
    SETERRQ1(PETSC_COMM_SELF,PETSC_ERR_SUP,"unknown option %s",MatOptions[op]);
  }
  PetscFunctionReturn(0);
}
104 
/*
   MatSetValues_Elemental - Inserts or adds a logically dense block of values.

   PETSc (row,col) indices are translated to Elemental indices via the
   P2RO/RO2E maps. Local entries are applied immediately with Set/Update;
   off-process entries are only supported with ADD_VALUES and are handled in
   two passes: pass 1 counts them (numQueues) so storage can be reserved,
   pass 2 queues them with QueueUpdate for communication at assembly time.
   The vals array is read row-major when a->roworiented, column-major
   otherwise. Negative row/column indices are skipped.
*/
static PetscErrorCode MatSetValues_Elemental(Mat A,PetscInt nr,const PetscInt *rows,PetscInt nc,const PetscInt *cols,const PetscScalar *vals,InsertMode imode)
{
  Mat_Elemental  *a = (Mat_Elemental*)A->data;
  PetscInt       i,j,rrank,ridx,crank,cidx,erow,ecol,numQueues=0;

  PetscFunctionBegin;
  // TODO: Initialize matrix to all zeros?

  // Count the number of queues from this process
  if (a->roworiented) {
    /* Pass 1: apply local entries, count off-process entries */
    for (i=0; i<nr; i++) {
      if (rows[i] < 0) continue;
      P2RO(A,0,rows[i],&rrank,&ridx);
      RO2E(A,0,rrank,ridx,&erow);
      if (rrank < 0 || ridx < 0 || erow < 0) SETERRQ(PetscObjectComm((PetscObject)A),PETSC_ERR_PLIB,"Incorrect row translation");
      for (j=0; j<nc; j++) {
        if (cols[j] < 0) continue;
        P2RO(A,1,cols[j],&crank,&cidx);
        RO2E(A,1,crank,cidx,&ecol);
        if (crank < 0 || cidx < 0 || ecol < 0) SETERRQ(PetscObjectComm((PetscObject)A),PETSC_ERR_PLIB,"Incorrect col translation");
        if (!a->emat->IsLocal(erow,ecol)){ /* off-proc entry */
          /* only additive off-process updates can be queued to Elemental */
          if (imode != ADD_VALUES) SETERRQ(PETSC_COMM_SELF,PETSC_ERR_SUP,"Only ADD_VALUES to off-processor entry is supported");
          ++numQueues;
          continue;
        }
        /* local entry: apply immediately; vals is row-major here */
        switch (imode) {
        case INSERT_VALUES: a->emat->Set(erow,ecol,(PetscElemScalar)vals[i*nc+j]); break;
        case ADD_VALUES: a->emat->Update(erow,ecol,(PetscElemScalar)vals[i*nc+j]); break;
        default: SETERRQ1(PetscObjectComm((PetscObject)A),PETSC_ERR_SUP,"No support for InsertMode %d",(int)imode);
        }
      }
    }

    /* Pass 2: reserve queue storage, then queue all off-process updates */
    a->emat->Reserve( numQueues);
    for (i=0; i<nr; i++) {
      if (rows[i] < 0) continue;
      P2RO(A,0,rows[i],&rrank,&ridx);
      RO2E(A,0,rrank,ridx,&erow);
      for (j=0; j<nc; j++) {
        if (cols[j] < 0) continue;
        P2RO(A,1,cols[j],&crank,&cidx);
        RO2E(A,1,crank,cidx,&ecol);
        if (!a->emat->IsLocal(erow,ecol)) { /*off-proc entry*/
          a->emat->QueueUpdate( erow, ecol, vals[i*nc+j]);
        }
      }
    }
  } else { /* columnoriented */
    /* Pass 1: apply local entries, count off-process entries */
    for (j=0; j<nc; j++) {
      if (cols[j] < 0) continue;
      P2RO(A,1,cols[j],&crank,&cidx);
      RO2E(A,1,crank,cidx,&ecol);
      if (crank < 0 || cidx < 0 || ecol < 0) SETERRQ(PetscObjectComm((PetscObject)A),PETSC_ERR_PLIB,"Incorrect col translation");
      for (i=0; i<nr; i++) {
        if (rows[i] < 0) continue;
        P2RO(A,0,rows[i],&rrank,&ridx);
        RO2E(A,0,rrank,ridx,&erow);
        if (rrank < 0 || ridx < 0 || erow < 0) SETERRQ(PetscObjectComm((PetscObject)A),PETSC_ERR_PLIB,"Incorrect row translation");
        if (!a->emat->IsLocal(erow,ecol)){ /* off-proc entry */
          if (imode != ADD_VALUES) SETERRQ(PETSC_COMM_SELF,PETSC_ERR_SUP,"Only ADD_VALUES to off-processor entry is supported");
          ++numQueues;
          continue;
        }
        /* local entry: apply immediately; vals is column-major here */
        switch (imode) {
        case INSERT_VALUES: a->emat->Set(erow,ecol,(PetscElemScalar)vals[i+j*nr]); break;
        case ADD_VALUES: a->emat->Update(erow,ecol,(PetscElemScalar)vals[i+j*nr]); break;
        default: SETERRQ1(PetscObjectComm((PetscObject)A),PETSC_ERR_SUP,"No support for InsertMode %d",(int)imode);
        }
      }
    }

    /* Pass 2: reserve queue storage, then queue all off-process updates */
    a->emat->Reserve( numQueues);
    for (j=0; j<nc; j++) {
      if (cols[j] < 0) continue;
      P2RO(A,1,cols[j],&crank,&cidx);
      RO2E(A,1,crank,cidx,&ecol);

      for (i=0; i<nr; i++) {
        if (rows[i] < 0) continue;
        P2RO(A,0,rows[i],&rrank,&ridx);
        RO2E(A,0,rrank,ridx,&erow);
        if (!a->emat->IsLocal(erow,ecol)) { /*off-proc entry*/
          a->emat->QueueUpdate( erow, ecol, vals[i+j*nr]);
        }
      }
    }
  }
  PetscFunctionReturn(0);
}
202 
/*
   MatMult_Elemental - Computes Y = A*X using El::Gemv.

   The PETSc vector arrays are wrapped (zero-copy) as [VC,STAR] distributed
   Elemental vectors via Attach/LockedAttach; X is attached read-only.
*/
static PetscErrorCode MatMult_Elemental(Mat A,Vec X,Vec Y)
{
  Mat_Elemental         *a = (Mat_Elemental*)A->data;
  PetscErrorCode        ierr;
  const PetscElemScalar *x;
  PetscElemScalar       *y;
  PetscElemScalar       one = 1,zero = 0;

  PetscFunctionBegin;
  ierr = VecGetArrayRead(X,(const PetscScalar **)&x);CHKERRQ(ierr);
  ierr = VecGetArray(Y,(PetscScalar **)&y);CHKERRQ(ierr);
  { /* Scoping so that constructor is called before pointer is returned */
    El::DistMatrix<PetscElemScalar,El::VC,El::STAR> xe, ye;
    xe.LockedAttach(A->cmap->N,1,*a->grid,0,0,x,A->cmap->n);
    ye.Attach(A->rmap->N,1,*a->grid,0,0,y,A->rmap->n);
    El::Gemv(El::NORMAL,one,*a->emat,xe,zero,ye);
  }
  ierr = VecRestoreArrayRead(X,(const PetscScalar **)&x);CHKERRQ(ierr);
  ierr = VecRestoreArray(Y,(PetscScalar **)&y);CHKERRQ(ierr);
  PetscFunctionReturn(0);
}
224 
/*
   MatMultTranspose_Elemental - Computes Y = A^T*X using El::Gemv.

   Same zero-copy Attach/LockedAttach scheme as MatMult_Elemental, with the
   row/column maps swapped since the transpose is applied.
*/
static PetscErrorCode MatMultTranspose_Elemental(Mat A,Vec X,Vec Y)
{
  Mat_Elemental         *a = (Mat_Elemental*)A->data;
  PetscErrorCode        ierr;
  const PetscElemScalar *x;
  PetscElemScalar       *y;
  PetscElemScalar       one = 1,zero = 0;

  PetscFunctionBegin;
  ierr = VecGetArrayRead(X,(const PetscScalar **)&x);CHKERRQ(ierr);
  ierr = VecGetArray(Y,(PetscScalar **)&y);CHKERRQ(ierr);
  { /* Scoping so that constructor is called before pointer is returned */
    El::DistMatrix<PetscElemScalar,El::VC,El::STAR> xe, ye;
    xe.LockedAttach(A->rmap->N,1,*a->grid,0,0,x,A->rmap->n);
    ye.Attach(A->cmap->N,1,*a->grid,0,0,y,A->cmap->n);
    El::Gemv(El::TRANSPOSE,one,*a->emat,xe,zero,ye);
  }
  ierr = VecRestoreArrayRead(X,(const PetscScalar **)&x);CHKERRQ(ierr);
  ierr = VecRestoreArray(Y,(PetscScalar **)&y);CHKERRQ(ierr);
  PetscFunctionReturn(0);
}
246 
/*
   MatMultAdd_Elemental - Computes Z = A*X + Y.

   Y is first copied into Z (unless they alias), then Gemv accumulates A*X
   into Z with beta = 1.
*/
static PetscErrorCode MatMultAdd_Elemental(Mat A,Vec X,Vec Y,Vec Z)
{
  Mat_Elemental         *a = (Mat_Elemental*)A->data;
  PetscErrorCode        ierr;
  const PetscElemScalar *x;
  PetscElemScalar       *z;
  PetscElemScalar       one = 1;

  PetscFunctionBegin;
  if (Y != Z) {ierr = VecCopy(Y,Z);CHKERRQ(ierr);}
  ierr = VecGetArrayRead(X,(const PetscScalar **)&x);CHKERRQ(ierr);
  ierr = VecGetArray(Z,(PetscScalar **)&z);CHKERRQ(ierr);
  { /* Scoping so that constructor is called before pointer is returned */
    El::DistMatrix<PetscElemScalar,El::VC,El::STAR> xe, ze;
    xe.LockedAttach(A->cmap->N,1,*a->grid,0,0,x,A->cmap->n);
    ze.Attach(A->rmap->N,1,*a->grid,0,0,z,A->rmap->n);
    El::Gemv(El::NORMAL,one,*a->emat,xe,one,ze);
  }
  ierr = VecRestoreArrayRead(X,(const PetscScalar **)&x);CHKERRQ(ierr);
  ierr = VecRestoreArray(Z,(PetscScalar **)&z);CHKERRQ(ierr);
  PetscFunctionReturn(0);
}
269 
/*
   MatMultTransposeAdd_Elemental - Computes Z = A^T*X + Y.

   Y is first copied into Z (unless they alias), then Gemv accumulates A^T*X
   into Z with beta = 1.
*/
static PetscErrorCode MatMultTransposeAdd_Elemental(Mat A,Vec X,Vec Y,Vec Z)
{
  Mat_Elemental         *a = (Mat_Elemental*)A->data;
  PetscErrorCode        ierr;
  const PetscElemScalar *x;
  PetscElemScalar       *z;
  PetscElemScalar       one = 1;

  PetscFunctionBegin;
  if (Y != Z) {ierr = VecCopy(Y,Z);CHKERRQ(ierr);}
  ierr = VecGetArrayRead(X,(const PetscScalar **)&x);CHKERRQ(ierr);
  ierr = VecGetArray(Z,(PetscScalar **)&z);CHKERRQ(ierr);
  { /* Scoping so that constructor is called before pointer is returned */
    El::DistMatrix<PetscElemScalar,El::VC,El::STAR> xe, ze;
    xe.LockedAttach(A->rmap->N,1,*a->grid,0,0,x,A->rmap->n);
    ze.Attach(A->cmap->N,1,*a->grid,0,0,z,A->cmap->n);
    El::Gemv(El::TRANSPOSE,one,*a->emat,xe,one,ze);
  }
  ierr = VecRestoreArrayRead(X,(const PetscScalar **)&x);CHKERRQ(ierr);
  ierr = VecRestoreArray(Z,(PetscScalar **)&z);CHKERRQ(ierr);
  PetscFunctionReturn(0);
}
292 
293 PetscErrorCode MatMatMultNumeric_Elemental(Mat A,Mat B,Mat C)
294 {
295   Mat_Elemental    *a = (Mat_Elemental*)A->data;
296   Mat_Elemental    *b = (Mat_Elemental*)B->data;
297   Mat_Elemental    *c = (Mat_Elemental*)C->data;
298   PetscElemScalar  one = 1,zero = 0;
299 
300   PetscFunctionBegin;
301   { /* Scoping so that constructor is called before pointer is returned */
302     El::Gemm(El::NORMAL,El::NORMAL,one,*a->emat,*b->emat,zero,*c->emat);
303   }
304   C->assembled = PETSC_TRUE;
305   PetscFunctionReturn(0);
306 }
307 
/*
   MatMatMultSymbolic_Elemental - Symbolic phase of C = A*B.

   Sizes and sets up Ce as a MATELEMENTAL matrix with A's row layout and B's
   column layout, and installs the numeric-phase routine. The fill argument
   is ignored (dense storage).
*/
PetscErrorCode MatMatMultSymbolic_Elemental(Mat A,Mat B,PetscReal fill,Mat Ce)
{
  PetscErrorCode ierr;

  PetscFunctionBegin;
  ierr = MatSetSizes(Ce,A->rmap->n,B->cmap->n,PETSC_DECIDE,PETSC_DECIDE);CHKERRQ(ierr);
  ierr = MatSetType(Ce,MATELEMENTAL);CHKERRQ(ierr);
  ierr = MatSetUp(Ce);CHKERRQ(ierr);
  Ce->ops->matmultnumeric = MatMatMultNumeric_Elemental;
  PetscFunctionReturn(0);
}
319 
320 static PetscErrorCode MatMatTransposeMultNumeric_Elemental(Mat A,Mat B,Mat C)
321 {
322   Mat_Elemental      *a = (Mat_Elemental*)A->data;
323   Mat_Elemental      *b = (Mat_Elemental*)B->data;
324   Mat_Elemental      *c = (Mat_Elemental*)C->data;
325   PetscElemScalar    one = 1,zero = 0;
326 
327   PetscFunctionBegin;
328   { /* Scoping so that constructor is called before pointer is returned */
329     El::Gemm(El::NORMAL,El::TRANSPOSE,one,*a->emat,*b->emat,zero,*c->emat);
330   }
331   C->assembled = PETSC_TRUE;
332   PetscFunctionReturn(0);
333 }
334 
/*
   MatMatTransposeMultSymbolic_Elemental - Symbolic phase of C = A*B^T.

   Sizes and sets up C as MATELEMENTAL with A's row layout and B's row layout
   (B is transposed). The fill argument is ignored (dense storage). The numeric
   routine is installed via MatProductSetFromOptions_Elemental_ABt().
*/
static PetscErrorCode MatMatTransposeMultSymbolic_Elemental(Mat A,Mat B,PetscReal fill,Mat C)
{
  PetscErrorCode ierr;

  PetscFunctionBegin;
  ierr = MatSetSizes(C,A->rmap->n,B->rmap->n,PETSC_DECIDE,PETSC_DECIDE);CHKERRQ(ierr);
  ierr = MatSetType(C,MATELEMENTAL);CHKERRQ(ierr);
  ierr = MatSetUp(C);CHKERRQ(ierr);
  PetscFunctionReturn(0);
}
345 
346 /* --------------------------------------- */
/* Install the symbolic routines for the C = A*B product on C. */
static PetscErrorCode MatProductSetFromOptions_Elemental_AB(Mat C)
{
  PetscFunctionBegin;
  C->ops->matmultsymbolic = MatMatMultSymbolic_Elemental;
  C->ops->productsymbolic = MatProductSymbolic_AB;
  PetscFunctionReturn(0);
}
354 
/* Install the symbolic routines for the C = A*B^T product on C. */
static PetscErrorCode MatProductSetFromOptions_Elemental_ABt(Mat C)
{
  PetscFunctionBegin;
  C->ops->mattransposemultsymbolic = MatMatTransposeMultSymbolic_Elemental;
  C->ops->productsymbolic          = MatProductSymbolic_ABt;
  PetscFunctionReturn(0);
}
362 
363 PETSC_INTERN PetscErrorCode MatProductSetFromOptions_Elemental(Mat C)
364 {
365   PetscErrorCode ierr;
366   Mat_Product    *product = C->product;
367 
368   PetscFunctionBegin;
369   switch (product->type) {
370   case MATPRODUCT_AB:
371     ierr = MatProductSetFromOptions_Elemental_AB(C);CHKERRQ(ierr);
372     break;
373   case MATPRODUCT_ABt:
374     ierr = MatProductSetFromOptions_Elemental_ABt(C);CHKERRQ(ierr);
375     break;
376   default:
377     break;
378   }
379   PetscFunctionReturn(0);
380 }
381 
/*
   MatMatMultNumeric_Elemental_MPIDense - Numeric phase of C = A*B where A is
   MATELEMENTAL and B is MPIDENSE.

   B is converted to MATELEMENTAL, multiplied with Elemental's GEMM, and the
   result converted back into the preallocated MPIDENSE matrix C.
*/
PetscErrorCode MatMatMultNumeric_Elemental_MPIDense(Mat A,Mat B,Mat C)
{
  Mat            Be,Ce;
  PetscErrorCode ierr;

  PetscFunctionBegin;
  ierr = MatConvert(B,MATELEMENTAL,MAT_INITIAL_MATRIX,&Be);CHKERRQ(ierr);
  ierr = MatMatMult(A,Be,MAT_INITIAL_MATRIX,PETSC_DEFAULT,&Ce);CHKERRQ(ierr);
  ierr = MatConvert(Ce,MATMPIDENSE,MAT_REUSE_MATRIX,&C);CHKERRQ(ierr);
  ierr = MatDestroy(&Be);CHKERRQ(ierr);
  ierr = MatDestroy(&Ce);CHKERRQ(ierr);
  PetscFunctionReturn(0);
}
395 
/*
   MatMatMultSymbolic_Elemental_MPIDense - Symbolic phase of C = A*B for
   Elemental*MPIDense: sizes C as MPIDENSE and installs the numeric routine.
   The fill argument is ignored (dense storage).
*/
PetscErrorCode MatMatMultSymbolic_Elemental_MPIDense(Mat A,Mat B,PetscReal fill,Mat C)
{
  PetscErrorCode ierr;

  PetscFunctionBegin;
  ierr = MatSetSizes(C,A->rmap->n,B->cmap->n,PETSC_DECIDE,PETSC_DECIDE);CHKERRQ(ierr);
  ierr = MatSetType(C,MATMPIDENSE);CHKERRQ(ierr);
  ierr = MatSetUp(C);CHKERRQ(ierr);
  C->ops->matmultnumeric = MatMatMultNumeric_Elemental_MPIDense;
  PetscFunctionReturn(0);
}
407 
/* Install the symbolic routines for C = A*B with Elemental A and MPIDense B. */
PetscErrorCode MatProductSetFromOptions_Elemental_MPIDense_AB(Mat C)
{
  PetscFunctionBegin;
  C->ops->matmultsymbolic = MatMatMultSymbolic_Elemental_MPIDense;
  C->ops->productsymbolic = MatProductSymbolic_AB;
  PetscFunctionReturn(0);
}
415 
/*
   MatProductSetFromOptions_Elemental_MPIDense - Dispatches product setup for
   mixed Elemental/MPIDense operands; only the AB product type is supported,
   all others are left unconfigured.
*/
PetscErrorCode MatProductSetFromOptions_Elemental_MPIDense(Mat C)
{
  PetscErrorCode ierr;
  Mat_Product    *product = C->product;

  PetscFunctionBegin;
  if (product->type == MATPRODUCT_AB) {
    ierr = MatProductSetFromOptions_Elemental_MPIDense_AB(C);CHKERRQ(ierr);
  }
  PetscFunctionReturn(0);
}
427 /* --------------------------------------- */
428 
/*
   MatGetDiagonal_Elemental - Extracts the main diagonal of A into D.

   Every rank walks all min(nrows,ncols) diagonal entries; a->emat->Get()
   is a collective lookup, and VecSetValues/VecAssembly route each value to
   its owner in D. NOTE(review): this is O(nD) collective Gets — simple but
   potentially slow for large matrices.
*/
static PetscErrorCode MatGetDiagonal_Elemental(Mat A,Vec D)
{
  PetscInt        i,nrows,ncols,nD,rrank,ridx,crank,cidx;
  Mat_Elemental   *a = (Mat_Elemental*)A->data;
  PetscErrorCode  ierr;
  PetscElemScalar v;
  MPI_Comm        comm;

  PetscFunctionBegin;
  ierr = PetscObjectGetComm((PetscObject)A,&comm);CHKERRQ(ierr);
  ierr = MatGetSize(A,&nrows,&ncols);CHKERRQ(ierr);
  nD = nrows>ncols ? ncols : nrows; /* diagonal length of a possibly rectangular matrix */
  for (i=0; i<nD; i++) {
    PetscInt erow,ecol;
    /* translate PETSc index i to Elemental row/column indices */
    P2RO(A,0,i,&rrank,&ridx);
    RO2E(A,0,rrank,ridx,&erow);
    if (rrank < 0 || ridx < 0 || erow < 0) SETERRQ(comm,PETSC_ERR_PLIB,"Incorrect row translation");
    P2RO(A,1,i,&crank,&cidx);
    RO2E(A,1,crank,cidx,&ecol);
    if (crank < 0 || cidx < 0 || ecol < 0) SETERRQ(comm,PETSC_ERR_PLIB,"Incorrect col translation");
    v = a->emat->Get(erow,ecol);
    ierr = VecSetValues(D,1,&i,(PetscScalar*)&v,INSERT_VALUES);CHKERRQ(ierr);
  }
  ierr = VecAssemblyBegin(D);CHKERRQ(ierr);
  ierr = VecAssemblyEnd(D);CHKERRQ(ierr);
  PetscFunctionReturn(0);
}
456 
/*
   MatDiagonalScale_Elemental - Computes X = diag(L)*X*diag(R).

   Either vector may be NULL to skip that side. Each vector is wrapped
   zero-copy as a [VC,STAR] Elemental vector and applied with
   El::DiagonalScale on the appropriate side.
*/
static PetscErrorCode MatDiagonalScale_Elemental(Mat X,Vec L,Vec R)
{
  Mat_Elemental         *x = (Mat_Elemental*)X->data;
  const PetscElemScalar *d;
  PetscErrorCode        ierr;

  PetscFunctionBegin;
  if (R) {
    /* right scaling: columns of X scaled by entries of R */
    ierr = VecGetArrayRead(R,(const PetscScalar **)&d);CHKERRQ(ierr);
    El::DistMatrix<PetscElemScalar,El::VC,El::STAR> de;
    de.LockedAttach(X->cmap->N,1,*x->grid,0,0,d,X->cmap->n);
    El::DiagonalScale(El::RIGHT,El::NORMAL,de,*x->emat);
    ierr = VecRestoreArrayRead(R,(const PetscScalar **)&d);CHKERRQ(ierr);
  }
  if (L) {
    /* left scaling: rows of X scaled by entries of L */
    ierr = VecGetArrayRead(L,(const PetscScalar **)&d);CHKERRQ(ierr);
    El::DistMatrix<PetscElemScalar,El::VC,El::STAR> de;
    de.LockedAttach(X->rmap->N,1,*x->grid,0,0,d,X->rmap->n);
    El::DiagonalScale(El::LEFT,El::NORMAL,de,*x->emat);
    ierr = VecRestoreArrayRead(L,(const PetscScalar **)&d);CHKERRQ(ierr);
  }
  PetscFunctionReturn(0);
}
480 
/*
   MatMissingDiagonal_Elemental - A dense Elemental matrix always stores every
   diagonal entry, so nothing is ever missing; d is left untouched.
*/
static PetscErrorCode MatMissingDiagonal_Elemental(Mat A,PetscBool *missing,PetscInt *d)
{
  PetscFunctionBegin;
  *missing = PETSC_FALSE;
  PetscFunctionReturn(0);
}
487 
488 static PetscErrorCode MatScale_Elemental(Mat X,PetscScalar a)
489 {
490   Mat_Elemental  *x = (Mat_Elemental*)X->data;
491 
492   PetscFunctionBegin;
493   El::Scale((PetscElemScalar)a,*x->emat);
494   PetscFunctionReturn(0);
495 }
496 
/*
   MatAXPY_Elemental - Computes Y = a*X + Y via El::Axpy.

   The structure argument str is ignored: both matrices are dense with
   identical layouts. Y's object state is bumped so cached data is invalidated.
*/
static PetscErrorCode MatAXPY_Elemental(Mat Y,PetscScalar a,Mat X,MatStructure str)
{
  Mat_Elemental  *x = (Mat_Elemental*)X->data;
  Mat_Elemental  *y = (Mat_Elemental*)Y->data;
  PetscErrorCode ierr;

  PetscFunctionBegin;
  El::Axpy((PetscElemScalar)a,*x->emat,*y->emat);
  ierr = PetscObjectStateIncrease((PetscObject)Y);CHKERRQ(ierr);
  PetscFunctionReturn(0);
}
511 
/*
   MatCopy_Elemental - Copies the entries of A into B (str ignored: both are
   dense). B's object state is bumped so cached data is invalidated.
*/
static PetscErrorCode MatCopy_Elemental(Mat A,Mat B,MatStructure str)
{
  Mat_Elemental *a=(Mat_Elemental*)A->data;
  Mat_Elemental *b=(Mat_Elemental*)B->data;
  PetscErrorCode ierr;

  PetscFunctionBegin;
  El::Copy(*a->emat,*b->emat);
  ierr = PetscObjectStateIncrease((PetscObject)B);CHKERRQ(ierr);
  PetscFunctionReturn(0);
}
523 
/*
   MatDuplicate_Elemental - Creates a new MATELEMENTAL matrix with A's layout,
   copying the values when op == MAT_COPY_VALUES. The duplicate is marked
   assembled in all cases.
*/
static PetscErrorCode MatDuplicate_Elemental(Mat A,MatDuplicateOption op,Mat *B)
{
  Mat            Be;
  MPI_Comm       comm;
  Mat_Elemental  *a=(Mat_Elemental*)A->data;
  PetscErrorCode ierr;

  PetscFunctionBegin;
  ierr = PetscObjectGetComm((PetscObject)A,&comm);CHKERRQ(ierr);
  ierr = MatCreate(comm,&Be);CHKERRQ(ierr);
  ierr = MatSetSizes(Be,A->rmap->n,A->cmap->n,PETSC_DECIDE,PETSC_DECIDE);CHKERRQ(ierr);
  ierr = MatSetType(Be,MATELEMENTAL);CHKERRQ(ierr);
  ierr = MatSetUp(Be);CHKERRQ(ierr);
  *B = Be;
  if (op == MAT_COPY_VALUES) {
    Mat_Elemental *b=(Mat_Elemental*)Be->data;
    El::Copy(*a->emat,*b->emat);
  }
  Be->assembled = PETSC_TRUE;
  PetscFunctionReturn(0);
}
545 
/*
   MatTranspose_Elemental - Computes B = A^T via El::Transpose.

   Only out-of-place operation is supported (MAT_INPLACE_MATRIX errors,
   since source and destination would alias inside El::Transpose).
   With MAT_INITIAL_MATRIX a new MATELEMENTAL matrix with swapped layout is
   created; with MAT_REUSE_MATRIX the caller-supplied *B is overwritten.
*/
static PetscErrorCode MatTranspose_Elemental(Mat A,MatReuse reuse,Mat *B)
{
  Mat            Be = *B;
  PetscErrorCode ierr;
  MPI_Comm       comm;
  Mat_Elemental  *a = (Mat_Elemental*)A->data, *b;

  PetscFunctionBegin;
  ierr = PetscObjectGetComm((PetscObject)A,&comm);CHKERRQ(ierr);
  /* Only out-of-place supported */
  if (reuse == MAT_INPLACE_MATRIX) SETERRQ(comm,PETSC_ERR_SUP,"Only out-of-place supported");
  if (reuse == MAT_INITIAL_MATRIX) {
    ierr = MatCreate(comm,&Be);CHKERRQ(ierr);
    ierr = MatSetSizes(Be,A->cmap->n,A->rmap->n,PETSC_DECIDE,PETSC_DECIDE);CHKERRQ(ierr);
    ierr = MatSetType(Be,MATELEMENTAL);CHKERRQ(ierr);
    ierr = MatSetUp(Be);CHKERRQ(ierr);
    *B = Be;
  }
  b = (Mat_Elemental*)Be->data;
  El::Transpose(*a->emat,*b->emat);
  Be->assembled = PETSC_TRUE;
  PetscFunctionReturn(0);
}
569 
/* MatConjugate_Elemental - Replaces A by its complex conjugate, in place. */
static PetscErrorCode MatConjugate_Elemental(Mat A)
{
  Mat_Elemental  *a = (Mat_Elemental*)A->data;

  PetscFunctionBegin;
  El::Conjugate(*a->emat);
  PetscFunctionReturn(0);
}
578 
579 static PetscErrorCode MatHermitianTranspose_Elemental(Mat A,MatReuse reuse,Mat *B)
580 {
581   Mat            Be = *B;
582   PetscErrorCode ierr;
583   MPI_Comm       comm;
584   Mat_Elemental  *a = (Mat_Elemental*)A->data, *b;
585 
586   PetscFunctionBegin;
587   ierr = PetscObjectGetComm((PetscObject)A,&comm);CHKERRQ(ierr);
588   /* Only out-of-place supported */
589   if (reuse == MAT_INITIAL_MATRIX){
590     ierr = MatCreate(comm,&Be);CHKERRQ(ierr);
591     ierr = MatSetSizes(Be,A->cmap->n,A->rmap->n,PETSC_DECIDE,PETSC_DECIDE);CHKERRQ(ierr);
592     ierr = MatSetType(Be,MATELEMENTAL);CHKERRQ(ierr);
593     ierr = MatSetUp(Be);CHKERRQ(ierr);
594     *B = Be;
595   }
596   b = (Mat_Elemental*)Be->data;
597   El::Adjoint(*a->emat,*b->emat);
598   Be->assembled = PETSC_TRUE;
599   PetscFunctionReturn(0);
600 }
601 
/*
   MatSolve_Elemental - Solves A*X = B using a previously computed LU or
   Cholesky factorization stored in A.

   B is copied into X, X's array is wrapped as a [VC,STAR] Elemental vector,
   redistributed to [MC,MR] for the solve (SolveAfter requires that
   distribution), then copied back. The LU path dispatches on a->pivoting:
   0 = no pivoting, 1 = partial pivoting (P), 2 = full pivoting (P and Q).
*/
static PetscErrorCode MatSolve_Elemental(Mat A,Vec B,Vec X)
{
  Mat_Elemental     *a = (Mat_Elemental*)A->data;
  PetscErrorCode    ierr;
  PetscElemScalar   *x;
  PetscInt          pivoting = a->pivoting;

  PetscFunctionBegin;
  ierr = VecCopy(B,X);CHKERRQ(ierr);
  ierr = VecGetArray(X,(PetscScalar **)&x);CHKERRQ(ierr);

  El::DistMatrix<PetscElemScalar,El::VC,El::STAR> xe;
  xe.Attach(A->rmap->N,1,*a->grid,0,0,x,A->rmap->n);
  El::DistMatrix<PetscElemScalar,El::MC,El::MR> xer(xe); /* redistribute for SolveAfter */
  switch (A->factortype) {
  case MAT_FACTOR_LU:
    if (pivoting == 0) {
      El::lu::SolveAfter(El::NORMAL,*a->emat,xer);
    } else if (pivoting == 1) {
      El::lu::SolveAfter(El::NORMAL,*a->emat,*a->P,xer);
    } else { /* pivoting == 2 */
      El::lu::SolveAfter(El::NORMAL,*a->emat,*a->P,*a->Q,xer);
    }
    break;
  case MAT_FACTOR_CHOLESKY:
    El::cholesky::SolveAfter(El::UPPER,El::NORMAL,*a->emat,xer);
    break;
  default:
    SETERRQ(PETSC_COMM_SELF,PETSC_ERR_SUP,"Unfactored Matrix or Unsupported MatFactorType");
    break;
  }
  El::Copy(xer,xe); /* copy the solution back into X's attached storage */

  ierr = VecRestoreArray(X,(PetscScalar **)&x);CHKERRQ(ierr);
  PetscFunctionReturn(0);
}
638 
/* MatSolveAdd_Elemental - Computes X = inv(A)*B + Y using MatSolve_Elemental. */
static PetscErrorCode MatSolveAdd_Elemental(Mat A,Vec B,Vec Y,Vec X)
{
  PetscErrorCode    ierr;

  PetscFunctionBegin;
  ierr = MatSolve_Elemental(A,B,X);CHKERRQ(ierr);
  ierr = VecAXPY(X,1,Y);CHKERRQ(ierr);
  PetscFunctionReturn(0);
}
648 
/*
   MatMatSolve_Elemental - Solves A*X = B for a matrix of right-hand sides
   using A's LU or Cholesky factorization.

   If X is not MATELEMENTAL, B is converted to a temporary Elemental matrix C,
   solved in place, and the result converted back into X (MAT_REUSE_MATRIX).
   If X is MATELEMENTAL, B's entries are copied into X and the solve happens
   directly in X. LU pivoting modes follow a->pivoting as in MatSolve.
*/
static PetscErrorCode MatMatSolve_Elemental(Mat A,Mat B,Mat X)
{
  Mat_Elemental  *a = (Mat_Elemental*)A->data;
  Mat_Elemental  *x;
  Mat            C;
  PetscInt       pivoting = a->pivoting;
  PetscBool      flg;
  MatType        type;
  PetscErrorCode ierr;

  PetscFunctionBegin;
  ierr = MatGetType(X,&type);CHKERRQ(ierr);
  ierr = PetscStrcmp(type,MATELEMENTAL,&flg);CHKERRQ(ierr);
  if (!flg) {
    /* X is not Elemental: solve in a temporary Elemental copy of B */
    ierr = MatConvert(B,MATELEMENTAL,MAT_INITIAL_MATRIX,&C);CHKERRQ(ierr);
    x = (Mat_Elemental*)C->data;
  } else {
    /* X is Elemental: copy B into X and solve in place */
    x = (Mat_Elemental*)X->data;
    El::Copy(*((Mat_Elemental*)B->data)->emat,*x->emat);
  }
  switch (A->factortype) {
  case MAT_FACTOR_LU:
    if (pivoting == 0) {
      El::lu::SolveAfter(El::NORMAL,*a->emat,*x->emat);
    } else if (pivoting == 1) {
      El::lu::SolveAfter(El::NORMAL,*a->emat,*a->P,*x->emat);
    } else {
      El::lu::SolveAfter(El::NORMAL,*a->emat,*a->P,*a->Q,*x->emat);
    }
    break;
  case MAT_FACTOR_CHOLESKY:
    El::cholesky::SolveAfter(El::UPPER,El::NORMAL,*a->emat,*x->emat);
    break;
  default:
    SETERRQ(PETSC_COMM_SELF,PETSC_ERR_SUP,"Unfactored Matrix or Unsupported MatFactorType");
    break;
  }
  if (!flg) {
    /* move the solution from the temporary back into X and clean up */
    ierr = MatConvert(C,type,MAT_REUSE_MATRIX,&X);CHKERRQ(ierr);
    ierr = MatDestroy(&C);CHKERRQ(ierr);
  }
  PetscFunctionReturn(0);
}
692 
/*
   MatLUFactor_Elemental - Computes an in-place LU factorization of A.

   The pivoting strategy comes from a->pivoting (0 = none, 1 = partial with
   permutation P, 2 = full with P and Q); the row/col IS arguments are
   ignored. Marks A as LU-factored and records the solver type.
*/
static PetscErrorCode MatLUFactor_Elemental(Mat A,IS row,IS col,const MatFactorInfo *info)
{
  Mat_Elemental  *a = (Mat_Elemental*)A->data;
  PetscErrorCode ierr;
  PetscInt       pivoting = a->pivoting;

  PetscFunctionBegin;
  if (pivoting == 0) {
    El::LU(*a->emat);
  } else if (pivoting == 1) {
    El::LU(*a->emat,*a->P);
  } else {
    El::LU(*a->emat,*a->P,*a->Q);
  }
  A->factortype = MAT_FACTOR_LU;
  A->assembled  = PETSC_TRUE;

  ierr = PetscFree(A->solvertype);CHKERRQ(ierr);
  ierr = PetscStrallocpy(MATSOLVERELEMENTAL,&A->solvertype);CHKERRQ(ierr);
  PetscFunctionReturn(0);
}
714 
/*
   MatLUFactorNumeric_Elemental - Numeric LU: copies A into the factor matrix
   F, then factors F in place via MatLUFactor_Elemental.
*/
static PetscErrorCode  MatLUFactorNumeric_Elemental(Mat F,Mat A,const MatFactorInfo *info)
{
  PetscErrorCode ierr;

  PetscFunctionBegin;
  ierr = MatCopy(A,F,SAME_NONZERO_PATTERN);CHKERRQ(ierr);
  ierr = MatLUFactor_Elemental(F,0,0,info);CHKERRQ(ierr);
  PetscFunctionReturn(0);
}
724 
/*
   MatLUFactorSymbolic_Elemental - No-op: dense storage needs no symbolic
   phase; F is created and allocated by MatGetFactor_elemental_elemental().
*/
static PetscErrorCode  MatLUFactorSymbolic_Elemental(Mat F,Mat A,IS r,IS c,const MatFactorInfo *info)
{
  PetscFunctionBegin;
  /* F is created and allocated by MatGetFactor_elemental_petsc(), skip this routine. */
  PetscFunctionReturn(0);
}
731 
732 static PetscErrorCode MatCholeskyFactor_Elemental(Mat A,IS perm,const MatFactorInfo *info)
733 {
734   Mat_Elemental  *a = (Mat_Elemental*)A->data;
735   El::DistMatrix<PetscElemScalar,El::MC,El::STAR> d;
736   PetscErrorCode ierr;
737 
738   PetscFunctionBegin;
739   El::Cholesky(El::UPPER,*a->emat);
740   A->factortype = MAT_FACTOR_CHOLESKY;
741   A->assembled  = PETSC_TRUE;
742 
743   ierr = PetscFree(A->solvertype);CHKERRQ(ierr);
744   ierr = PetscStrallocpy(MATSOLVERELEMENTAL,&A->solvertype);CHKERRQ(ierr);
745   PetscFunctionReturn(0);
746 }
747 
/*
   MatCholeskyFactorNumeric_Elemental - Numeric Cholesky: copies A into the
   factor matrix F, then factors F in place via MatCholeskyFactor_Elemental.
*/
static PetscErrorCode MatCholeskyFactorNumeric_Elemental(Mat F,Mat A,const MatFactorInfo *info)
{
  PetscErrorCode ierr;

  PetscFunctionBegin;
  ierr = MatCopy(A,F,SAME_NONZERO_PATTERN);CHKERRQ(ierr);
  ierr = MatCholeskyFactor_Elemental(F,0,info);CHKERRQ(ierr);
  PetscFunctionReturn(0);
}
757 
/*
   MatCholeskyFactorSymbolic_Elemental - No-op: dense storage needs no
   symbolic phase; F is created by MatGetFactor_elemental_elemental().
*/
static PetscErrorCode MatCholeskyFactorSymbolic_Elemental(Mat F,Mat A,IS perm,const MatFactorInfo *info)
{
  PetscFunctionBegin;
  /* F is created and allocated by MatGetFactor_elemental_petsc(), skip this routine. */
  PetscFunctionReturn(0);
}
764 
/* Query callback: reports MATSOLVERELEMENTAL as the solver package for A. */
PetscErrorCode MatFactorGetSolverType_elemental_elemental(Mat A,MatSolverType *type)
{
  PetscFunctionBegin;
  *type = MATSOLVERELEMENTAL;
  PetscFunctionReturn(0);
}
771 
/*
   MatGetFactor_elemental_elemental - Creates the factor matrix F for LU or
   Cholesky factorization of a MATELEMENTAL matrix.

   F is a fully set-up MATELEMENTAL matrix with A's layout; trivialsymbolic is
   set because the symbolic factorization phases are no-ops for dense storage.
*/
static PetscErrorCode MatGetFactor_elemental_elemental(Mat A,MatFactorType ftype,Mat *F)
{
  Mat            B;
  PetscErrorCode ierr;

  PetscFunctionBegin;
  /* Create the factorization matrix */
  ierr = MatCreate(PetscObjectComm((PetscObject)A),&B);CHKERRQ(ierr);
  ierr = MatSetSizes(B,A->rmap->n,A->cmap->n,PETSC_DECIDE,PETSC_DECIDE);CHKERRQ(ierr);
  ierr = MatSetType(B,MATELEMENTAL);CHKERRQ(ierr);
  ierr = MatSetUp(B);CHKERRQ(ierr);
  B->factortype = ftype;
  B->trivialsymbolic = PETSC_TRUE;
  ierr = PetscFree(B->solvertype);CHKERRQ(ierr);
  ierr = PetscStrallocpy(MATSOLVERELEMENTAL,&B->solvertype);CHKERRQ(ierr);

  ierr = PetscObjectComposeFunction((PetscObject)B,"MatFactorGetSolverType_C",MatFactorGetSolverType_elemental_elemental);CHKERRQ(ierr);
  *F            = B;
  PetscFunctionReturn(0);
}
792 
/*
   MatSolverTypeRegister_Elemental - Registers the Elemental solver package
   with PETSc for LU and Cholesky factorization of MATELEMENTAL matrices.
*/
PETSC_EXTERN PetscErrorCode MatSolverTypeRegister_Elemental(void)
{
  PetscErrorCode ierr;

  PetscFunctionBegin;
  ierr = MatSolverTypeRegister(MATSOLVERELEMENTAL,MATELEMENTAL,        MAT_FACTOR_LU,MatGetFactor_elemental_elemental);CHKERRQ(ierr);
  ierr = MatSolverTypeRegister(MATSOLVERELEMENTAL,MATELEMENTAL,        MAT_FACTOR_CHOLESKY,MatGetFactor_elemental_elemental);CHKERRQ(ierr);
  PetscFunctionReturn(0);
}
802 
803 static PetscErrorCode MatNorm_Elemental(Mat A,NormType type,PetscReal *nrm)
804 {
805   Mat_Elemental *a=(Mat_Elemental*)A->data;
806 
807   PetscFunctionBegin;
808   switch (type){
809   case NORM_1:
810     *nrm = El::OneNorm(*a->emat);
811     break;
812   case NORM_FROBENIUS:
813     *nrm = El::FrobeniusNorm(*a->emat);
814     break;
815   case NORM_INFINITY:
816     *nrm = El::InfinityNorm(*a->emat);
817     break;
818   default:
819     SETERRQ(PetscObjectComm((PetscObject)A),PETSC_ERR_SUP,"Unsupported norm type");
820   }
821   PetscFunctionReturn(0);
822 }
823 
/* MatZeroEntries_Elemental - Sets every entry of A to zero, keeping storage. */
static PetscErrorCode MatZeroEntries_Elemental(Mat A)
{
  Mat_Elemental *a=(Mat_Elemental*)A->data;

  PetscFunctionBegin;
  El::Zero(*a->emat);
  PetscFunctionReturn(0);
}
832 
/*
   MatGetOwnershipIS_Elemental - Builds index sets of the global PETSc row and
   column indices stored locally by this rank.

   Elemental stores entries cyclically: local item i corresponds to global
   Elemental index shift + i*stride, which is mapped back to a PETSc index
   via E2RO/RO2P. Either rows or cols may be NULL to skip that set; the
   created ISes take ownership of the index arrays (PETSC_OWN_POINTER).
*/
static PetscErrorCode MatGetOwnershipIS_Elemental(Mat A,IS *rows,IS *cols)
{
  Mat_Elemental  *a = (Mat_Elemental*)A->data;
  PetscErrorCode ierr;
  PetscInt       i,m,shift,stride,*idx;

  PetscFunctionBegin;
  if (rows) {
    m = a->emat->LocalHeight();
    shift = a->emat->ColShift();
    stride = a->emat->ColStride();
    ierr = PetscMalloc1(m,&idx);CHKERRQ(ierr);
    for (i=0; i<m; i++) {
      PetscInt rank,offset;
      E2RO(A,0,shift+i*stride,&rank,&offset);
      RO2P(A,0,rank,offset,&idx[i]);
    }
    ierr = ISCreateGeneral(PETSC_COMM_SELF,m,idx,PETSC_OWN_POINTER,rows);CHKERRQ(ierr);
  }
  if (cols) {
    m = a->emat->LocalWidth();
    shift = a->emat->RowShift();
    stride = a->emat->RowStride();
    ierr = PetscMalloc1(m,&idx);CHKERRQ(ierr);
    for (i=0; i<m; i++) {
      PetscInt rank,offset;
      E2RO(A,1,shift+i*stride,&rank,&offset);
      RO2P(A,1,rank,offset,&idx[i]);
    }
    ierr = ISCreateGeneral(PETSC_COMM_SELF,m,idx,PETSC_OWN_POINTER,cols);CHKERRQ(ierr);
  }
  PetscFunctionReturn(0);
}
866 
867 static PetscErrorCode MatConvert_Elemental_Dense(Mat A,MatType newtype,MatReuse reuse,Mat *B)
868 {
869   Mat                Bmpi;
870   Mat_Elemental      *a = (Mat_Elemental*)A->data;
871   MPI_Comm           comm;
872   PetscErrorCode     ierr;
873   IS                 isrows,iscols;
874   PetscInt           rrank,ridx,crank,cidx,nrows,ncols,i,j,erow,ecol,elrow,elcol;
875   const PetscInt     *rows,*cols;
876   PetscElemScalar    v;
877   const El::Grid     &grid = a->emat->Grid();
878 
879   PetscFunctionBegin;
880   ierr = PetscObjectGetComm((PetscObject)A,&comm);CHKERRQ(ierr);
881 
882   if (reuse == MAT_REUSE_MATRIX) {
883     Bmpi = *B;
884   } else {
885     ierr = MatCreate(comm,&Bmpi);CHKERRQ(ierr);
886     ierr = MatSetSizes(Bmpi,A->rmap->n,A->cmap->n,PETSC_DECIDE,PETSC_DECIDE);CHKERRQ(ierr);
887     ierr = MatSetType(Bmpi,MATDENSE);CHKERRQ(ierr);
888     ierr = MatSetUp(Bmpi);CHKERRQ(ierr);
889   }
890 
891   /* Get local entries of A */
892   ierr = MatGetOwnershipIS(A,&isrows,&iscols);CHKERRQ(ierr);
893   ierr = ISGetLocalSize(isrows,&nrows);CHKERRQ(ierr);
894   ierr = ISGetIndices(isrows,&rows);CHKERRQ(ierr);
895   ierr = ISGetLocalSize(iscols,&ncols);CHKERRQ(ierr);
896   ierr = ISGetIndices(iscols,&cols);CHKERRQ(ierr);
897 
898   if (a->roworiented) {
899     for (i=0; i<nrows; i++) {
900       P2RO(A,0,rows[i],&rrank,&ridx); /* convert indices between PETSc <-> (Rank,Offset) <-> Elemental */
901       RO2E(A,0,rrank,ridx,&erow);
902       if (rrank < 0 || ridx < 0 || erow < 0) SETERRQ(comm,PETSC_ERR_PLIB,"Incorrect row translation");
903       for (j=0; j<ncols; j++) {
904         P2RO(A,1,cols[j],&crank,&cidx);
905         RO2E(A,1,crank,cidx,&ecol);
906         if (crank < 0 || cidx < 0 || ecol < 0) SETERRQ(comm,PETSC_ERR_PLIB,"Incorrect col translation");
907 
908         elrow = erow / grid.MCSize(); /* Elemental local row index */
909         elcol = ecol / grid.MRSize(); /* Elemental local column index */
910         v = a->emat->GetLocal(elrow,elcol);
911         ierr = MatSetValues(Bmpi,1,&rows[i],1,&cols[j],(PetscScalar *)&v,INSERT_VALUES);CHKERRQ(ierr);
912       }
913     }
914   } else { /* column-oriented */
915     for (j=0; j<ncols; j++) {
916       P2RO(A,1,cols[j],&crank,&cidx);
917       RO2E(A,1,crank,cidx,&ecol);
918       if (crank < 0 || cidx < 0 || ecol < 0) SETERRQ(comm,PETSC_ERR_PLIB,"Incorrect col translation");
919       for (i=0; i<nrows; i++) {
920         P2RO(A,0,rows[i],&rrank,&ridx); /* convert indices between PETSc <-> (Rank,Offset) <-> Elemental */
921         RO2E(A,0,rrank,ridx,&erow);
922         if (rrank < 0 || ridx < 0 || erow < 0) SETERRQ(comm,PETSC_ERR_PLIB,"Incorrect row translation");
923 
924         elrow = erow / grid.MCSize(); /* Elemental local row index */
925         elcol = ecol / grid.MRSize(); /* Elemental local column index */
926         v = a->emat->GetLocal(elrow,elcol);
927         ierr = MatSetValues(Bmpi,1,&rows[i],1,&cols[j],(PetscScalar *)&v,INSERT_VALUES);CHKERRQ(ierr);
928       }
929     }
930   }
931   ierr = MatAssemblyBegin(Bmpi,MAT_FINAL_ASSEMBLY);CHKERRQ(ierr);
932   ierr = MatAssemblyEnd(Bmpi,MAT_FINAL_ASSEMBLY);CHKERRQ(ierr);
933   if (reuse == MAT_INPLACE_MATRIX) {
934     ierr = MatHeaderReplace(A,&Bmpi);CHKERRQ(ierr);
935   } else {
936     *B = Bmpi;
937   }
938   ierr = ISDestroy(&isrows);CHKERRQ(ierr);
939   ierr = ISDestroy(&iscols);CHKERRQ(ierr);
940   PetscFunctionReturn(0);
941 }
942 
943 PETSC_INTERN PetscErrorCode MatConvert_SeqAIJ_Elemental(Mat A, MatType newtype,MatReuse reuse,Mat *newmat)
944 {
945   Mat               mat_elemental;
946   PetscErrorCode    ierr;
947   PetscInt          M=A->rmap->N,N=A->cmap->N,row,ncols;
948   const PetscInt    *cols;
949   const PetscScalar *vals;
950 
951   PetscFunctionBegin;
952   if (reuse == MAT_REUSE_MATRIX) {
953     mat_elemental = *newmat;
954     ierr = MatZeroEntries(mat_elemental);CHKERRQ(ierr);
955   } else {
956     ierr = MatCreate(PetscObjectComm((PetscObject)A), &mat_elemental);CHKERRQ(ierr);
957     ierr = MatSetSizes(mat_elemental,PETSC_DECIDE,PETSC_DECIDE,M,N);CHKERRQ(ierr);
958     ierr = MatSetType(mat_elemental,MATELEMENTAL);CHKERRQ(ierr);
959     ierr = MatSetUp(mat_elemental);CHKERRQ(ierr);
960   }
961   for (row=0; row<M; row++) {
962     ierr = MatGetRow(A,row,&ncols,&cols,&vals);CHKERRQ(ierr);
963     /* PETSc-Elemental interface uses axpy for setting off-processor entries, only ADD_VALUES is allowed */
964     ierr = MatSetValues(mat_elemental,1,&row,ncols,cols,vals,ADD_VALUES);CHKERRQ(ierr);
965     ierr = MatRestoreRow(A,row,&ncols,&cols,&vals);CHKERRQ(ierr);
966   }
967   ierr = MatAssemblyBegin(mat_elemental, MAT_FINAL_ASSEMBLY);CHKERRQ(ierr);
968   ierr = MatAssemblyEnd(mat_elemental, MAT_FINAL_ASSEMBLY);CHKERRQ(ierr);
969 
970   if (reuse == MAT_INPLACE_MATRIX) {
971     ierr = MatHeaderReplace(A,&mat_elemental);CHKERRQ(ierr);
972   } else {
973     *newmat = mat_elemental;
974   }
975   PetscFunctionReturn(0);
976 }
977 
978 PETSC_INTERN PetscErrorCode MatConvert_MPIAIJ_Elemental(Mat A, MatType newtype,MatReuse reuse,Mat *newmat)
979 {
980   Mat               mat_elemental;
981   PetscErrorCode    ierr;
982   PetscInt          row,ncols,rstart=A->rmap->rstart,rend=A->rmap->rend,j;
983   const PetscInt    *cols;
984   const PetscScalar *vals;
985 
986   PetscFunctionBegin;
987   if (reuse == MAT_REUSE_MATRIX) {
988     mat_elemental = *newmat;
989     ierr = MatZeroEntries(mat_elemental);CHKERRQ(ierr);
990   } else {
991     ierr = MatCreate(PetscObjectComm((PetscObject)A), &mat_elemental);CHKERRQ(ierr);
992     ierr = MatSetSizes(mat_elemental,PETSC_DECIDE,PETSC_DECIDE,A->rmap->N,A->cmap->N);CHKERRQ(ierr);
993     ierr = MatSetType(mat_elemental,MATELEMENTAL);CHKERRQ(ierr);
994     ierr = MatSetUp(mat_elemental);CHKERRQ(ierr);
995   }
996   for (row=rstart; row<rend; row++) {
997     ierr = MatGetRow(A,row,&ncols,&cols,&vals);CHKERRQ(ierr);
998     for (j=0; j<ncols; j++) {
999       /* PETSc-Elemental interface uses axpy for setting off-processor entries, only ADD_VALUES is allowed */
1000       ierr = MatSetValues(mat_elemental,1,&row,1,&cols[j],&vals[j],ADD_VALUES);CHKERRQ(ierr);
1001     }
1002     ierr = MatRestoreRow(A,row,&ncols,&cols,&vals);CHKERRQ(ierr);
1003   }
1004   ierr = MatAssemblyBegin(mat_elemental, MAT_FINAL_ASSEMBLY);CHKERRQ(ierr);
1005   ierr = MatAssemblyEnd(mat_elemental, MAT_FINAL_ASSEMBLY);CHKERRQ(ierr);
1006 
1007   if (reuse == MAT_INPLACE_MATRIX) {
1008     ierr = MatHeaderReplace(A,&mat_elemental);CHKERRQ(ierr);
1009   } else {
1010     *newmat = mat_elemental;
1011   }
1012   PetscFunctionReturn(0);
1013 }
1014 
1015 PETSC_INTERN PetscErrorCode MatConvert_SeqSBAIJ_Elemental(Mat A, MatType newtype,MatReuse reuse,Mat *newmat)
1016 {
1017   Mat               mat_elemental;
1018   PetscErrorCode    ierr;
1019   PetscInt          M=A->rmap->N,N=A->cmap->N,row,ncols,j;
1020   const PetscInt    *cols;
1021   const PetscScalar *vals;
1022 
1023   PetscFunctionBegin;
1024   if (reuse == MAT_REUSE_MATRIX) {
1025     mat_elemental = *newmat;
1026     ierr = MatZeroEntries(mat_elemental);CHKERRQ(ierr);
1027   } else {
1028     ierr = MatCreate(PetscObjectComm((PetscObject)A), &mat_elemental);CHKERRQ(ierr);
1029     ierr = MatSetSizes(mat_elemental,PETSC_DECIDE,PETSC_DECIDE,M,N);CHKERRQ(ierr);
1030     ierr = MatSetType(mat_elemental,MATELEMENTAL);CHKERRQ(ierr);
1031     ierr = MatSetUp(mat_elemental);CHKERRQ(ierr);
1032   }
1033   ierr = MatGetRowUpperTriangular(A);CHKERRQ(ierr);
1034   for (row=0; row<M; row++) {
1035     ierr = MatGetRow(A,row,&ncols,&cols,&vals);CHKERRQ(ierr);
1036     /* PETSc-Elemental interface uses axpy for setting off-processor entries, only ADD_VALUES is allowed */
1037     ierr = MatSetValues(mat_elemental,1,&row,ncols,cols,vals,ADD_VALUES);CHKERRQ(ierr);
1038     for (j=0; j<ncols; j++) { /* lower triangular part */
1039       PetscScalar v;
1040       if (cols[j] == row) continue;
1041       v    = A->hermitian ? PetscConj(vals[j]) : vals[j];
1042       ierr = MatSetValues(mat_elemental,1,&cols[j],1,&row,&v,ADD_VALUES);CHKERRQ(ierr);
1043     }
1044     ierr = MatRestoreRow(A,row,&ncols,&cols,&vals);CHKERRQ(ierr);
1045   }
1046   ierr = MatRestoreRowUpperTriangular(A);CHKERRQ(ierr);
1047   ierr = MatAssemblyBegin(mat_elemental, MAT_FINAL_ASSEMBLY);CHKERRQ(ierr);
1048   ierr = MatAssemblyEnd(mat_elemental, MAT_FINAL_ASSEMBLY);CHKERRQ(ierr);
1049 
1050   if (reuse == MAT_INPLACE_MATRIX) {
1051     ierr = MatHeaderReplace(A,&mat_elemental);CHKERRQ(ierr);
1052   } else {
1053     *newmat = mat_elemental;
1054   }
1055   PetscFunctionReturn(0);
1056 }
1057 
1058 PETSC_INTERN PetscErrorCode MatConvert_MPISBAIJ_Elemental(Mat A, MatType newtype,MatReuse reuse,Mat *newmat)
1059 {
1060   Mat               mat_elemental;
1061   PetscErrorCode    ierr;
1062   PetscInt          M=A->rmap->N,N=A->cmap->N,row,ncols,j,rstart=A->rmap->rstart,rend=A->rmap->rend;
1063   const PetscInt    *cols;
1064   const PetscScalar *vals;
1065 
1066   PetscFunctionBegin;
1067   if (reuse == MAT_REUSE_MATRIX) {
1068     mat_elemental = *newmat;
1069     ierr = MatZeroEntries(mat_elemental);CHKERRQ(ierr);
1070   } else {
1071     ierr = MatCreate(PetscObjectComm((PetscObject)A), &mat_elemental);CHKERRQ(ierr);
1072     ierr = MatSetSizes(mat_elemental,PETSC_DECIDE,PETSC_DECIDE,M,N);CHKERRQ(ierr);
1073     ierr = MatSetType(mat_elemental,MATELEMENTAL);CHKERRQ(ierr);
1074     ierr = MatSetUp(mat_elemental);CHKERRQ(ierr);
1075   }
1076   ierr = MatGetRowUpperTriangular(A);CHKERRQ(ierr);
1077   for (row=rstart; row<rend; row++) {
1078     ierr = MatGetRow(A,row,&ncols,&cols,&vals);CHKERRQ(ierr);
1079     /* PETSc-Elemental interface uses axpy for setting off-processor entries, only ADD_VALUES is allowed */
1080     ierr = MatSetValues(mat_elemental,1,&row,ncols,cols,vals,ADD_VALUES);CHKERRQ(ierr);
1081     for (j=0; j<ncols; j++) { /* lower triangular part */
1082       PetscScalar v;
1083       if (cols[j] == row) continue;
1084       v    = A->hermitian ? PetscConj(vals[j]) : vals[j];
1085       ierr = MatSetValues(mat_elemental,1,&cols[j],1,&row,&v,ADD_VALUES);CHKERRQ(ierr);
1086     }
1087     ierr = MatRestoreRow(A,row,&ncols,&cols,&vals);CHKERRQ(ierr);
1088   }
1089   ierr = MatRestoreRowUpperTriangular(A);CHKERRQ(ierr);
1090   ierr = MatAssemblyBegin(mat_elemental, MAT_FINAL_ASSEMBLY);CHKERRQ(ierr);
1091   ierr = MatAssemblyEnd(mat_elemental, MAT_FINAL_ASSEMBLY);CHKERRQ(ierr);
1092 
1093   if (reuse == MAT_INPLACE_MATRIX) {
1094     ierr = MatHeaderReplace(A,&mat_elemental);CHKERRQ(ierr);
1095   } else {
1096     *newmat = mat_elemental;
1097   }
1098   PetscFunctionReturn(0);
1099 }
1100 
1101 static PetscErrorCode MatDestroy_Elemental(Mat A)
1102 {
1103   Mat_Elemental      *a = (Mat_Elemental*)A->data;
1104   PetscErrorCode     ierr;
1105   Mat_Elemental_Grid *commgrid;
1106   PetscBool          flg;
1107   MPI_Comm           icomm;
1108 
1109   PetscFunctionBegin;
1110   delete a->emat;
1111   delete a->P;
1112   delete a->Q;
1113 
1114   El::mpi::Comm cxxcomm(PetscObjectComm((PetscObject)A));
1115   ierr = PetscCommDuplicate(cxxcomm.comm,&icomm,NULL);CHKERRQ(ierr);
1116   ierr = MPI_Comm_get_attr(icomm,Petsc_Elemental_keyval,(void**)&commgrid,(int*)&flg);CHKERRMPI(ierr);
1117   if (--commgrid->grid_refct == 0) {
1118     delete commgrid->grid;
1119     ierr = PetscFree(commgrid);CHKERRQ(ierr);
1120     ierr = MPI_Comm_free_keyval(&Petsc_Elemental_keyval);CHKERRMPI(ierr);
1121   }
1122   ierr = PetscCommDestroy(&icomm);CHKERRQ(ierr);
1123   ierr = PetscObjectComposeFunction((PetscObject)A,"MatGetOwnershipIS_C",NULL);CHKERRQ(ierr);
1124   ierr = PetscObjectComposeFunction((PetscObject)A,"MatFactorGetSolverType_C",NULL);CHKERRQ(ierr);
1125   ierr = PetscObjectComposeFunction((PetscObject)A,"MatProductSetFromOptions_elemental_mpidense_C",NULL);CHKERRQ(ierr);
1126   ierr = PetscFree(A->data);CHKERRQ(ierr);
1127   PetscFunctionReturn(0);
1128 }
1129 
1130 PetscErrorCode MatSetUp_Elemental(Mat A)
1131 {
1132   Mat_Elemental  *a = (Mat_Elemental*)A->data;
1133   PetscErrorCode ierr;
1134   MPI_Comm       comm;
1135   PetscMPIInt    rsize,csize;
1136   PetscInt       n;
1137 
1138   PetscFunctionBegin;
1139   ierr = PetscLayoutSetUp(A->rmap);CHKERRQ(ierr);
1140   ierr = PetscLayoutSetUp(A->cmap);CHKERRQ(ierr);
1141 
1142   /* Check if local row and column sizes are equally distributed.
1143      Jed: Elemental uses "element" cyclic ordering so the sizes need to match that
1144      exactly.  The strategy in MatElemental is for PETSc to implicitly permute to block ordering (like would be returned by
1145      PetscSplitOwnership(comm,&n,&N), at which point Elemental matrices can act on PETSc vectors without redistributing the vectors. */
1146   ierr = PetscObjectGetComm((PetscObject)A,&comm);CHKERRQ(ierr);
1147   n = PETSC_DECIDE;
1148   ierr = PetscSplitOwnership(comm,&n,&A->rmap->N);CHKERRQ(ierr);
1149   if (n != A->rmap->n) SETERRQ1(PETSC_COMM_SELF,PETSC_ERR_ARG_INCOMP,"Local row size %D of ELEMENTAL matrix must be equally distributed",A->rmap->n);
1150 
1151   n = PETSC_DECIDE;
1152   ierr = PetscSplitOwnership(comm,&n,&A->cmap->N);CHKERRQ(ierr);
1153   if (n != A->cmap->n) SETERRQ1(PETSC_COMM_SELF,PETSC_ERR_ARG_INCOMP,"Local column size %D of ELEMENTAL matrix must be equally distributed",A->cmap->n);
1154 
1155   a->emat->Resize(A->rmap->N,A->cmap->N);CHKERRQ(ierr);
1156   El::Zero(*a->emat);
1157 
1158   ierr = MPI_Comm_size(A->rmap->comm,&rsize);CHKERRMPI(ierr);
1159   ierr = MPI_Comm_size(A->cmap->comm,&csize);CHKERRMPI(ierr);
1160   if (csize != rsize) SETERRQ(PetscObjectComm((PetscObject)A),PETSC_ERR_ARG_INCOMP,"Cannot use row and column communicators of different sizes");
1161   a->commsize = rsize;
1162   a->mr[0] = A->rmap->N % rsize; if (!a->mr[0]) a->mr[0] = rsize;
1163   a->mr[1] = A->cmap->N % csize; if (!a->mr[1]) a->mr[1] = csize;
1164   a->m[0]  = A->rmap->N / rsize + (a->mr[0] != rsize);
1165   a->m[1]  = A->cmap->N / csize + (a->mr[1] != csize);
1166   PetscFunctionReturn(0);
1167 }
1168 
1169 PetscErrorCode MatAssemblyBegin_Elemental(Mat A, MatAssemblyType type)
1170 {
1171   Mat_Elemental  *a = (Mat_Elemental*)A->data;
1172 
1173   PetscFunctionBegin;
1174   /* printf("Calling ProcessQueues\n"); */
1175   a->emat->ProcessQueues();
1176   /* printf("Finished ProcessQueues\n"); */
1177   PetscFunctionReturn(0);
1178 }
1179 
/*
   Ends assembly.  All queued updates are already flushed by ProcessQueues()
   in MatAssemblyBegin_Elemental(), so nothing remains to do here.
*/
PetscErrorCode MatAssemblyEnd_Elemental(Mat A, MatAssemblyType type)
{
  PetscFunctionBegin;
  /* Currently does nothing */
  PetscFunctionReturn(0);
}
1186 
1187 PetscErrorCode MatLoad_Elemental(Mat newMat, PetscViewer viewer)
1188 {
1189   PetscErrorCode ierr;
1190   Mat            Adense,Ae;
1191   MPI_Comm       comm;
1192 
1193   PetscFunctionBegin;
1194   ierr = PetscObjectGetComm((PetscObject)newMat,&comm);CHKERRQ(ierr);
1195   ierr = MatCreate(comm,&Adense);CHKERRQ(ierr);
1196   ierr = MatSetType(Adense,MATDENSE);CHKERRQ(ierr);
1197   ierr = MatLoad(Adense,viewer);CHKERRQ(ierr);
1198   ierr = MatConvert(Adense, MATELEMENTAL, MAT_INITIAL_MATRIX,&Ae);CHKERRQ(ierr);
1199   ierr = MatDestroy(&Adense);CHKERRQ(ierr);
1200   ierr = MatHeaderReplace(newMat,&Ae);CHKERRQ(ierr);
1201   PetscFunctionReturn(0);
1202 }
1203 
1204 /* -------------------------------------------------------------------*/
/* Function table for MATELEMENTAL.  Slot positions follow the _MatOps struct
   layout (the numeric markers give the index of the first entry on that line);
   0 marks an operation this type does not implement. */
static struct _MatOps MatOps_Values = {
       MatSetValues_Elemental,
       0,
       0,
       MatMult_Elemental,
/* 4*/ MatMultAdd_Elemental,
       MatMultTranspose_Elemental,
       MatMultTransposeAdd_Elemental,
       MatSolve_Elemental,
       MatSolveAdd_Elemental,
       0,
/*10*/ 0,
       MatLUFactor_Elemental,
       MatCholeskyFactor_Elemental,
       0,
       MatTranspose_Elemental,
/*15*/ MatGetInfo_Elemental,
       0,
       MatGetDiagonal_Elemental,
       MatDiagonalScale_Elemental,
       MatNorm_Elemental,
/*20*/ MatAssemblyBegin_Elemental,
       MatAssemblyEnd_Elemental,
       MatSetOption_Elemental,
       MatZeroEntries_Elemental,
/*24*/ 0,
       MatLUFactorSymbolic_Elemental,
       MatLUFactorNumeric_Elemental,
       MatCholeskyFactorSymbolic_Elemental,
       MatCholeskyFactorNumeric_Elemental,
/*29*/ MatSetUp_Elemental,
       0,
       0,
       0,
       0,
/*34*/ MatDuplicate_Elemental,
       0,
       0,
       0,
       0,
/*39*/ MatAXPY_Elemental,
       0,
       0,
       0,
       MatCopy_Elemental,
/*44*/ 0,
       MatScale_Elemental,
       MatShift_Basic,
       0,
       0,
/*49*/ 0,
       0,
       0,
       0,
       0,
/*54*/ 0,
       0,
       0,
       0,
       0,
/*59*/ 0,
       MatDestroy_Elemental,
       MatView_Elemental,
       0,
       0,
/*64*/ 0,
       0,
       0,
       0,
       0,
/*69*/ 0,
       0,
       MatConvert_Elemental_Dense,
       0,
       0,
/*74*/ 0,
       0,
       0,
       0,
       0,
/*79*/ 0,
       0,
       0,
       0,
       MatLoad_Elemental,
/*84*/ 0,
       0,
       0,
       0,
       0,
/*89*/ 0,
       0,
       MatMatMultNumeric_Elemental,
       0,
       0,
/*94*/ 0,
       0,
       0,
       MatMatTransposeMultNumeric_Elemental,
       0,
/*99*/ MatProductSetFromOptions_Elemental,
       0,
       0,
       MatConjugate_Elemental,
       0,
/*104*/0,
       0,
       0,
       0,
       0,
/*109*/MatMatSolve_Elemental,
       0,
       0,
       0,
       MatMissingDiagonal_Elemental,
/*114*/0,
       0,
       0,
       0,
       0,
/*119*/0,
       MatHermitianTranspose_Elemental,
       0,
       0,
       0,
/*124*/0,
       0,
       0,
       0,
       0,
/*129*/0,
       0,
       0,
       0,
       0,
/*134*/0,
       0,
       0,
       0,
       0,
       0,
/*140*/0,
       0,
       0,
       0,
       0,
/*145*/0,
       0,
       0
};
1355 
1356 /*MC
1357    MATELEMENTAL = "elemental" - A matrix type for dense matrices using the Elemental package
1358 
  Use ./configure --download-elemental to have PETSc installed with Elemental
1360 
1361   Use -pc_type lu -pc_factor_mat_solver_type elemental to use this direct solver
1362 
1363    Options Database Keys:
1364 + -mat_type elemental - sets the matrix type to "elemental" during a call to MatSetFromOptions()
1365 - -mat_elemental_grid_height - sets Grid Height for 2D cyclic ordering of internal matrix
1366 
1367   Level: beginner
1368 
1369 .seealso: MATDENSE
1370 M*/
1371 
1372 PETSC_EXTERN PetscErrorCode MatCreate_Elemental(Mat A)
1373 {
1374   Mat_Elemental      *a;
1375   PetscErrorCode     ierr;
1376   PetscBool          flg,flg1;
1377   Mat_Elemental_Grid *commgrid;
1378   MPI_Comm           icomm;
1379   PetscInt           optv1;
1380 
1381   PetscFunctionBegin;
1382   ierr = PetscMemcpy(A->ops,&MatOps_Values,sizeof(struct _MatOps));CHKERRQ(ierr);
1383   A->insertmode = NOT_SET_VALUES;
1384 
1385   ierr = PetscNewLog(A,&a);CHKERRQ(ierr);
1386   A->data = (void*)a;
1387 
1388   /* Set up the elemental matrix */
1389   El::mpi::Comm cxxcomm(PetscObjectComm((PetscObject)A));
1390 
1391   /* Grid needs to be shared between multiple Mats on the same communicator, implement by attribute caching on the MPI_Comm */
1392   if (Petsc_Elemental_keyval == MPI_KEYVAL_INVALID) {
1393     ierr = MPI_Comm_create_keyval(MPI_COMM_NULL_COPY_FN,MPI_COMM_NULL_DELETE_FN,&Petsc_Elemental_keyval,(void*)0);CHKERRMPI(ierr);
1394   }
1395   ierr = PetscCommDuplicate(cxxcomm.comm,&icomm,NULL);CHKERRQ(ierr);
1396   ierr = MPI_Comm_get_attr(icomm,Petsc_Elemental_keyval,(void**)&commgrid,(int*)&flg);CHKERRMPI(ierr);
1397   if (!flg) {
1398     ierr = PetscNewLog(A,&commgrid);CHKERRQ(ierr);
1399 
1400     ierr = PetscOptionsBegin(PetscObjectComm((PetscObject)A),((PetscObject)A)->prefix,"Elemental Options","Mat");CHKERRQ(ierr);
1401     /* displayed default grid sizes (CommSize,1) are set by us arbitrarily until El::Grid() is called */
1402     ierr = PetscOptionsInt("-mat_elemental_grid_height","Grid Height","None",El::mpi::Size(cxxcomm),&optv1,&flg1);CHKERRQ(ierr);
1403     if (flg1) {
1404       if (El::mpi::Size(cxxcomm) % optv1) SETERRQ2(PetscObjectComm((PetscObject)A),PETSC_ERR_ARG_INCOMP,"Grid Height %D must evenly divide CommSize %D",optv1,(PetscInt)El::mpi::Size(cxxcomm));
1405       commgrid->grid = new El::Grid(cxxcomm,optv1); /* use user-provided grid height */
1406     } else {
1407       commgrid->grid = new El::Grid(cxxcomm); /* use Elemental default grid sizes */
1408       /* printf("new commgrid->grid = %p\n",commgrid->grid);  -- memory leak revealed by valgrind? */
1409     }
1410     commgrid->grid_refct = 1;
1411     ierr = MPI_Comm_set_attr(icomm,Petsc_Elemental_keyval,(void*)commgrid);CHKERRMPI(ierr);
1412 
1413     a->pivoting    = 1;
1414     ierr = PetscOptionsInt("-mat_elemental_pivoting","Pivoting","None",a->pivoting,&a->pivoting,NULL);CHKERRQ(ierr);
1415 
1416     ierr = PetscOptionsEnd();CHKERRQ(ierr);
1417   } else {
1418     commgrid->grid_refct++;
1419   }
1420   ierr = PetscCommDestroy(&icomm);CHKERRQ(ierr);
1421   a->grid        = commgrid->grid;
1422   a->emat        = new El::DistMatrix<PetscElemScalar>(*a->grid);
1423   a->roworiented = PETSC_TRUE;
1424 
1425   ierr = PetscObjectComposeFunction((PetscObject)A,"MatGetOwnershipIS_C",MatGetOwnershipIS_Elemental);CHKERRQ(ierr);
1426   ierr = PetscObjectComposeFunction((PetscObject)A,"MatProductSetFromOptions_elemental_mpidense_C",MatProductSetFromOptions_Elemental_MPIDense);CHKERRQ(ierr);
1427   ierr = PetscObjectChangeTypeName((PetscObject)A,MATELEMENTAL);CHKERRQ(ierr);
1428   PetscFunctionReturn(0);
1429 }
1430