xref: /petsc/src/mat/impls/elemental/matelem.cxx (revision ea8526b9f979809fe23138b24c7e61d2023d167c)
1 #include <../src/mat/impls/elemental/matelemimpl.h> /*I "petscmat.h" I*/
2 
3 /*
4     The variable Petsc_Elemental_keyval is used to indicate an MPI attribute that
5   is attached to a communicator, in this case the attribute is a Mat_Elemental_Grid
6 */
7 static PetscMPIInt Petsc_Elemental_keyval = MPI_KEYVAL_INVALID;
8 
9 #undef __FUNCT__
10 #define __FUNCT__ "PetscElementalInitializePackage"
11 /*@C
12    PetscElementalInitializePackage - Initialize Elemental package
13 
14    Logically Collective
15 
16    Level: developer
17 
18 .seealso: MATELEMENTAL, PetscElementalFinalizePackage()
19 @*/
20 PetscErrorCode PetscElementalInitializePackage(void)
21 {
22   PetscErrorCode ierr;
23 
24   PetscFunctionBegin;
25   if (El::Initialized()) PetscFunctionReturn(0);
26   El::Initialize();   /* called by the 1st call of MatCreate_Elemental */
27   ierr = PetscRegisterFinalize(PetscElementalFinalizePackage);CHKERRQ(ierr);
28   PetscFunctionReturn(0);
29 }
30 
31 #undef __FUNCT__
32 #define __FUNCT__ "PetscElementalFinalizePackage"
33 /*@C
34    PetscElementalFinalizePackage - Finalize Elemental package
35 
36    Logically Collective
37 
38    Level: developer
39 
40 .seealso: MATELEMENTAL, PetscElementalInitializePackage()
41 @*/
42 PetscErrorCode PetscElementalFinalizePackage(void)
43 {
44   PetscFunctionBegin;
45   El::Finalize();  /* called by PetscFinalize() */
46   PetscFunctionReturn(0);
47 }
48 
49 #undef __FUNCT__
50 #define __FUNCT__ "MatView_Elemental"
51 static PetscErrorCode MatView_Elemental(Mat A,PetscViewer viewer)
52 {
53   PetscErrorCode ierr;
54   Mat_Elemental  *a = (Mat_Elemental*)A->data;
55   PetscBool      iascii;
56 
57   PetscFunctionBegin;
58   ierr = PetscObjectTypeCompare((PetscObject)viewer,PETSCVIEWERASCII,&iascii);CHKERRQ(ierr);
59   if (iascii) {
60     PetscViewerFormat format;
61     ierr = PetscViewerGetFormat(viewer,&format);CHKERRQ(ierr);
62     if (format == PETSC_VIEWER_ASCII_INFO) {
63       /* call elemental viewing function */
64       ierr = PetscViewerASCIIPrintf(viewer,"Elemental run parameters:\n");CHKERRQ(ierr);
65       ierr = PetscViewerASCIIPrintf(viewer,"  allocated entries=%d\n",(*a->emat).AllocatedMemory());CHKERRQ(ierr);
66       ierr = PetscViewerASCIIPrintf(viewer,"  grid height=%d, grid width=%d\n",(*a->emat).Grid().Height(),(*a->emat).Grid().Width());CHKERRQ(ierr);
67       if (format == PETSC_VIEWER_ASCII_FACTOR_INFO) {
68         /* call elemental viewing function */
69         ierr = PetscPrintf(PetscObjectComm((PetscObject)viewer),"test matview_elemental 2\n");CHKERRQ(ierr);
70       }
71 
72     } else if (format == PETSC_VIEWER_DEFAULT) {
73       ierr = PetscViewerASCIIUseTabs(viewer,PETSC_FALSE);CHKERRQ(ierr);
74       El::Print( *a->emat, "Elemental matrix (cyclic ordering)" );
75       ierr = PetscViewerASCIIUseTabs(viewer,PETSC_TRUE);CHKERRQ(ierr);
76       if (A->factortype == MAT_FACTOR_NONE){
77         Mat Adense;
78         ierr = PetscPrintf(PetscObjectComm((PetscObject)viewer),"Elemental matrix (explicit ordering)\n");CHKERRQ(ierr);
79         ierr = MatConvert(A,MATDENSE,MAT_INITIAL_MATRIX,&Adense);CHKERRQ(ierr);
80         ierr = MatView(Adense,viewer);CHKERRQ(ierr);
81         ierr = MatDestroy(&Adense);CHKERRQ(ierr);
82       }
83     } else SETERRQ(PetscObjectComm((PetscObject)viewer),PETSC_ERR_SUP,"Format");
84   } else {
85     /* convert to dense format and call MatView() */
86     Mat Adense;
87     ierr = PetscPrintf(PetscObjectComm((PetscObject)viewer),"Elemental matrix (explicit ordering)\n");CHKERRQ(ierr);
88     ierr = MatConvert(A,MATDENSE,MAT_INITIAL_MATRIX,&Adense);CHKERRQ(ierr);
89     ierr = MatView(Adense,viewer);CHKERRQ(ierr);
90     ierr = MatDestroy(&Adense);CHKERRQ(ierr);
91   }
92   PetscFunctionReturn(0);
93 }
94 
#undef __FUNCT__
#define __FUNCT__ "MatGetInfo_Elemental"
/* Report matrix statistics for MATELEMENTAL.

   Only MAT_LOCAL and MAT_GLOBAL_SUM set nz_allocated/nz_used, and both
   report the LOCAL allocation as returned by Elemental; the MAT_GLOBAL_MAX
   branch is an unimplemented stub (see the disabled reductions below). */
static PetscErrorCode MatGetInfo_Elemental(Mat A,MatInfoType flag,MatInfo *info)
{
  Mat_Elemental  *a = (Mat_Elemental*)A->data;

  PetscFunctionBegin;
  info->block_size = 1.0;  /* dense storage: no blocking */

  if (flag == MAT_LOCAL) {
    info->nz_allocated   = (double)(*a->emat).AllocatedMemory(); /* locally allocated */
    info->nz_used        = info->nz_allocated;
  } else if (flag == MAT_GLOBAL_MAX) {
    /* not implemented: nz_allocated/nz_used are left unset for this flag */
    //ierr = MPIU_Allreduce(isend,irecv,5,MPIU_REAL,MPIU_MAX,PetscObjectComm((PetscObject)matin));CHKERRQ(ierr);
    /* see MatGetInfo_MPIAIJ() for getting global info->nz_allocated! */
    //SETERRQ(PETSC_COMM_SELF,PETSC_ERR_SUP," MAT_GLOBAL_MAX not written yet");
  } else if (flag == MAT_GLOBAL_SUM) {
    /* NOTE(review): returns only the LOCAL allocation even though a global
       sum was requested -- the reduction below was never enabled; confirm
       whether callers depend on this */
    //SETERRQ(PETSC_COMM_SELF,PETSC_ERR_SUP," MAT_GLOBAL_SUM not written yet");
    info->nz_allocated   = (double)(*a->emat).AllocatedMemory(); /* locally allocated */
    info->nz_used        = info->nz_allocated; /* assume Elemental does accurate allocation */
    //ierr = MPIU_Allreduce(isend,irecv,1,MPIU_REAL,MPIU_SUM,PetscObjectComm((PetscObject)A));CHKERRQ(ierr);
    //PetscPrintf(PETSC_COMM_SELF,"    ... [%d] locally allocated %g\n",rank,info->nz_allocated);
  }

  info->nz_unneeded       = 0.0;
  info->assemblies        = (double)A->num_ass;
  info->mallocs           = 0;
  info->memory            = ((PetscObject)A)->mem;
  info->fill_ratio_given  = 0; /* determined by Elemental */
  info->fill_ratio_needed = 0;
  info->factor_mallocs    = 0;
  PetscFunctionReturn(0);
}
128 
129 #undef __FUNCT__
130 #define __FUNCT__ "MatSetOption_Elemental"
131 PetscErrorCode MatSetOption_Elemental(Mat A,MatOption op,PetscBool flg)
132 {
133   Mat_Elemental  *a = (Mat_Elemental*)A->data;
134 
135   PetscFunctionBegin;
136   switch (op) {
137   case MAT_NEW_NONZERO_LOCATIONS:
138   case MAT_NEW_NONZERO_LOCATION_ERR:
139   case MAT_NEW_NONZERO_ALLOCATION_ERR:
140   case MAT_ROW_ORIENTED:
141     a->roworiented = flg;
142     break;
143   case MAT_SYMMETRIC:
144     break;
145   default:
146     SETERRQ1(PETSC_COMM_SELF,PETSC_ERR_SUP,"unknown option %s",MatOptions[op]);
147   }
148   PetscFunctionReturn(0);
149 }
150 
#undef __FUNCT__
#define __FUNCT__ "MatSetValues_Elemental"
/* Insert or add a logically dense (nr x nc) block of values.

   Two passes over the (rows x cols) index set:
     1. apply every locally owned entry with Set()/Update() and count the
        off-process entries ("queues");
     2. Reserve() exactly that many queue slots, then QueueUpdate() each
        off-process entry for communication at assembly time.
   Off-process entries are only supported with ADD_VALUES.  Negative row or
   column indices are skipped (standard PETSc convention).  a->roworiented
   selects whether vals is read row-major (vals[i*nc+j]) or column-major
   (vals[i+j*nr]).  P2RO/RO2E translate a PETSc global index to
   (owner rank, local offset) and then to Elemental's cyclic ordering. */
static PetscErrorCode MatSetValues_Elemental(Mat A,PetscInt nr,const PetscInt *rows,PetscInt nc,const PetscInt *cols,const PetscScalar *vals,InsertMode imode)
{
  Mat_Elemental  *a = (Mat_Elemental*)A->data;
  PetscInt       i,j,rrank,ridx,crank,cidx,erow,ecol,numQueues=0;

  PetscFunctionBegin;
  // TODO: Initialize matrix to all zeros?

  // Count the number of queues from this process
  if (a->roworiented) {
    /* pass 1: apply local entries, count off-process ones */
    for (i=0; i<nr; i++) {
      if (rows[i] < 0) continue;
      P2RO(A,0,rows[i],&rrank,&ridx);
      RO2E(A,0,rrank,ridx,&erow);
      if (rrank < 0 || ridx < 0 || erow < 0) SETERRQ(PetscObjectComm((PetscObject)A),PETSC_ERR_PLIB,"Incorrect row translation");
      for (j=0; j<nc; j++) {
        if (cols[j] < 0) continue;
        P2RO(A,1,cols[j],&crank,&cidx);
        RO2E(A,1,crank,cidx,&ecol);
        if (crank < 0 || cidx < 0 || ecol < 0) SETERRQ(PetscObjectComm((PetscObject)A),PETSC_ERR_PLIB,"Incorrect col translation");
        if (!a->emat->IsLocal(erow,ecol) ){ /* off-proc entry */
          /* printf("Will later remotely update (%d,%d)\n",erow,ecol); */
          if (imode != ADD_VALUES) SETERRQ(PETSC_COMM_SELF,PETSC_ERR_SUP,"Only ADD_VALUES to off-processor entry is supported");
          ++numQueues;
          continue;
        }
        /* printf("Locally updating (%d,%d)\n",erow,ecol); */
        switch (imode) {
        case INSERT_VALUES: a->emat->Set(erow,ecol,(PetscElemScalar)vals[i*nc+j]); break;
        case ADD_VALUES: a->emat->Update(erow,ecol,(PetscElemScalar)vals[i*nc+j]); break;
        default: SETERRQ1(PetscObjectComm((PetscObject)A),PETSC_ERR_SUP,"No support for InsertMode %d",(int)imode);
        }
      }
    }

    /* pass 2: reserve space, then queue every off-process update */
    /* printf("numQueues=%d\n",numQueues); */
    a->emat->Reserve( numQueues );
    for (i=0; i<nr; i++) {
      if (rows[i] < 0) continue;
      P2RO(A,0,rows[i],&rrank,&ridx);
      RO2E(A,0,rrank,ridx,&erow);
      for (j=0; j<nc; j++) {
        if (cols[j] < 0) continue;
        P2RO(A,1,cols[j],&crank,&cidx);
        RO2E(A,1,crank,cidx,&ecol);
        if ( !a->emat->IsLocal(erow,ecol) ) { /*off-proc entry*/
          /* printf("Queueing remotely update of (%d,%d)\n",erow,ecol); */
          a->emat->QueueUpdate( erow, ecol, vals[i*nc+j] );
        }
      }
    }
  } else { /* columnoriented */
    /* identical logic, with vals read column-major */
    for (j=0; j<nc; j++) {
      if (cols[j] < 0) continue;
      P2RO(A,1,cols[j],&crank,&cidx);
      RO2E(A,1,crank,cidx,&ecol);
      if (crank < 0 || cidx < 0 || ecol < 0) SETERRQ(PetscObjectComm((PetscObject)A),PETSC_ERR_PLIB,"Incorrect col translation");
      for (i=0; i<nr; i++) {
        if (rows[i] < 0) continue;
        P2RO(A,0,rows[i],&rrank,&ridx);
        RO2E(A,0,rrank,ridx,&erow);
        if (rrank < 0 || ridx < 0 || erow < 0) SETERRQ(PetscObjectComm((PetscObject)A),PETSC_ERR_PLIB,"Incorrect row translation");
        if (!a->emat->IsLocal(erow,ecol) ){ /* off-proc entry */
          /* printf("Will later remotely update (%d,%d)\n",erow,ecol); */
          if (imode != ADD_VALUES) SETERRQ(PETSC_COMM_SELF,PETSC_ERR_SUP,"Only ADD_VALUES to off-processor entry is supported");
          ++numQueues;
          continue;
        }
        /* printf("Locally updating (%d,%d)\n",erow,ecol); */
        switch (imode) {
        case INSERT_VALUES: a->emat->Set(erow,ecol,(PetscElemScalar)vals[i+j*nr]); break;
        case ADD_VALUES: a->emat->Update(erow,ecol,(PetscElemScalar)vals[i+j*nr]); break;
        default: SETERRQ1(PetscObjectComm((PetscObject)A),PETSC_ERR_SUP,"No support for InsertMode %d",(int)imode);
        }
      }
    }

    /* printf("numQueues=%d\n",numQueues); */
    a->emat->Reserve( numQueues );
    for (j=0; j<nc; j++) {
      if (cols[j] < 0) continue;
      P2RO(A,1,cols[j],&crank,&cidx);
      RO2E(A,1,crank,cidx,&ecol);

      for (i=0; i<nr; i++) {
        if (rows[i] < 0) continue;
        P2RO(A,0,rows[i],&rrank,&ridx);
        RO2E(A,0,rrank,ridx,&erow);
        if ( !a->emat->IsLocal(erow,ecol) ) { /*off-proc entry*/
          /* printf("Queueing remotely update of (%d,%d)\n",erow,ecol); */
          a->emat->QueueUpdate( erow, ecol, vals[i+j*nr] );
        }
      }
    }
  }
  PetscFunctionReturn(0);
}
250 
251 #undef __FUNCT__
252 #define __FUNCT__ "MatMult_Elemental"
253 static PetscErrorCode MatMult_Elemental(Mat A,Vec X,Vec Y)
254 {
255   Mat_Elemental         *a = (Mat_Elemental*)A->data;
256   PetscErrorCode        ierr;
257   const PetscElemScalar *x;
258   PetscElemScalar       *y;
259   PetscElemScalar       one = 1,zero = 0;
260 
261   PetscFunctionBegin;
262   ierr = VecGetArrayRead(X,(const PetscScalar **)&x);CHKERRQ(ierr);
263   ierr = VecGetArray(Y,(PetscScalar **)&y);CHKERRQ(ierr);
264   { /* Scoping so that constructor is called before pointer is returned */
265     El::DistMatrix<PetscElemScalar,El::VC,El::STAR> xe, ye;
266     xe.LockedAttach(A->cmap->N,1,*a->grid,0,0,x,A->cmap->n);
267     ye.Attach(A->rmap->N,1,*a->grid,0,0,y,A->rmap->n);
268     El::Gemv(El::NORMAL,one,*a->emat,xe,zero,ye);
269   }
270   ierr = VecRestoreArrayRead(X,(const PetscScalar **)&x);CHKERRQ(ierr);
271   ierr = VecRestoreArray(Y,(PetscScalar **)&y);CHKERRQ(ierr);
272   PetscFunctionReturn(0);
273 }
274 
275 #undef __FUNCT__
276 #define __FUNCT__ "MatMultTranspose_Elemental"
277 static PetscErrorCode MatMultTranspose_Elemental(Mat A,Vec X,Vec Y)
278 {
279   Mat_Elemental         *a = (Mat_Elemental*)A->data;
280   PetscErrorCode        ierr;
281   const PetscElemScalar *x;
282   PetscElemScalar       *y;
283   PetscElemScalar       one = 1,zero = 0;
284 
285   PetscFunctionBegin;
286   ierr = VecGetArrayRead(X,(const PetscScalar **)&x);CHKERRQ(ierr);
287   ierr = VecGetArray(Y,(PetscScalar **)&y);CHKERRQ(ierr);
288   { /* Scoping so that constructor is called before pointer is returned */
289     El::DistMatrix<PetscElemScalar,El::VC,El::STAR> xe, ye;
290     xe.LockedAttach(A->rmap->N,1,*a->grid,0,0,x,A->rmap->n);
291     ye.Attach(A->cmap->N,1,*a->grid,0,0,y,A->cmap->n);
292     El::Gemv(El::TRANSPOSE,one,*a->emat,xe,zero,ye);
293   }
294   ierr = VecRestoreArrayRead(X,(const PetscScalar **)&x);CHKERRQ(ierr);
295   ierr = VecRestoreArray(Y,(PetscScalar **)&y);CHKERRQ(ierr);
296   PetscFunctionReturn(0);
297 }
298 
299 #undef __FUNCT__
300 #define __FUNCT__ "MatMultAdd_Elemental"
301 static PetscErrorCode MatMultAdd_Elemental(Mat A,Vec X,Vec Y,Vec Z)
302 {
303   Mat_Elemental         *a = (Mat_Elemental*)A->data;
304   PetscErrorCode        ierr;
305   const PetscElemScalar *x;
306   PetscElemScalar       *z;
307   PetscElemScalar       one = 1;
308 
309   PetscFunctionBegin;
310   if (Y != Z) {ierr = VecCopy(Y,Z);CHKERRQ(ierr);}
311   ierr = VecGetArrayRead(X,(const PetscScalar **)&x);CHKERRQ(ierr);
312   ierr = VecGetArray(Z,(PetscScalar **)&z);CHKERRQ(ierr);
313   { /* Scoping so that constructor is called before pointer is returned */
314     El::DistMatrix<PetscElemScalar,El::VC,El::STAR> xe, ze;
315     xe.LockedAttach(A->cmap->N,1,*a->grid,0,0,x,A->cmap->n);
316     ze.Attach(A->rmap->N,1,*a->grid,0,0,z,A->rmap->n);
317     El::Gemv(El::NORMAL,one,*a->emat,xe,one,ze);
318   }
319   ierr = VecRestoreArrayRead(X,(const PetscScalar **)&x);CHKERRQ(ierr);
320   ierr = VecRestoreArray(Z,(PetscScalar **)&z);CHKERRQ(ierr);
321   PetscFunctionReturn(0);
322 }
323 
324 #undef __FUNCT__
325 #define __FUNCT__ "MatMultTransposeAdd_Elemental"
326 static PetscErrorCode MatMultTransposeAdd_Elemental(Mat A,Vec X,Vec Y,Vec Z)
327 {
328   Mat_Elemental         *a = (Mat_Elemental*)A->data;
329   PetscErrorCode        ierr;
330   const PetscElemScalar *x;
331   PetscElemScalar       *z;
332   PetscElemScalar       one = 1;
333 
334   PetscFunctionBegin;
335   if (Y != Z) {ierr = VecCopy(Y,Z);CHKERRQ(ierr);}
336   ierr = VecGetArrayRead(X,(const PetscScalar **)&x);CHKERRQ(ierr);
337   ierr = VecGetArray(Z,(PetscScalar **)&z);CHKERRQ(ierr);
338   { /* Scoping so that constructor is called before pointer is returned */
339     El::DistMatrix<PetscElemScalar,El::VC,El::STAR> xe, ze;
340     xe.LockedAttach(A->rmap->N,1,*a->grid,0,0,x,A->rmap->n);
341     ze.Attach(A->cmap->N,1,*a->grid,0,0,z,A->cmap->n);
342     El::Gemv(El::TRANSPOSE,one,*a->emat,xe,one,ze);
343   }
344   ierr = VecRestoreArrayRead(X,(const PetscScalar **)&x);CHKERRQ(ierr);
345   ierr = VecRestoreArray(Z,(PetscScalar **)&z);CHKERRQ(ierr);
346   PetscFunctionReturn(0);
347 }
348 
349 #undef __FUNCT__
350 #define __FUNCT__ "MatElementalSyrk_Elemental"
351 PetscErrorCode MatElementalSyrk_Elemental(El::UpperOrLower uplo,El::Orientation orientation,PetscScalar alpha,Mat A,PetscScalar beta,Mat C,PetscBool conjugate)
352 {
353   Mat_Elemental  *a=(Mat_Elemental*)A->data,*c=(Mat_Elemental*)C->data;
354 
355   PetscFunctionBegin;
356   El::Syrk(uplo,orientation,alpha,*a->emat,beta,*c->emat,conjugate);
357   PetscFunctionReturn(0);
358 }
359 
360 #undef __FUNCT__
361 #define __FUNCT__ "MatElementalSyrk"
362 /*@
363   MatElementalSyrk - Symmetric rank-K update:
364     updates C:= αAAT + βC or C:= αATA + βC, depending upon whether orientation is set to NORMAL or TRANSPOSE, respectively. Only the triangle of C specified by the uplo parameter is modified.
365 
366    Logically Collective on Mat
367 
368    Level: beginner
369 
370    References:
371 .      Elemental Users' Guide
372 
373 @*/
374 PetscErrorCode MatElementalSyrk(El::UpperOrLower uplo,El::Orientation orientation,PetscScalar alpha,Mat A,PetscScalar beta,Mat C,PetscBool conjugate)
375 {
376   PetscErrorCode ierr;
377 
378   PetscFunctionBegin;
379   ierr = PetscUseMethod(A,"MatElementalSyrk_C",(El::UpperOrLower,El::Orientation,PetscScalar,Mat,PetscScalar,Mat,PetscBool),(uplo,orientation,alpha,A,beta,C,conjugate));CHKERRQ(ierr);
380   PetscFunctionReturn(0);
381 }
382 
383 #undef __FUNCT__
384 #define __FUNCT__ "MatMatMultNumeric_Elemental"
385 static PetscErrorCode MatMatMultNumeric_Elemental(Mat A,Mat B,Mat C)
386 {
387   Mat_Elemental    *a = (Mat_Elemental*)A->data;
388   Mat_Elemental    *b = (Mat_Elemental*)B->data;
389   Mat_Elemental    *c = (Mat_Elemental*)C->data;
390   PetscElemScalar  one = 1,zero = 0;
391 
392   PetscFunctionBegin;
393   { /* Scoping so that constructor is called before pointer is returned */
394     El::Gemm(El::NORMAL,El::NORMAL,one,*a->emat,*b->emat,zero,*c->emat);
395   }
396   C->assembled = PETSC_TRUE;
397   PetscFunctionReturn(0);
398 }
399 
400 #undef __FUNCT__
401 #define __FUNCT__ "MatMatMultSymbolic_Elemental"
402 static PetscErrorCode MatMatMultSymbolic_Elemental(Mat A,Mat B,PetscReal fill,Mat *C)
403 {
404   PetscErrorCode ierr;
405   Mat            Ce;
406   MPI_Comm       comm;
407 
408   PetscFunctionBegin;
409   ierr = PetscObjectGetComm((PetscObject)A,&comm);CHKERRQ(ierr);
410   ierr = MatCreate(comm,&Ce);CHKERRQ(ierr);
411   ierr = MatSetSizes(Ce,A->rmap->n,B->cmap->n,PETSC_DECIDE,PETSC_DECIDE);CHKERRQ(ierr);
412   ierr = MatSetType(Ce,MATELEMENTAL);CHKERRQ(ierr);
413   ierr = MatSetUp(Ce);CHKERRQ(ierr);
414   *C = Ce;
415   PetscFunctionReturn(0);
416 }
417 
418 #undef __FUNCT__
419 #define __FUNCT__ "MatMatMult_Elemental"
420 static PetscErrorCode MatMatMult_Elemental(Mat A,Mat B,MatReuse scall,PetscReal fill,Mat *C)
421 {
422   PetscErrorCode ierr;
423 
424   PetscFunctionBegin;
425   if (scall == MAT_INITIAL_MATRIX){
426     ierr = PetscLogEventBegin(MAT_MatMultSymbolic,A,B,0,0);CHKERRQ(ierr);
427     ierr = MatMatMultSymbolic_Elemental(A,B,1.0,C);CHKERRQ(ierr);
428     ierr = PetscLogEventEnd(MAT_MatMultSymbolic,A,B,0,0);CHKERRQ(ierr);
429   }
430   ierr = PetscLogEventBegin(MAT_MatMultNumeric,A,B,0,0);CHKERRQ(ierr);
431   ierr = MatMatMultNumeric_Elemental(A,B,*C);CHKERRQ(ierr);
432   ierr = PetscLogEventEnd(MAT_MatMultNumeric,A,B,0,0);CHKERRQ(ierr);
433   PetscFunctionReturn(0);
434 }
435 
436 #undef __FUNCT__
437 #define __FUNCT__ "MatMatTransposeMultNumeric_Elemental"
438 static PetscErrorCode MatMatTransposeMultNumeric_Elemental(Mat A,Mat B,Mat C)
439 {
440   Mat_Elemental      *a = (Mat_Elemental*)A->data;
441   Mat_Elemental      *b = (Mat_Elemental*)B->data;
442   Mat_Elemental      *c = (Mat_Elemental*)C->data;
443   PetscElemScalar    one = 1,zero = 0;
444 
445   PetscFunctionBegin;
446   { /* Scoping so that constructor is called before pointer is returned */
447     El::Gemm(El::NORMAL,El::TRANSPOSE,one,*a->emat,*b->emat,zero,*c->emat);
448   }
449   C->assembled = PETSC_TRUE;
450   PetscFunctionReturn(0);
451 }
452 
453 #undef __FUNCT__
454 #define __FUNCT__ "MatMatTransposeMultSymbolic_Elemental"
455 static PetscErrorCode MatMatTransposeMultSymbolic_Elemental(Mat A,Mat B,PetscReal fill,Mat *C)
456 {
457   PetscErrorCode ierr;
458   Mat            Ce;
459   MPI_Comm       comm;
460 
461   PetscFunctionBegin;
462   ierr = PetscObjectGetComm((PetscObject)A,&comm);CHKERRQ(ierr);
463   ierr = MatCreate(comm,&Ce);CHKERRQ(ierr);
464   ierr = MatSetSizes(Ce,A->rmap->n,B->rmap->n,PETSC_DECIDE,PETSC_DECIDE);CHKERRQ(ierr);
465   ierr = MatSetType(Ce,MATELEMENTAL);CHKERRQ(ierr);
466   ierr = MatSetUp(Ce);CHKERRQ(ierr);
467   *C = Ce;
468   PetscFunctionReturn(0);
469 }
470 
471 #undef __FUNCT__
472 #define __FUNCT__ "MatMatTransposeMult_Elemental"
473 static PetscErrorCode MatMatTransposeMult_Elemental(Mat A,Mat B,MatReuse scall,PetscReal fill,Mat *C)
474 {
475   PetscErrorCode ierr;
476 
477   PetscFunctionBegin;
478   if (scall == MAT_INITIAL_MATRIX){
479     ierr = PetscLogEventBegin(MAT_MatTransposeMultSymbolic,A,B,0,0);CHKERRQ(ierr);
480     ierr = MatMatMultSymbolic_Elemental(A,B,1.0,C);CHKERRQ(ierr);
481     ierr = PetscLogEventEnd(MAT_MatTransposeMultSymbolic,A,B,0,0);CHKERRQ(ierr);
482   }
483   ierr = PetscLogEventBegin(MAT_MatTransposeMultNumeric,A,B,0,0);CHKERRQ(ierr);
484   ierr = MatMatTransposeMultNumeric_Elemental(A,B,*C);CHKERRQ(ierr);
485   ierr = PetscLogEventEnd(MAT_MatTransposeMultNumeric,A,B,0,0);CHKERRQ(ierr);
486   PetscFunctionReturn(0);
487 }
488 
#undef __FUNCT__
#define __FUNCT__ "MatGetDiagonal_Elemental"
/* Extract the global diagonal of A into the vector D.

   Every process iterates over ALL min(M,N) diagonal indices: each global
   index i is translated (PETSc ordering -> owner rank/offset -> Elemental
   cyclic ordering) separately for the row and the column, the entry is
   fetched with Get(), and inserted into D.
   NOTE(review): El::DistMatrix::Get() looks collective over the grid, which
   would make this one communication round per diagonal entry -- confirm
   before relying on it for large matrices. */
static PetscErrorCode MatGetDiagonal_Elemental(Mat A,Vec D)
{
  PetscInt        i,nrows,ncols,nD,rrank,ridx,crank,cidx;
  Mat_Elemental   *a = (Mat_Elemental*)A->data;
  PetscErrorCode  ierr;
  PetscElemScalar v;
  MPI_Comm        comm;

  PetscFunctionBegin;
  ierr = PetscObjectGetComm((PetscObject)A,&comm);CHKERRQ(ierr);
  ierr = MatGetSize(A,&nrows,&ncols);CHKERRQ(ierr);
  nD = nrows>ncols ? ncols : nrows;   /* diagonal length of a possibly rectangular matrix */
  for (i=0; i<nD; i++) {
    PetscInt erow,ecol;
    P2RO(A,0,i,&rrank,&ridx);         /* PETSc row i -> (owner rank, local offset) */
    RO2E(A,0,rrank,ridx,&erow);       /* -> Elemental row index */
    if (rrank < 0 || ridx < 0 || erow < 0) SETERRQ(comm,PETSC_ERR_PLIB,"Incorrect row translation");
    P2RO(A,1,i,&crank,&cidx);         /* same translation for the column */
    RO2E(A,1,crank,cidx,&ecol);
    if (crank < 0 || cidx < 0 || ecol < 0) SETERRQ(comm,PETSC_ERR_PLIB,"Incorrect col translation");
    v = a->emat->Get(erow,ecol);
    ierr = VecSetValues(D,1,&i,(PetscScalar*)&v,INSERT_VALUES);CHKERRQ(ierr);
  }
  ierr = VecAssemblyBegin(D);CHKERRQ(ierr);
  ierr = VecAssemblyEnd(D);CHKERRQ(ierr);
  PetscFunctionReturn(0);
}
518 
519 #undef __FUNCT__
520 #define __FUNCT__ "MatDiagonalScale_Elemental"
521 static PetscErrorCode MatDiagonalScale_Elemental(Mat X,Vec L,Vec R)
522 {
523   Mat_Elemental         *x = (Mat_Elemental*)X->data;
524   const PetscElemScalar *d;
525   PetscErrorCode        ierr;
526 
527   PetscFunctionBegin;
528   if (R) {
529     ierr = VecGetArrayRead(R,(const PetscScalar **)&d);CHKERRQ(ierr);
530     El::DistMatrix<PetscElemScalar,El::VC,El::STAR> de;
531     de.LockedAttach(X->cmap->N,1,*x->grid,0,0,d,X->cmap->n);
532     El::DiagonalScale(El::RIGHT,El::NORMAL,de,*x->emat);
533     ierr = VecRestoreArrayRead(R,(const PetscScalar **)&d);CHKERRQ(ierr);
534   }
535   if (L) {
536     ierr = VecGetArrayRead(L,(const PetscScalar **)&d);CHKERRQ(ierr);
537     El::DistMatrix<PetscElemScalar,El::VC,El::STAR> de;
538     de.LockedAttach(X->rmap->N,1,*x->grid,0,0,d,X->rmap->n);
539     El::DiagonalScale(El::LEFT,El::NORMAL,de,*x->emat);
540     ierr = VecRestoreArrayRead(L,(const PetscScalar **)&d);CHKERRQ(ierr);
541   }
542   PetscFunctionReturn(0);
543 }
544 
545 #undef __FUNCT__
546 #define __FUNCT__ "MatScale_Elemental"
547 static PetscErrorCode MatScale_Elemental(Mat X,PetscScalar a)
548 {
549   Mat_Elemental  *x = (Mat_Elemental*)X->data;
550 
551   PetscFunctionBegin;
552   El::Scale((PetscElemScalar)a,*x->emat);
553   PetscFunctionReturn(0);
554 }
555 
556 /*
557   MatAXPY - Computes Y = a*X + Y.
558 */
559 #undef __FUNCT__
560 #define __FUNCT__ "MatAXPY_Elemental"
561 static PetscErrorCode MatAXPY_Elemental(Mat Y,PetscScalar a,Mat X,MatStructure str)
562 {
563   Mat_Elemental  *x = (Mat_Elemental*)X->data;
564   Mat_Elemental  *y = (Mat_Elemental*)Y->data;
565   PetscErrorCode ierr;
566 
567   PetscFunctionBegin;
568   El::Axpy((PetscElemScalar)a,*x->emat,*y->emat);
569   ierr = PetscObjectStateIncrease((PetscObject)Y);CHKERRQ(ierr);
570   PetscFunctionReturn(0);
571 }
572 
573 #undef __FUNCT__
574 #define __FUNCT__ "MatCopy_Elemental"
575 static PetscErrorCode MatCopy_Elemental(Mat A,Mat B,MatStructure str)
576 {
577   Mat_Elemental *a=(Mat_Elemental*)A->data;
578   Mat_Elemental *b=(Mat_Elemental*)B->data;
579 
580   PetscFunctionBegin;
581   El::Copy(*a->emat,*b->emat);
582   PetscFunctionReturn(0);
583 }
584 
585 #undef __FUNCT__
586 #define __FUNCT__ "MatDuplicate_Elemental"
587 static PetscErrorCode MatDuplicate_Elemental(Mat A,MatDuplicateOption op,Mat *B)
588 {
589   Mat            Be;
590   MPI_Comm       comm;
591   Mat_Elemental  *a=(Mat_Elemental*)A->data;
592   PetscErrorCode ierr;
593 
594   PetscFunctionBegin;
595   ierr = PetscObjectGetComm((PetscObject)A,&comm);CHKERRQ(ierr);
596   ierr = MatCreate(comm,&Be);CHKERRQ(ierr);
597   ierr = MatSetSizes(Be,A->rmap->n,A->cmap->n,PETSC_DECIDE,PETSC_DECIDE);CHKERRQ(ierr);
598   ierr = MatSetType(Be,MATELEMENTAL);CHKERRQ(ierr);
599   ierr = MatSetUp(Be);CHKERRQ(ierr);
600   *B = Be;
601   if (op == MAT_COPY_VALUES) {
602     Mat_Elemental *b=(Mat_Elemental*)Be->data;
603     El::Copy(*a->emat,*b->emat);
604   }
605   Be->assembled = PETSC_TRUE;
606   PetscFunctionReturn(0);
607 }
608 
#undef __FUNCT__
#define __FUNCT__ "MatTranspose_Elemental"
/* B = A^T, out-of-place only.

   With MAT_INITIAL_MATRIX a new MATELEMENTAL B with transposed dimensions is
   created; otherwise the incoming *B is written into directly.
   NOTE(review): an in-place request (*B == A) is not rejected here and would
   pass the same emat as source and destination to El::Transpose() -- confirm
   callers never do that. */
static PetscErrorCode MatTranspose_Elemental(Mat A,MatReuse reuse,Mat *B)
{
  Mat            Be = *B;
  PetscErrorCode ierr;
  MPI_Comm       comm;
  Mat_Elemental  *a = (Mat_Elemental*)A->data, *b;

  PetscFunctionBegin;
  ierr = PetscObjectGetComm((PetscObject)A,&comm);CHKERRQ(ierr);
  /* Only out-of-place supported */
  if (reuse == MAT_INITIAL_MATRIX){
    ierr = MatCreate(comm,&Be);CHKERRQ(ierr);
    ierr = MatSetSizes(Be,A->cmap->n,A->rmap->n,PETSC_DECIDE,PETSC_DECIDE);CHKERRQ(ierr);  /* dimensions swapped */
    ierr = MatSetType(Be,MATELEMENTAL);CHKERRQ(ierr);
    ierr = MatSetUp(Be);CHKERRQ(ierr);
    *B = Be;
  }
  b = (Mat_Elemental*)Be->data;
  El::Transpose(*a->emat,*b->emat);
  Be->assembled = PETSC_TRUE;
  PetscFunctionReturn(0);
}
633 
634 #undef __FUNCT__
635 #define __FUNCT__ "MatConjugate_Elemental"
636 static PetscErrorCode MatConjugate_Elemental(Mat A)
637 {
638   Mat_Elemental  *a = (Mat_Elemental*)A->data;
639 
640   PetscFunctionBegin;
641   El::Conjugate(*a->emat);
642   PetscFunctionReturn(0);
643 }
644 
#undef __FUNCT__
#define __FUNCT__ "MatHermitianTranspose_Elemental"
/* B = conj(A)^T, out-of-place only (mirrors MatTranspose_Elemental but uses
   El::Adjoint).

   With MAT_INITIAL_MATRIX a new MATELEMENTAL B with transposed dimensions is
   created; otherwise the incoming *B is written into directly.
   NOTE(review): an in-place request (*B == A) is not rejected here -- confirm
   callers never do that. */
static PetscErrorCode MatHermitianTranspose_Elemental(Mat A,MatReuse reuse,Mat *B)
{
  Mat            Be = *B;
  PetscErrorCode ierr;
  MPI_Comm       comm;
  Mat_Elemental  *a = (Mat_Elemental*)A->data, *b;

  PetscFunctionBegin;
  ierr = PetscObjectGetComm((PetscObject)A,&comm);CHKERRQ(ierr);
  /* Only out-of-place supported */
  if (reuse == MAT_INITIAL_MATRIX){
    ierr = MatCreate(comm,&Be);CHKERRQ(ierr);
    ierr = MatSetSizes(Be,A->cmap->n,A->rmap->n,PETSC_DECIDE,PETSC_DECIDE);CHKERRQ(ierr);  /* dimensions swapped */
    ierr = MatSetType(Be,MATELEMENTAL);CHKERRQ(ierr);
    ierr = MatSetUp(Be);CHKERRQ(ierr);
    *B = Be;
  }
  b = (Mat_Elemental*)Be->data;
  El::Adjoint(*a->emat,*b->emat);
  Be->assembled = PETSC_TRUE;
  PetscFunctionReturn(0);
}
669 
#undef __FUNCT__
#define __FUNCT__ "MatSolve_Elemental"
/* X = inv(A)*B for a previously factored MATELEMENTAL.

   B is copied into X, X's local array is viewed as a [VC,STAR] Elemental
   vector, redistributed into [MC,MR] for the triangular solves, and the
   solution is copied back into the [VC,STAR] view (and hence into X).
   Supports LU (with or without pivoting) and Cholesky factorizations. */
static PetscErrorCode MatSolve_Elemental(Mat A,Vec B,Vec X)
{
  Mat_Elemental     *a = (Mat_Elemental*)A->data;
  PetscErrorCode    ierr;
  PetscElemScalar   *x;

  PetscFunctionBegin;
  ierr = VecCopy(B,X);CHKERRQ(ierr);   /* the solve overwrites the RHS, so work on X */
  ierr = VecGetArray(X,(PetscScalar **)&x);CHKERRQ(ierr);
  El::DistMatrix<PetscElemScalar,El::VC,El::STAR> xe;
  xe.Attach(A->rmap->N,1,*a->grid,0,0,x,A->rmap->n);
  El::DistMatrix<PetscElemScalar,El::MC,El::MR> xer(xe);  /* redistribute for the solver */
  switch (A->factortype) {
  case MAT_FACTOR_LU:
    /* a non-empty pivot vector means the factorization was computed with pivoting */
    if ((*a->pivot).AllocatedMemory()) {
      El::lu::SolveAfter(El::NORMAL,*a->emat,*a->pivot,xer);
      El::Copy(xer,xe);
    } else {
      El::lu::SolveAfter(El::NORMAL,*a->emat,xer);
      El::Copy(xer,xe);
    }
    break;
  case MAT_FACTOR_CHOLESKY:
    El::cholesky::SolveAfter(El::UPPER,El::NORMAL,*a->emat,xer);
    El::Copy(xer,xe);
    break;
  default:
    SETERRQ(PETSC_COMM_SELF,PETSC_ERR_SUP,"Unfactored Matrix or Unsupported MatFactorType");
    break;
  }
  ierr = VecRestoreArray(X,(PetscScalar **)&x);CHKERRQ(ierr);
  PetscFunctionReturn(0);
}
705 
706 #undef __FUNCT__
707 #define __FUNCT__ "MatSolveAdd_Elemental"
708 static PetscErrorCode MatSolveAdd_Elemental(Mat A,Vec B,Vec Y,Vec X)
709 {
710   PetscErrorCode    ierr;
711 
712   PetscFunctionBegin;
713   ierr = MatSolve_Elemental(A,B,X);CHKERRQ(ierr);
714   ierr = VecAXPY(X,1,Y);CHKERRQ(ierr);
715   PetscFunctionReturn(0);
716 }
717 
#undef __FUNCT__
#define __FUNCT__ "MatMatSolve_Elemental"
/* X = inv(A)*B with multiple right-hand sides, all three MATELEMENTAL.

   B is copied into X and the in-place Elemental solve is applied to X's
   distributed matrix.  Supports LU (with or without pivoting) and Cholesky. */
static PetscErrorCode MatMatSolve_Elemental(Mat A,Mat B,Mat X)
{
  Mat_Elemental *a=(Mat_Elemental*)A->data;
  Mat_Elemental *b=(Mat_Elemental*)B->data;
  Mat_Elemental *x=(Mat_Elemental*)X->data;

  PetscFunctionBegin;
  El::Copy(*b->emat,*x->emat);   /* the solve overwrites the RHS, so work on X */
  switch (A->factortype) {
  case MAT_FACTOR_LU:
    /* a non-empty pivot vector means the factorization was computed with pivoting */
    if ((*a->pivot).AllocatedMemory()) {
      El::lu::SolveAfter(El::NORMAL,*a->emat,*a->pivot,*x->emat);
    } else {
      El::lu::SolveAfter(El::NORMAL,*a->emat,*x->emat);
    }
    break;
  case MAT_FACTOR_CHOLESKY:
    El::cholesky::SolveAfter(El::UPPER,El::NORMAL,*a->emat,*x->emat);
    break;
  default:
    SETERRQ(PETSC_COMM_SELF,PETSC_ERR_SUP,"Unfactored Matrix or Unsupported MatFactorType");
    break;
  }
  PetscFunctionReturn(0);
}
745 
#undef __FUNCT__
#define __FUNCT__ "MatLUFactor_Elemental"
/* In-place LU factorization of a MATELEMENTAL matrix.

   info->dtcol != 0 selects the pivoted variant, storing the pivot vector in
   a->pivot for later use by MatSolve()/MatMatSolve(); otherwise LU without
   pivoting is performed.  NOTE(review): dtcol nominally carries a pivoting
   tolerance -- here it is used only as an on/off switch; confirm intended. */
static PetscErrorCode MatLUFactor_Elemental(Mat A,IS row,IS col,const MatFactorInfo *info)
{
  Mat_Elemental  *a = (Mat_Elemental*)A->data;
  PetscErrorCode ierr;

  PetscFunctionBegin;
  if (info->dtcol){
    El::LU(*a->emat,*a->pivot);
  } else {
    El::LU(*a->emat);
  }
  A->factortype = MAT_FACTOR_LU;
  A->assembled  = PETSC_TRUE;

  /* record which solver produced this factorization */
  ierr = PetscFree(A->solvertype);CHKERRQ(ierr);
  ierr = PetscStrallocpy(MATSOLVERELEMENTAL,&A->solvertype);CHKERRQ(ierr);
  PetscFunctionReturn(0);
}
766 
767 #undef __FUNCT__
768 #define __FUNCT__ "MatLUFactorNumeric_Elemental"
769 static PetscErrorCode  MatLUFactorNumeric_Elemental(Mat F,Mat A,const MatFactorInfo *info)
770 {
771   PetscErrorCode ierr;
772 
773   PetscFunctionBegin;
774   ierr = MatCopy(A,F,SAME_NONZERO_PATTERN);CHKERRQ(ierr);
775   ierr = MatLUFactor_Elemental(F,0,0,info);CHKERRQ(ierr);
776   PetscFunctionReturn(0);
777 }
778 
779 #undef __FUNCT__
780 #define __FUNCT__ "MatLUFactorSymbolic_Elemental"
781 static PetscErrorCode  MatLUFactorSymbolic_Elemental(Mat F,Mat A,IS r,IS c,const MatFactorInfo *info)
782 {
783   PetscFunctionBegin;
784   /* F is create and allocated by MatGetFactor_elemental_petsc(), skip this routine. */
785   PetscFunctionReturn(0);
786 }
787 
788 #undef __FUNCT__
789 #define __FUNCT__ "MatCholeskyFactor_Elemental"
790 static PetscErrorCode MatCholeskyFactor_Elemental(Mat A,IS perm,const MatFactorInfo *info)
791 {
792   Mat_Elemental  *a = (Mat_Elemental*)A->data;
793   El::DistMatrix<PetscElemScalar,El::MC,El::STAR> d;
794   PetscErrorCode ierr;
795 
796   PetscFunctionBegin;
797   El::Cholesky(El::UPPER,*a->emat);
798   A->factortype = MAT_FACTOR_CHOLESKY;
799   A->assembled  = PETSC_TRUE;
800 
801   ierr = PetscFree(A->solvertype);CHKERRQ(ierr);
802   ierr = PetscStrallocpy(MATSOLVERELEMENTAL,&A->solvertype);CHKERRQ(ierr);
803   PetscFunctionReturn(0);
804 }
805 
806 #undef __FUNCT__
807 #define __FUNCT__ "MatCholeskyFactorNumeric_Elemental"
808 static PetscErrorCode MatCholeskyFactorNumeric_Elemental(Mat F,Mat A,const MatFactorInfo *info)
809 {
810   PetscErrorCode ierr;
811 
812   PetscFunctionBegin;
813   ierr = MatCopy(A,F,SAME_NONZERO_PATTERN);CHKERRQ(ierr);
814   ierr = MatCholeskyFactor_Elemental(F,0,info);CHKERRQ(ierr);
815   PetscFunctionReturn(0);
816 }
817 
#undef __FUNCT__
#define __FUNCT__ "MatCholeskyFactorSymbolic_Elemental"
/* Symbolic Cholesky is a no-op for the dense Elemental format. */
static PetscErrorCode MatCholeskyFactorSymbolic_Elemental(Mat F,Mat A,IS perm,const MatFactorInfo *info)
{
  PetscFunctionBegin;
  /* F is created and allocated by MatGetFactor_elemental_elemental(); nothing to do here. */
  PetscFunctionReturn(0);
}
826 
#undef __FUNCT__
#define __FUNCT__ "MatFactorGetSolverPackage_elemental_elemental"
/* Reports the solver package (Elemental) that produced this factored matrix. */
PetscErrorCode MatFactorGetSolverPackage_elemental_elemental(Mat A,const MatSolverPackage *type)
{
  PetscFunctionBegin;
  *type = MATSOLVERELEMENTAL;
  PetscFunctionReturn(0);
}
835 
#undef __FUNCT__
#define __FUNCT__ "MatGetFactor_elemental_elemental"
/* Creates the (empty) factor matrix F with the same layout as A; the actual
   factorization is performed later by the numeric-factor routines. */
static PetscErrorCode MatGetFactor_elemental_elemental(Mat A,MatFactorType ftype,Mat *F)
{
  Mat            B;
  PetscErrorCode ierr;

  PetscFunctionBegin;
  /* Create the factorization matrix */
  ierr = MatCreate(PetscObjectComm((PetscObject)A),&B);CHKERRQ(ierr);
  ierr = MatSetSizes(B,A->rmap->n,A->cmap->n,PETSC_DECIDE,PETSC_DECIDE);CHKERRQ(ierr);
  ierr = MatSetType(B,MATELEMENTAL);CHKERRQ(ierr);
  ierr = MatSetUp(B);CHKERRQ(ierr);
  B->factortype = ftype;
  /* record which package will do the factorization */
  ierr = PetscFree(B->solvertype);CHKERRQ(ierr);
  ierr = PetscStrallocpy(MATSOLVERELEMENTAL,&B->solvertype);CHKERRQ(ierr);

  ierr = PetscObjectComposeFunction((PetscObject)B,"MatFactorGetSolverPackage_C",MatFactorGetSolverPackage_elemental_elemental);CHKERRQ(ierr);
  *F            = B;
  PetscFunctionReturn(0);
}
857 
#undef __FUNCT__
#define __FUNCT__ "MatSolverPackageRegister_Elemental"
/* Registers the Elemental LU and Cholesky factorizations with PETSc's
   solver-package registry so -pc_factor_mat_solver_package elemental works. */
PETSC_EXTERN PetscErrorCode MatSolverPackageRegister_Elemental(void)
{
  PetscErrorCode ierr;

  PetscFunctionBegin;
  ierr = MatSolverPackageRegister(MATSOLVERELEMENTAL,MATELEMENTAL,        MAT_FACTOR_LU,MatGetFactor_elemental_elemental);CHKERRQ(ierr);
  ierr = MatSolverPackageRegister(MATSOLVERELEMENTAL,MATELEMENTAL,        MAT_FACTOR_CHOLESKY,MatGetFactor_elemental_elemental);CHKERRQ(ierr);
  PetscFunctionReturn(0);
}
869 
870 #undef __FUNCT__
871 #define __FUNCT__ "MatNorm_Elemental"
872 static PetscErrorCode MatNorm_Elemental(Mat A,NormType type,PetscReal *nrm)
873 {
874   Mat_Elemental *a=(Mat_Elemental*)A->data;
875 
876   PetscFunctionBegin;
877   switch (type){
878   case NORM_1:
879     *nrm = El::OneNorm(*a->emat);
880     break;
881   case NORM_FROBENIUS:
882     *nrm = El::FrobeniusNorm(*a->emat);
883     break;
884   case NORM_INFINITY:
885     *nrm = El::InfinityNorm(*a->emat);
886     break;
887   default:
888     printf("Error: unsupported norm type!\n");
889   }
890   PetscFunctionReturn(0);
891 }
892 
#undef __FUNCT__
#define __FUNCT__ "MatZeroEntries_Elemental"
/* Sets every entry of the underlying Elemental matrix to zero. */
static PetscErrorCode MatZeroEntries_Elemental(Mat A)
{
  Mat_Elemental *a=(Mat_Elemental*)A->data;

  PetscFunctionBegin;
  El::Zero(*a->emat);
  PetscFunctionReturn(0);
}
903 
904 #undef __FUNCT__
905 #define __FUNCT__ "MatGetOwnershipIS_Elemental"
906 static PetscErrorCode MatGetOwnershipIS_Elemental(Mat A,IS *rows,IS *cols)
907 {
908   Mat_Elemental  *a = (Mat_Elemental*)A->data;
909   PetscErrorCode ierr;
910   PetscInt       i,m,shift,stride,*idx;
911 
912   PetscFunctionBegin;
913   if (rows) {
914     m = a->emat->LocalHeight();
915     shift = a->emat->ColShift();
916     stride = a->emat->ColStride();
917     ierr = PetscMalloc1(m,&idx);CHKERRQ(ierr);
918     for (i=0; i<m; i++) {
919       PetscInt rank,offset;
920       E2RO(A,0,shift+i*stride,&rank,&offset);
921       RO2P(A,0,rank,offset,&idx[i]);
922     }
923     ierr = ISCreateGeneral(PETSC_COMM_SELF,m,idx,PETSC_OWN_POINTER,rows);CHKERRQ(ierr);
924   }
925   if (cols) {
926     m = a->emat->LocalWidth();
927     shift = a->emat->RowShift();
928     stride = a->emat->RowStride();
929     ierr = PetscMalloc1(m,&idx);CHKERRQ(ierr);
930     for (i=0; i<m; i++) {
931       PetscInt rank,offset;
932       E2RO(A,1,shift+i*stride,&rank,&offset);
933       RO2P(A,1,rank,offset,&idx[i]);
934     }
935     ierr = ISCreateGeneral(PETSC_COMM_SELF,m,idx,PETSC_OWN_POINTER,cols);CHKERRQ(ierr);
936   }
937   PetscFunctionReturn(0);
938 }
939 
#undef __FUNCT__
#define __FUNCT__ "MatConvert_Elemental_Dense"
/* Converts a MATELEMENTAL matrix to MATDENSE by translating each locally owned
   (PETSc) index pair into Elemental local indices and reading entries with
   GetLocal(). Supports MAT_INITIAL_MATRIX, MAT_REUSE_MATRIX and MAT_INPLACE_MATRIX. */
static PetscErrorCode MatConvert_Elemental_Dense(Mat A,MatType newtype,MatReuse reuse,Mat *B)
{
  Mat                Bmpi;
  Mat_Elemental      *a = (Mat_Elemental*)A->data;
  MPI_Comm           comm;
  PetscErrorCode     ierr;
  IS                 isrows,iscols;
  PetscInt           rrank,ridx,crank,cidx,nrows,ncols,i,j,erow,ecol,elrow,elcol;
  const PetscInt     *rows,*cols;
  PetscElemScalar    v;
  const El::Grid     &grid = a->emat->Grid();

  PetscFunctionBegin;
  ierr = PetscObjectGetComm((PetscObject)A,&comm);CHKERRQ(ierr);

  if (reuse == MAT_REUSE_MATRIX) {
    Bmpi = *B;
  } else {
    ierr = MatCreate(comm,&Bmpi);CHKERRQ(ierr);
    ierr = MatSetSizes(Bmpi,A->rmap->n,A->cmap->n,PETSC_DECIDE,PETSC_DECIDE);CHKERRQ(ierr);
    ierr = MatSetType(Bmpi,MATDENSE);CHKERRQ(ierr);
    ierr = MatSetUp(Bmpi);CHKERRQ(ierr);
  }

  /* Get local entries of A */
  ierr = MatGetOwnershipIS(A,&isrows,&iscols);CHKERRQ(ierr);
  ierr = ISGetLocalSize(isrows,&nrows);CHKERRQ(ierr);
  ierr = ISGetIndices(isrows,&rows);CHKERRQ(ierr);
  ierr = ISGetLocalSize(iscols,&ncols);CHKERRQ(ierr);
  ierr = ISGetIndices(iscols,&cols);CHKERRQ(ierr);

  /* Both branches perform the same translation; only the loop nesting order differs.
     NOTE(review): elrow/elcol assume this process owns (erow,ecol) in the default
     [MC,MR] distribution, which MatGetOwnershipIS() guarantees -- confirm if the
     distribution is ever changed. */
  if (a->roworiented) {
    for (i=0; i<nrows; i++) {
      P2RO(A,0,rows[i],&rrank,&ridx); /* convert indices between PETSc <-> (Rank,Offset) <-> Elemental */
      RO2E(A,0,rrank,ridx,&erow);
      if (rrank < 0 || ridx < 0 || erow < 0) SETERRQ(comm,PETSC_ERR_PLIB,"Incorrect row translation");
      for (j=0; j<ncols; j++) {
        P2RO(A,1,cols[j],&crank,&cidx);
        RO2E(A,1,crank,cidx,&ecol);
        if (crank < 0 || cidx < 0 || ecol < 0) SETERRQ(comm,PETSC_ERR_PLIB,"Incorrect col translation");

        elrow = erow / grid.MCSize(); /* Elemental local row index */
        elcol = ecol / grid.MRSize(); /* Elemental local column index */
        v = a->emat->GetLocal(elrow,elcol);
        ierr = MatSetValues(Bmpi,1,&rows[i],1,&cols[j],(PetscScalar *)&v,INSERT_VALUES);CHKERRQ(ierr);
      }
    }
  } else { /* column-oriented */
    for (j=0; j<ncols; j++) {
      P2RO(A,1,cols[j],&crank,&cidx);
      RO2E(A,1,crank,cidx,&ecol);
      if (crank < 0 || cidx < 0 || ecol < 0) SETERRQ(comm,PETSC_ERR_PLIB,"Incorrect col translation");
      for (i=0; i<nrows; i++) {
        P2RO(A,0,rows[i],&rrank,&ridx); /* convert indices between PETSc <-> (Rank,Offset) <-> Elemental */
        RO2E(A,0,rrank,ridx,&erow);
        if (rrank < 0 || ridx < 0 || erow < 0) SETERRQ(comm,PETSC_ERR_PLIB,"Incorrect row translation");

        elrow = erow / grid.MCSize(); /* Elemental local row index */
        elcol = ecol / grid.MRSize(); /* Elemental local column index */
        v = a->emat->GetLocal(elrow,elcol);
        ierr = MatSetValues(Bmpi,1,&rows[i],1,&cols[j],(PetscScalar *)&v,INSERT_VALUES);CHKERRQ(ierr);
      }
    }
  }
  ierr = MatAssemblyBegin(Bmpi,MAT_FINAL_ASSEMBLY);CHKERRQ(ierr);
  ierr = MatAssemblyEnd(Bmpi,MAT_FINAL_ASSEMBLY);CHKERRQ(ierr);
  if (reuse == MAT_INPLACE_MATRIX) {
    ierr = MatHeaderReplace(A,&Bmpi);CHKERRQ(ierr);
  } else {
    *B = Bmpi;
  }
  ierr = ISDestroy(&isrows);CHKERRQ(ierr);
  ierr = ISDestroy(&iscols);CHKERRQ(ierr);
  PetscFunctionReturn(0);
}
1017 
#undef __FUNCT__
#define __FUNCT__ "MatConvert_SeqAIJ_Elemental"
/* Converts a MATSEQAIJ matrix to MATELEMENTAL by inserting one row at a time. */
PETSC_INTERN PetscErrorCode MatConvert_SeqAIJ_Elemental(Mat A, MatType newtype,MatReuse reuse,Mat *newmat)
{
  Mat               mat_elemental;
  PetscErrorCode    ierr;
  PetscInt          M=A->rmap->N,N=A->cmap->N,row,ncols;
  const PetscInt    *cols;
  const PetscScalar *vals;

  PetscFunctionBegin;
  ierr = MatCreate(PetscObjectComm((PetscObject)A), &mat_elemental);CHKERRQ(ierr);
  ierr = MatSetSizes(mat_elemental,PETSC_DECIDE,PETSC_DECIDE,M,N);CHKERRQ(ierr);
  ierr = MatSetType(mat_elemental,MATELEMENTAL);CHKERRQ(ierr);
  ierr = MatSetUp(mat_elemental);CHKERRQ(ierr);
  for (row=0; row<M; row++) {
    ierr = MatGetRow(A,row,&ncols,&cols,&vals);CHKERRQ(ierr);
    /* PETSc-Elemental interface uses axpy for setting off-processor entries, only ADD_VALUES is allowed */
    ierr = MatSetValues(mat_elemental,1,&row,ncols,cols,vals,ADD_VALUES);CHKERRQ(ierr);
    ierr = MatRestoreRow(A,row,&ncols,&cols,&vals);CHKERRQ(ierr);
  }
  ierr = MatAssemblyBegin(mat_elemental, MAT_FINAL_ASSEMBLY);CHKERRQ(ierr);
  ierr = MatAssemblyEnd(mat_elemental, MAT_FINAL_ASSEMBLY);CHKERRQ(ierr);

  if (reuse == MAT_INPLACE_MATRIX) {
    ierr = MatHeaderReplace(A,&mat_elemental);CHKERRQ(ierr);
  } else {
    *newmat = mat_elemental;
  }
  PetscFunctionReturn(0);
}
1049 
1050 #undef __FUNCT__
1051 #define __FUNCT__ "MatConvert_MPIAIJ_Elemental"
1052 PETSC_INTERN PetscErrorCode MatConvert_MPIAIJ_Elemental(Mat A, MatType newtype,MatReuse reuse,Mat *newmat)
1053 {
1054   Mat               mat_elemental;
1055   PetscErrorCode    ierr;
1056   PetscInt          row,ncols,rstart=A->rmap->rstart,rend=A->rmap->rend,j;
1057   const PetscInt    *cols;
1058   const PetscScalar *vals;
1059 
1060   PetscFunctionBegin;
1061   ierr = MatCreate(PetscObjectComm((PetscObject)A), &mat_elemental);CHKERRQ(ierr);
1062   ierr = MatSetSizes(mat_elemental,PETSC_DECIDE,PETSC_DECIDE,A->rmap->N,A->cmap->N);CHKERRQ(ierr);
1063   ierr = MatSetType(mat_elemental,MATELEMENTAL);CHKERRQ(ierr);
1064   ierr = MatSetUp(mat_elemental);CHKERRQ(ierr);
1065   for (row=rstart; row<rend; row++) {
1066     ierr = MatGetRow(A,row,&ncols,&cols,&vals);CHKERRQ(ierr);
1067     for (j=0; j<ncols; j++) {
1068       /* PETSc-Elemental interaface uses axpy for setting off-processor entries, only ADD_VALUES is allowed */
1069       ierr = MatSetValues(mat_elemental,1,&row,1,&cols[j],&vals[j],ADD_VALUES);CHKERRQ(ierr);
1070     }
1071     ierr = MatRestoreRow(A,row,&ncols,&cols,&vals);CHKERRQ(ierr);
1072   }
1073   ierr = MatAssemblyBegin(mat_elemental, MAT_FINAL_ASSEMBLY);CHKERRQ(ierr);
1074   ierr = MatAssemblyEnd(mat_elemental, MAT_FINAL_ASSEMBLY);CHKERRQ(ierr);
1075 
1076   if (reuse == MAT_INPLACE_MATRIX) {
1077     ierr = MatHeaderReplace(A,&mat_elemental);CHKERRQ(ierr);
1078   } else {
1079     *newmat = mat_elemental;
1080   }
1081   PetscFunctionReturn(0);
1082 }
1083 
#undef __FUNCT__
#define __FUNCT__ "MatConvert_SeqSBAIJ_Elemental"
/* Converts a MATSEQSBAIJ (symmetric, upper triangle stored) matrix to
   MATELEMENTAL. The stored upper triangle is inserted row-by-row, and the
   mirrored lower-triangle entries are generated explicitly. */
PETSC_INTERN PetscErrorCode MatConvert_SeqSBAIJ_Elemental(Mat A, MatType newtype,MatReuse reuse,Mat *newmat)
{
  Mat               mat_elemental;
  PetscErrorCode    ierr;
  PetscInt          M=A->rmap->N,N=A->cmap->N,row,ncols,j;
  const PetscInt    *cols;
  const PetscScalar *vals;

  PetscFunctionBegin;
  ierr = MatCreate(PetscObjectComm((PetscObject)A), &mat_elemental);CHKERRQ(ierr);
  ierr = MatSetSizes(mat_elemental,PETSC_DECIDE,PETSC_DECIDE,M,N);CHKERRQ(ierr);
  ierr = MatSetType(mat_elemental,MATELEMENTAL);CHKERRQ(ierr);
  ierr = MatSetUp(mat_elemental);CHKERRQ(ierr);
  ierr = MatGetRowUpperTriangular(A);CHKERRQ(ierr);
  for (row=0; row<M; row++) {
    ierr = MatGetRow(A,row,&ncols,&cols,&vals);CHKERRQ(ierr);
    /* PETSc-Elemental interface uses axpy for setting off-processor entries, only ADD_VALUES is allowed */
    ierr = MatSetValues(mat_elemental,1,&row,ncols,cols,vals,ADD_VALUES);CHKERRQ(ierr);
    for (j=0; j<ncols; j++) { /* lower triangular part: mirror (row,col) to (col,row), skipping the diagonal */
      if (cols[j] == row) continue;
      ierr = MatSetValues(mat_elemental,1,&cols[j],1,&row,&vals[j],ADD_VALUES);CHKERRQ(ierr);
    }
    ierr = MatRestoreRow(A,row,&ncols,&cols,&vals);CHKERRQ(ierr);
  }
  ierr = MatRestoreRowUpperTriangular(A);CHKERRQ(ierr);
  ierr = MatAssemblyBegin(mat_elemental, MAT_FINAL_ASSEMBLY);CHKERRQ(ierr);
  ierr = MatAssemblyEnd(mat_elemental, MAT_FINAL_ASSEMBLY);CHKERRQ(ierr);

  if (reuse == MAT_INPLACE_MATRIX) {
    ierr = MatHeaderReplace(A,&mat_elemental);CHKERRQ(ierr);
  } else {
    *newmat = mat_elemental;
  }
  PetscFunctionReturn(0);
}
1121 
#undef __FUNCT__
#define __FUNCT__ "MatConvert_MPISBAIJ_Elemental"
/* Converts a MATMPISBAIJ (symmetric, upper triangle stored) matrix to
   MATELEMENTAL. Each process inserts the rows it owns and generates the
   mirrored lower-triangle entries explicitly. */
PETSC_INTERN PetscErrorCode MatConvert_MPISBAIJ_Elemental(Mat A, MatType newtype,MatReuse reuse,Mat *newmat)
{
  Mat               mat_elemental;
  PetscErrorCode    ierr;
  PetscInt          M=A->rmap->N,N=A->cmap->N,row,ncols,j,rstart=A->rmap->rstart,rend=A->rmap->rend;
  const PetscInt    *cols;
  const PetscScalar *vals;

  PetscFunctionBegin;
  ierr = MatCreate(PetscObjectComm((PetscObject)A), &mat_elemental);CHKERRQ(ierr);
  ierr = MatSetSizes(mat_elemental,PETSC_DECIDE,PETSC_DECIDE,M,N);CHKERRQ(ierr);
  ierr = MatSetType(mat_elemental,MATELEMENTAL);CHKERRQ(ierr);
  ierr = MatSetUp(mat_elemental);CHKERRQ(ierr);
  ierr = MatGetRowUpperTriangular(A);CHKERRQ(ierr);
  for (row=rstart; row<rend; row++) {
    ierr = MatGetRow(A,row,&ncols,&cols,&vals);CHKERRQ(ierr);
    /* PETSc-Elemental interface uses axpy for setting off-processor entries, only ADD_VALUES is allowed */
    ierr = MatSetValues(mat_elemental,1,&row,ncols,cols,vals,ADD_VALUES);CHKERRQ(ierr);
    for (j=0; j<ncols; j++) { /* lower triangular part: mirror (row,col) to (col,row), skipping the diagonal */
      if (cols[j] == row) continue;
      ierr = MatSetValues(mat_elemental,1,&cols[j],1,&row,&vals[j],ADD_VALUES);CHKERRQ(ierr);
    }
    ierr = MatRestoreRow(A,row,&ncols,&cols,&vals);CHKERRQ(ierr);
  }
  ierr = MatRestoreRowUpperTriangular(A);CHKERRQ(ierr);
  ierr = MatAssemblyBegin(mat_elemental, MAT_FINAL_ASSEMBLY);CHKERRQ(ierr);
  ierr = MatAssemblyEnd(mat_elemental, MAT_FINAL_ASSEMBLY);CHKERRQ(ierr);

  if (reuse == MAT_INPLACE_MATRIX) {
    ierr = MatHeaderReplace(A,&mat_elemental);CHKERRQ(ierr);
  } else {
    *newmat = mat_elemental;
  }
  PetscFunctionReturn(0);
}
1159 
#undef __FUNCT__
#define __FUNCT__ "MatDestroy_Elemental"
/* Destroys the Elemental matrix data and releases this matrix's reference to
   the per-communicator El::Grid cached as an MPI attribute; the grid (and the
   keyval) are freed when the last referencing matrix is destroyed. */
static PetscErrorCode MatDestroy_Elemental(Mat A)
{
  Mat_Elemental      *a = (Mat_Elemental*)A->data;
  PetscErrorCode     ierr;
  Mat_Elemental_Grid *commgrid;
  PetscBool          flg;
  MPI_Comm           icomm;

  PetscFunctionBegin;
  delete a->emat;
  delete a->pivot;

  /* Look up the cached grid on PETSc's inner communicator (the same one
     MatCreate_Elemental() attached it to). NOTE(review): flg is not checked;
     this relies on the attribute always having been set during creation. */
  El::mpi::Comm cxxcomm(PetscObjectComm((PetscObject)A));
  ierr = PetscCommDuplicate(cxxcomm.comm,&icomm,NULL);CHKERRQ(ierr);
  ierr = MPI_Attr_get(icomm,Petsc_Elemental_keyval,(void**)&commgrid,(int*)&flg);CHKERRQ(ierr);
  if (--commgrid->grid_refct == 0) {
    delete commgrid->grid;
    ierr = PetscFree(commgrid);CHKERRQ(ierr);
    ierr = MPI_Keyval_free(&Petsc_Elemental_keyval);CHKERRQ(ierr);
  }
  ierr = PetscCommDestroy(&icomm);CHKERRQ(ierr);
  /* remove the composed methods installed by MatCreate_Elemental()/MatGetFactor */
  ierr = PetscObjectComposeFunction((PetscObject)A,"MatGetOwnershipIS_C",NULL);CHKERRQ(ierr);
  ierr = PetscObjectComposeFunction((PetscObject)A,"MatFactorGetSolverPackage_C",NULL);CHKERRQ(ierr);
  ierr = PetscObjectComposeFunction((PetscObject)A,"MatElementalHermitianGenDefEig_C",NULL);CHKERRQ(ierr);
  ierr = PetscObjectComposeFunction((PetscObject)A,"MatElementalSyrk_C",NULL);CHKERRQ(ierr);
  ierr = PetscFree(A->data);CHKERRQ(ierr);
  PetscFunctionReturn(0);
}
1190 
1191 #undef __FUNCT__
1192 #define __FUNCT__ "MatSetUp_Elemental"
1193 PetscErrorCode MatSetUp_Elemental(Mat A)
1194 {
1195   Mat_Elemental  *a = (Mat_Elemental*)A->data;
1196   PetscErrorCode ierr;
1197   PetscMPIInt    rsize,csize;
1198 
1199   PetscFunctionBegin;
1200   ierr = PetscLayoutSetUp(A->rmap);CHKERRQ(ierr);
1201   ierr = PetscLayoutSetUp(A->cmap);CHKERRQ(ierr);
1202 
1203   a->emat->Resize(A->rmap->N,A->cmap->N);CHKERRQ(ierr);
1204   El::Zero(*a->emat);
1205 
1206   ierr = MPI_Comm_size(A->rmap->comm,&rsize);CHKERRQ(ierr);
1207   ierr = MPI_Comm_size(A->cmap->comm,&csize);CHKERRQ(ierr);
1208   if (csize != rsize) SETERRQ(PetscObjectComm((PetscObject)A),PETSC_ERR_ARG_INCOMP,"Cannot use row and column communicators of different sizes");
1209   a->commsize = rsize;
1210   a->mr[0] = A->rmap->N % rsize; if (!a->mr[0]) a->mr[0] = rsize;
1211   a->mr[1] = A->cmap->N % csize; if (!a->mr[1]) a->mr[1] = csize;
1212   a->m[0]  = A->rmap->N / rsize + (a->mr[0] != rsize);
1213   a->m[1]  = A->cmap->N / csize + (a->mr[1] != csize);
1214   PetscFunctionReturn(0);
1215 }
1216 
#undef __FUNCT__
#define __FUNCT__ "MatAssemblyBegin_Elemental"
/* Flushes Elemental's queued updates (including entries destined for other
   processes) into the distributed matrix via ProcessQueues(). */
PetscErrorCode MatAssemblyBegin_Elemental(Mat A, MatAssemblyType type)
{
  Mat_Elemental  *a = (Mat_Elemental*)A->data;

  PetscFunctionBegin;
  a->emat->ProcessQueues();
  PetscFunctionReturn(0);
}
1229 
#undef __FUNCT__
#define __FUNCT__ "MatAssemblyEnd_Elemental"
/* All assembly work is done in MatAssemblyBegin_Elemental(); nothing to do here. */
PetscErrorCode MatAssemblyEnd_Elemental(Mat A, MatAssemblyType type)
{
  PetscFunctionBegin;
  /* Currently does nothing */
  PetscFunctionReturn(0);
}
1238 
#undef __FUNCT__
#define __FUNCT__ "MatLoad_Elemental"
/* Loads the matrix by reading it from the viewer as MATDENSE and then
   converting the result to MATELEMENTAL, replacing newMat in place. */
PetscErrorCode MatLoad_Elemental(Mat newMat, PetscViewer viewer)
{
  PetscErrorCode ierr;
  Mat            Adense,Ae;
  MPI_Comm       comm;

  PetscFunctionBegin;
  ierr = PetscObjectGetComm((PetscObject)newMat,&comm);CHKERRQ(ierr);
  ierr = MatCreate(comm,&Adense);CHKERRQ(ierr);
  ierr = MatSetType(Adense,MATDENSE);CHKERRQ(ierr);
  ierr = MatLoad(Adense,viewer);CHKERRQ(ierr);
  ierr = MatConvert(Adense, MATELEMENTAL, MAT_INITIAL_MATRIX,&Ae);CHKERRQ(ierr);
  ierr = MatDestroy(&Adense);CHKERRQ(ierr);
  ierr = MatHeaderReplace(newMat,&Ae);CHKERRQ(ierr);
  PetscFunctionReturn(0);
}
1257 
#undef __FUNCT__
#define __FUNCT__ "MatElementalHermitianGenDefEig_Elemental"
/* Solves the Hermitian-definite generalized eigenproblem for the pencil (A,B)
   with El::HermitianGenDefEig, then wraps the resulting eigenvalue and
   eigenvector distributed matrices in newly created MATELEMENTAL Mats. */
PetscErrorCode MatElementalHermitianGenDefEig_Elemental(El::Pencil eigtype,El::UpperOrLower uplo,Mat A,Mat B,Mat *evals,Mat *evec,El::SortType sort,El::HermitianEigSubset<PetscElemScalar> subset,const El::HermitianEigCtrl<PetscElemScalar> ctrl)
{
  PetscErrorCode ierr;
  Mat_Elemental  *a=(Mat_Elemental*)A->data,*b=(Mat_Elemental*)B->data,*x;
  MPI_Comm       comm;
  Mat            EVAL;
  Mat_Elemental  *e;

  PetscFunctionBegin;
  /* Compute eigenvalues and eigenvectors */
  El::DistMatrix<PetscElemScalar,El::VR,El::STAR> w( *a->grid ); /* holding eigenvalues */
  El::DistMatrix<PetscElemScalar>                 X( *a->grid ); /* holding eigenvectors */
  El::HermitianGenDefEig(eigtype,uplo,*a->emat,*b->emat,w,X,sort,subset,ctrl);

  /* Wrap X in a new MATELEMENTAL matrix: create/assemble an empty Mat of the
     right size, then assign the Elemental result to its internal emat */
  ierr = PetscObjectGetComm((PetscObject)A,&comm);CHKERRQ(ierr);
  ierr = MatCreate(comm,evec);CHKERRQ(ierr);
  ierr = MatSetSizes(*evec,PETSC_DECIDE,PETSC_DECIDE,X.Height(),X.Width());CHKERRQ(ierr);
  ierr = MatSetType(*evec,MATELEMENTAL);CHKERRQ(ierr);
  ierr = MatSetFromOptions(*evec);CHKERRQ(ierr);
  ierr = MatSetUp(*evec);CHKERRQ(ierr);
  ierr = MatAssemblyBegin(*evec,MAT_FINAL_ASSEMBLY);CHKERRQ(ierr);
  ierr = MatAssemblyEnd(*evec,MAT_FINAL_ASSEMBLY);CHKERRQ(ierr);

  x = (Mat_Elemental*)(*evec)->data;
  *x->emat = X; /* Elemental assignment copies/redistributes the data */

  /* Same wrapping for the eigenvalue matrix w */
  ierr = MatCreate(comm,&EVAL);CHKERRQ(ierr);
  ierr = MatSetSizes(EVAL,PETSC_DECIDE,PETSC_DECIDE,w.Height(),w.Width());CHKERRQ(ierr);
  ierr = MatSetType(EVAL,MATELEMENTAL);CHKERRQ(ierr);
  ierr = MatSetFromOptions(EVAL);CHKERRQ(ierr);
  ierr = MatSetUp(EVAL);CHKERRQ(ierr);
  ierr = MatAssemblyBegin(EVAL,MAT_FINAL_ASSEMBLY);CHKERRQ(ierr);
  ierr = MatAssemblyEnd(EVAL,MAT_FINAL_ASSEMBLY);CHKERRQ(ierr);
  e         = (Mat_Elemental*)EVAL->data;
  *e->emat = w;
  *evals   = EVAL;
  PetscFunctionReturn(0);
}
1299 
#undef __FUNCT__
#define __FUNCT__ "MatElementalHermitianGenDefEig"
/*@
  MatElementalHermitianGenDefEig - Compute the set of eigenvalues of the Hermitian-definite matrix pencil determined by the subset structure

   Logically Collective on Mat

   Input Parameters:
+  type   - Elemental pencil type
.  uplo   - which triangle of the matrices to reference
.  A,B    - the MATELEMENTAL matrices defining the pencil
.  sort   - eigenvalue sort order
.  subset - structure selecting which eigenpairs to compute
-  ctrl   - Elemental eigensolver control parameters

   Output Parameters:
+  evals - newly created MATELEMENTAL matrix holding the eigenvalues
-  evec  - newly created MATELEMENTAL matrix holding the eigenvectors

   Level: beginner

   References:
.      Elemental Users' Guide

@*/
PetscErrorCode MatElementalHermitianGenDefEig(El::Pencil type,El::UpperOrLower uplo,Mat A,Mat B,Mat *evals,Mat *evec,El::SortType sort,El::HermitianEigSubset<PetscElemScalar> subset,const El::HermitianEigCtrl<PetscElemScalar> ctrl)
{
  PetscErrorCode ierr;

  PetscFunctionBegin;
  ierr = PetscUseMethod(A,"MatElementalHermitianGenDefEig_C",(El::Pencil,El::UpperOrLower,Mat,Mat,Mat*,Mat*,El::SortType,El::HermitianEigSubset<PetscElemScalar>,const El::HermitianEigCtrl<PetscElemScalar>),(type,uplo,A,B,evals,evec,sort,subset,ctrl));CHKERRQ(ierr);
  PetscFunctionReturn(0);
}
1321 
/* -------------------------------------------------------------------*/
/* Function table for MATELEMENTAL. Entries are positional (the numbered
   comments give the slot index in struct _MatOps); a 0 marks an operation
   this type does not implement. */
static struct _MatOps MatOps_Values = {
       MatSetValues_Elemental,
       0,
       0,
       MatMult_Elemental,
/* 4*/ MatMultAdd_Elemental,
       MatMultTranspose_Elemental,
       MatMultTransposeAdd_Elemental,
       MatSolve_Elemental,
       MatSolveAdd_Elemental,
       0,
/*10*/ 0,
       MatLUFactor_Elemental,
       MatCholeskyFactor_Elemental,
       0,
       MatTranspose_Elemental,
/*15*/ MatGetInfo_Elemental,
       0,
       MatGetDiagonal_Elemental,
       MatDiagonalScale_Elemental,
       MatNorm_Elemental,
/*20*/ MatAssemblyBegin_Elemental,
       MatAssemblyEnd_Elemental,
       MatSetOption_Elemental,
       MatZeroEntries_Elemental,
/*24*/ 0,
       MatLUFactorSymbolic_Elemental,
       MatLUFactorNumeric_Elemental,
       MatCholeskyFactorSymbolic_Elemental,
       MatCholeskyFactorNumeric_Elemental,
/*29*/ MatSetUp_Elemental,
       0,
       0,
       0,
       0,
/*34*/ MatDuplicate_Elemental,
       0,
       0,
       0,
       0,
/*39*/ MatAXPY_Elemental,
       0,
       0,
       0,
       MatCopy_Elemental,
/*44*/ 0,
       MatScale_Elemental,
       MatShift_Basic,
       0,
       0,
/*49*/ 0,
       0,
       0,
       0,
       0,
/*54*/ 0,
       0,
       0,
       0,
       0,
/*59*/ 0,
       MatDestroy_Elemental,
       MatView_Elemental,
       0,
       0,
/*64*/ 0,
       0,
       0,
       0,
       0,
/*69*/ 0,
       0,
       MatConvert_Elemental_Dense,
       0,
       0,
/*74*/ 0,
       0,
       0,
       0,
       0,
/*79*/ 0,
       0,
       0,
       0,
       MatLoad_Elemental,
/*84*/ 0,
       0,
       0,
       0,
       0,
/*89*/ MatMatMult_Elemental,
       MatMatMultSymbolic_Elemental,
       MatMatMultNumeric_Elemental,
       0,
       0,
/*94*/ 0,
       MatMatTransposeMult_Elemental,
       MatMatTransposeMultSymbolic_Elemental,
       MatMatTransposeMultNumeric_Elemental,
       0,
/*99*/ 0,
       0,
       0,
       MatConjugate_Elemental,
       0,
/*104*/0,
       0,
       0,
       0,
       0,
/*109*/MatMatSolve_Elemental,
       0,
       0,
       0,
       0,
/*114*/0,
       0,
       0,
       0,
       0,
/*119*/0,
       MatHermitianTranspose_Elemental,
       0,
       0,
       0,
/*124*/0,
       0,
       0,
       0,
       0,
/*129*/0,
       0,
       0,
       0,
       0,
/*134*/0,
       0,
       0,
       0,
       0
};
1464 
1465 /*MC
1466    MATELEMENTAL = "elemental" - A matrix type for dense matrices using the Elemental package
1467 
1468   Use ./configure --download-elemental to install PETSc to use Elemental
1469 
  Use -pc_type lu -pc_factor_mat_solver_package elemental to use this direct solver
1471 
1472    Options Database Keys:
1473 + -mat_type elemental - sets the matrix type to "elemental" during a call to MatSetFromOptions()
1474 - -mat_elemental_grid_height - sets Grid Height for 2D cyclic ordering of internal matrix
1475 
1476   Level: beginner
1477 
1478 .seealso: MATDENSE
1479 M*/
1480 
#undef __FUNCT__
#define __FUNCT__ "MatCreate_Elemental"
/* Constructor for MATELEMENTAL: installs the function table, creates (or
   reuses) the per-communicator El::Grid cached as an MPI attribute, and
   allocates the distributed matrix and pivot storage on that grid. */
PETSC_EXTERN PetscErrorCode MatCreate_Elemental(Mat A)
{
  Mat_Elemental      *a;
  PetscErrorCode     ierr;
  PetscBool          flg,flg1;
  Mat_Elemental_Grid *commgrid;
  MPI_Comm           icomm;
  PetscInt           optv1;

  PetscFunctionBegin;
  ierr = PetscElementalInitializePackage();CHKERRQ(ierr);
  ierr = PetscMemcpy(A->ops,&MatOps_Values,sizeof(struct _MatOps));CHKERRQ(ierr);
  A->insertmode = NOT_SET_VALUES;

  ierr = PetscNewLog(A,&a);CHKERRQ(ierr);
  A->data = (void*)a;

  /* Set up the elemental matrix */
  El::mpi::Comm cxxcomm(PetscObjectComm((PetscObject)A));

  /* Grid needs to be shared between multiple Mats on the same communicator, implement by attribute caching on the MPI_Comm.
     NOTE(review): MPI_Keyval_create/MPI_Attr_get/MPI_Attr_put are deprecated MPI-1 names; the MPI-2 replacements are noted below. */
  if (Petsc_Elemental_keyval == MPI_KEYVAL_INVALID) {
    ierr = MPI_Keyval_create(MPI_NULL_COPY_FN,MPI_NULL_DELETE_FN,&Petsc_Elemental_keyval,(void*)0);CHKERRQ(ierr);
    /* ierr = MPI_Comm_create_Keyval(MPI_NULL_COPY_FN,MPI_NULL_DELETE_FN,&Petsc_Elemental_keyval,(void*)0); -- new version? */
  }
  ierr = PetscCommDuplicate(cxxcomm.comm,&icomm,NULL);CHKERRQ(ierr);
  ierr = MPI_Attr_get(icomm,Petsc_Elemental_keyval,(void**)&commgrid,(int*)&flg);CHKERRQ(ierr);
  if (!flg) {
    /* First MATELEMENTAL on this communicator: create the grid and cache it */
    ierr = PetscNewLog(A,&commgrid);CHKERRQ(ierr);

    ierr = PetscOptionsBegin(PetscObjectComm((PetscObject)A),((PetscObject)A)->prefix,"Elemental Options","Mat");CHKERRQ(ierr);
    /* displayed default grid sizes (CommSize,1) are set by us arbitrarily until El::Grid() is called */
    ierr = PetscOptionsInt("-mat_elemental_grid_height","Grid Height","None",El::mpi::Size(cxxcomm),&optv1,&flg1);CHKERRQ(ierr);
    if (flg1) {
      if (El::mpi::Size(cxxcomm) % optv1 != 0) {
        SETERRQ2(PetscObjectComm((PetscObject)A),PETSC_ERR_ARG_INCOMP,"Grid Height %D must evenly divide CommSize %D",optv1,(PetscInt)El::mpi::Size(cxxcomm));
      }
      commgrid->grid = new El::Grid(cxxcomm,optv1); /* use user-provided grid height */
    } else {
      commgrid->grid = new El::Grid(cxxcomm); /* use Elemental default grid sizes */
    }
    commgrid->grid_refct = 1;
    ierr = MPI_Attr_put(icomm,Petsc_Elemental_keyval,(void*)commgrid);CHKERRQ(ierr);
    ierr = PetscOptionsEnd();CHKERRQ(ierr);
  } else {
    /* Grid already cached on this communicator: just take a reference */
    commgrid->grid_refct++;
  }
  ierr = PetscCommDestroy(&icomm);CHKERRQ(ierr);
  a->grid      = commgrid->grid;
  a->emat      = new El::DistMatrix<PetscElemScalar>(*a->grid);
  a->pivot     = new El::DistMatrix<PetscInt,El::VC,El::STAR>(*a->grid);
  a->roworiented = PETSC_TRUE;

  ierr = PetscObjectComposeFunction((PetscObject)A,"MatGetOwnershipIS_C",MatGetOwnershipIS_Elemental);CHKERRQ(ierr);
  ierr = PetscObjectComposeFunction((PetscObject)A,"MatElementalHermitianGenDefEig_C",MatElementalHermitianGenDefEig_Elemental);CHKERRQ(ierr);
  ierr = PetscObjectComposeFunction((PetscObject)A,"MatElementalSyrk_C",MatElementalSyrk_Elemental);CHKERRQ(ierr);

  ierr = PetscObjectChangeTypeName((PetscObject)A,MATELEMENTAL);CHKERRQ(ierr);
  PetscFunctionReturn(0);
}
1544