xref: /petsc/src/dm/impls/moab/dmmbutil.cxx (revision c8d5365d6c6c3fe4d8ab5d2f7a087b41450baa0c)
1 #include <petsc-private/dmmbimpl.h> /*I  "petscdm.h"   I*/
2 #include <petsc-private/vecimpl.h> /*I  "petscdm.h"   I*/
3 
4 #include <petscdmmoab.h>
5 #include <MBTagConventions.hpp>
6 #include <moab/ReadUtilIface.hpp>
7 #include <moab/ScdInterface.hpp>
8 #include <moab/CN.hpp>
9 
10 
11 #undef __FUNCT__
12 #define __FUNCT__ "DMMoabComputeDomainBounds_Private"
13 PetscErrorCode DMMoabComputeDomainBounds_Private(moab::ParallelComm* pcomm, PetscInt dim, PetscInt neleglob, PetscInt *ise)
14 {
15   PetscInt size,rank;
16   PetscInt fraction,remainder;
17   PetscInt starts[3],sizes[3];
18 
19   PetscFunctionBegin;
20   if(dim<1 && dim>3) SETERRQ1(PETSC_COMM_WORLD,PETSC_ERR_ARG_OUTOFRANGE,"The problem dimension is invalid: %D",dim);
21 
22   size=pcomm->size();
23   rank=pcomm->rank();
24   fraction=neleglob/size;    /* partition only by the largest dimension */
25   remainder=neleglob%size;   /* remainder after partition which gets evenly distributed by round-robin */
26 
27   if(fraction==0) SETERRQ2(PETSC_COMM_WORLD,PETSC_ERR_ARG_OUTOFRANGE,"The leading dimension size should be greater than number of processors: %D < %D",neleglob,size);
28 
29   starts[0]=starts[1]=starts[2]=0;       /* default dimensional element = 1 */
30   sizes[0]=sizes[1]=sizes[2]=neleglob;   /* default dimensional element = 1 */
31 
32   if(rank < remainder) {
33     /* This process gets "fraction+1" elements */
34     sizes[dim-1] = (fraction + 1);
35     starts[dim-1] = rank * (fraction+1);
36   } else {
37     /* This process gets "fraction" elements */
38     sizes[dim-1] = fraction;
39     starts[dim-1] = (remainder*(fraction+1) + fraction*(rank-remainder));
40   }
41 
42   for(int i=dim-1; i>=0; --i) {
43     ise[2*i]=starts[i];ise[2*i+1]=starts[i]+sizes[i];
44   }
45 
46   PetscFunctionReturn(0);
47 }
48 
49 static void set_structured_coordinates(PetscInt i, PetscInt j, PetscInt k, PetscReal hxyz, PetscInt vcount, std::vector<double*>& vcoords)
50 {
51   vcoords[0][vcount] = i*hxyz;
52   vcoords[1][vcount] = j*hxyz;
53   vcoords[2][vcount] = k*hxyz;
54 //  PetscPrintf(PETSC_COMM_SELF, " vertex - %D, %D, %D - [%G, %G]\n", i, j, k, vcoords[0][vcount], vcoords[1][vcount]);
55 }
56 
57 static void set_element_connectivity(PetscInt dim, moab::EntityType etype, PetscInt offset, PetscInt nele, PetscInt i, PetscInt j, PetscInt k, PetscInt vfirst, moab::EntityHandle *connectivity)
58 {
59   std::vector<int>    subent_conn(pow(2,dim));  /* only linear edge, quad, hex supported now */
60 
61   moab::CN::SubEntityVertexIndices(etype, dim, 0, subent_conn.data());
62 
63   switch(dim) {
64     case 1:
65       connectivity[offset+subent_conn[0]] = vfirst+i;
66       connectivity[offset+subent_conn[1]] = vfirst+(i+1);
67 //            PetscPrintf(PETSC_COMM_SELF, "[%D] ELEMENT[%D,%D,%D]: CONNECTIVITY = %D, %D\n", pcomm->rank(), i,j,k,connectivity[offset+subent_conn[0]], connectivity[offset+subent_conn[1]]);
68       break;
69     case 2:
70       connectivity[offset+subent_conn[0]] = vfirst+i+j*(nele+1);
71       connectivity[offset+subent_conn[1]] = vfirst+(i+1)+j*(nele+1);
72       connectivity[offset+subent_conn[2]] = vfirst+(i+1)+(j+1)*(nele+1);
73       connectivity[offset+subent_conn[3]] = vfirst+i+(j+1)*(nele+1);
74 //            PetscPrintf(PETSC_COMM_SELF, "[%D] ELEMENT[%D,%D,%D]: CONNECTIVITY = %D, %D, %D, %D\n", pcomm->rank(), i,j,k,connectivity[offset+subent_conn[0]], connectivity[offset+subent_conn[1]], connectivity[offset+subent_conn[2]], connectivity[offset+subent_conn[3]]);
75       break;
76     case 3:
77     default:
78       connectivity[offset+subent_conn[0]] = vfirst+i+(nele+1)*(j+(nele+1)*k);
79       connectivity[offset+subent_conn[1]] = vfirst+(i+1)+(nele+1)*(j+(nele+1)*k);
80       connectivity[offset+subent_conn[2]] = vfirst+(i+1)+(nele+1)*((j+1)+(nele+1)*k);
81       connectivity[offset+subent_conn[3]] = vfirst+i+(nele+1)*((j+1)+(nele+1)*k);
82       connectivity[offset+subent_conn[4]] = vfirst+i+(nele+1)*(j+(nele+1)*(k+1));
83       connectivity[offset+subent_conn[5]] = vfirst+(i+1)+(nele+1)*(j+(nele+1)*(k+1));
84       connectivity[offset+subent_conn[6]] = vfirst+(i+1)+(nele+1)*((j+1)+(nele+1)*(k+1));
85       connectivity[offset+subent_conn[7]] = vfirst+i+(nele+1)*((j+1)+(nele+1)*(k+1));
86       break;
87   }
88 
89 }
90 
#undef __FUNCT__
#define __FUNCT__ "DMMoabCreateBoxMesh"
/*
  DMMoabCreateBoxMesh - Create a DM of type DMMOAB over a structured box mesh
  of the unit domain [0,1]^dim with 'nele' elements per coordinate direction.

  Collective on comm.

  Input Parameters:
+ comm   - the MPI communicator
. dim    - spatial dimension (1, 2 or 3); edge/quad/hex elements respectively
. nele   - number of elements in each coordinate direction
- nghost - number of ghost element layers requested between neighbors

  Output Parameter:
. dm - the new DM

  Notes:
  The domain is partitioned only along the last dimension (see
  DMMoabComputeDomainBounds_Private). Each rank builds its owned vertex/element
  slab plus the ghost layers below and above it, tags vertices with global IDs,
  and finally resolves shared entities and exchanges tags collectively via MOAB.
*/
PetscErrorCode DMMoabCreateBoxMesh(MPI_Comm comm, PetscInt dim, PetscInt nele, PetscInt nghost, DM *dm)
{
  PetscErrorCode  ierr;
  moab::ErrorCode merr;
  PetscInt        i,j,k,n,nprocs,rank;
  DM_Moab        *dmmoab;
  moab::Interface *mbiface;
  moab::ParallelComm *pcomm;
  moab::ReadUtilIface* readMeshIface;

  moab::Tag  id_tag=PETSC_NULL;
  moab::Range         ownedvtx,ownedelms;
  moab::EntityHandle  vfirst,efirst;
  std::vector<double*> vcoords;
  moab::EntityHandle  *connectivity = 0;
  moab::EntityType etype;
  PetscInt    ise[6];     /* [start,end) element indices per dimension for this rank */
  PetscReal   xse[6];     /* same bounds mapped to physical coordinates in [0,1] */

  const PetscInt npts=nele+1;        /* Number of points in every dimension */
  PetscInt vpere,locnele,locnpts,ghnele,ghnpts;    /* Number of verts/element, vertices, elements owned by this process */

  PetscFunctionBegin;
  if(dim < 1 || dim > 3) SETERRQ(PETSC_COMM_WORLD,PETSC_ERR_ARG_OUTOFRANGE,"Invalid dimension argument for mesh: dim=[1,3].\n");

  ierr = MPI_Comm_size(comm, &nprocs);CHKERRQ(ierr);
  /* total number of vertices in all dimensions */
  n=pow(npts,dim);

  /* do some error checking */
  if(n < 2) SETERRQ(PETSC_COMM_WORLD,PETSC_ERR_ARG_OUTOFRANGE,"Number of points must be >= 2.\n");
  if(nprocs > n) SETERRQ(PETSC_COMM_WORLD,PETSC_ERR_ARG_OUTOFRANGE,"Number of processors must be less than or equal to number of elements.\n");
  if(nghost < 0) SETERRQ(PETSC_COMM_WORLD,PETSC_ERR_ARG_OUTOFRANGE,"Number of ghost layers cannot be negative.\n");

  /* Create the basic DMMoab object and keep the default parameters created by DM impls */
  ierr = DMMoabCreateMoab(comm, PETSC_NULL, PETSC_NULL, PETSC_NULL, PETSC_NULL, dm);CHKERRQ(ierr);

  /* get all the necessary handles from the private DM object */
  dmmoab = (DM_Moab*)(*dm)->data;
  mbiface = dmmoab->mbiface;
  pcomm = dmmoab->pcomm;
  id_tag = dmmoab->ltog_tag;
  nprocs = pcomm->size();
  rank = pcomm->rank();
  dmmoab->dim = dim;

  /* No errors yet; proceed with building the mesh */
  merr = mbiface->query_interface(readMeshIface);MBERRNM(merr);

  ierr = PetscMemzero(ise,sizeof(PetscInt)*6);CHKERRQ(ierr);

  /* call the collective routine that computes the domain bounds for a structured mesh using MOAB */
  ierr = DMMoabComputeDomainBounds_Private(pcomm, dim, nele, ise);CHKERRQ(ierr);

  /* set some variables based on dimension: element type, vertices per element,
     and the counts of locally owned (locn*) and ghosted (ghn*) entities.
     Only the last dimension is partitioned, so a ghost layer is a full
     (dim-1)-dimensional plane of elements/vertices on each exposed side. */
  switch(dim) {
   case 1:
    vpere = 2;
    locnele = (ise[1]-ise[0]);
    locnpts = (ise[1]-ise[0]+1);
    ghnele = (nghost > 0 ? (ise[0] > nghost ? 1 : 0) + (ise[1] < nele - nghost ? 1 : 0) : 0 );
    ghnpts = (nghost > 0 ? (ise[0] > 0 ? 1 : 0) + (ise[1] < nele ? 1 : 0) : 0);
    etype = moab::MBEDGE;
    break;
   case 2:
    vpere = 4;
    locnele = (ise[1]-ise[0])*(ise[3]-ise[2]);
    locnpts = (ise[1]-ise[0]+1)*(ise[3]-ise[2]+1);
    ghnele = (nghost > 0 ? (ise[2] > 0 ? nele : 0) + (ise[3] < nele ? nele : 0) : 0);
    ghnpts = (nghost > 0 ? (ise[2] > 0 ? npts : 0) + (ise[3] < nele ? npts : 0) : 0);
    etype = moab::MBQUAD;
    break;
   case 3:
    vpere = 8;
    locnele = (ise[1]-ise[0])*(ise[3]-ise[2])*(ise[5]-ise[4]);
    locnpts = (ise[1]-ise[0]+1)*(ise[3]-ise[2]+1)*(ise[5]-ise[4]+1);
    ghnele = (nghost > 0 ? (ise[4] > 0 ? nele*nele : 0) + (ise[5] < nele ? nele*nele : 0) : 0);
    ghnpts = (nghost > 0 ? (ise[4] > 0 ? npts*npts : 0) + (ise[5] < nele ? npts*npts : 0) : 0);
    etype = moab::MBHEX;
    break;
  }
//  PetscPrintf(PETSC_COMM_SELF, "[%D] Element Partition Indices -[%D - %D], [%D - %D], [%D - %D]\n", rank, ise[0], ise[1], ise[2], ise[3], ise[4], ise[5]);

  /* we have a domain of size [1,1,1] - now compute local co-ordinate box */
  ierr = PetscMemzero(xse,sizeof(PetscReal)*6);CHKERRQ(ierr);
  for(i=0; i<6; ++i) {
    xse[i]=(PetscReal)ise[i]/nele;
  }

  /* Create vertices and set the coordinate of each vertex:
     MOAB hands back 'vfirst' (first vertex handle) and direct pointers into
     its internal x/y/z coordinate arrays via vcoords */
  merr = readMeshIface->get_node_coords(3,locnpts+ghnpts,0,vfirst,vcoords,n);MBERRNM(merr);

  /* Compute the co-ordinates of vertices and global IDs */
  std::vector<int>    vgid(locnpts+ghnpts);
  int vcount=0;
  double hxyz=1.0/nele;   /* uniform grid spacing on the unit domain */

  /* create all the owned vertices */
  for (k = ise[4]; k <= ise[5]; k++) {
    for (j = ise[2]; j <= ise[3]; j++) {
      for (i = ise[0]; i <= ise[1]; i++, vcount++) {
//        PetscPrintf(PETSC_COMM_SELF, "[%D] creating owned", rank);
        set_structured_coordinates(i,j,k,hxyz,vcount,vcoords);
        /* global ID is the 1-based lexicographic index of the vertex */
        vgid[vcount] = (k*npts+j)*npts+i+1;
      }
    }
  }

  /* create ghosted vertices requested by user - below the current plane
     (the loop bounds collapse to the owned range in the non-partitioned
     dimensions, and to the nghost ghost planes in the partitioned one) */
  if (ise[2*dim-2] > 0) {
    for (k = (dim==3?ise[4]-nghost:ise[4]); k <= (dim==3?ise[4]-1:ise[5]); k++) {
      for (j = (dim==2?ise[2]-nghost:ise[2]); j <= (dim==2?ise[2]-1:ise[3]); j++) {
        for (i = (dim>1?ise[0]:ise[0]-nghost); i <= (dim>1?ise[1]:ise[0]-1); i++, vcount++) {
//          PetscPrintf(PETSC_COMM_SELF, "[%D] creating ghost", rank);
          set_structured_coordinates(i,j,k,hxyz,vcount,vcoords);
          vgid[vcount] = (k*npts+j)*npts+i+1;
        }
      }
    }
  }

  /* create ghosted vertices requested by user - above the current plane */
  if (ise[2*dim-1] < nele) {
    for (k = (dim==3?ise[5]+1:ise[4]); k <= (dim==3?ise[5]+nghost:ise[5]); k++) {
      for (j = (dim==2?ise[3]+1:ise[2]); j <= (dim==2?ise[3]+nghost:ise[3]); j++) {
        for (i = (dim>1?ise[0]:ise[1]+1); i <= (dim>1?ise[1]:ise[1]+nghost); i++, vcount++) {
//          PetscPrintf(PETSC_COMM_SELF, "[%D] creating ghost", rank);
          set_structured_coordinates(i,j,k,hxyz,vcount,vcoords);
          vgid[vcount] = (k*npts+j)*npts+i+1;
        }
      }
    }
  }

  if (locnpts+ghnpts != vcount) SETERRQ2(PETSC_COMM_WORLD,PETSC_ERR_ARG_OUTOFRANGE,"Created the wrong number of vertices! (%D!=%D)",locnpts+ghnpts,vcount);

  /* gather all vertices created on this rank and place them in the DM's file set */
  merr = mbiface->get_entities_by_type(0,moab::MBVERTEX,ownedvtx,true);MBERRNM(merr);
  merr = mbiface->add_entities (dmmoab->fileset, ownedvtx);MBERRNM(merr);
//  merr = mbiface->get_entities_by_type(dmmoab->fileset,moab::MBVERTEX,ownedvtx,true);MBERRNM(merr);

  /* The global ID tag is applied to each owned
     vertex. It acts as an global identifier which MOAB uses to
     assemble the individual pieces of the mesh:
     Set the global ID indices */
  merr = mbiface->tag_set_data(id_tag,ownedvtx,vgid.data());MBERRNM(merr);

  /* Create elements between mesh points using the ReadUtilInterface
     get the reference to element connectivities for all local elements from the ReadUtilInterface */
  merr = readMeshIface->get_element_connect (locnele+ghnele,vpere,etype,1,efirst,connectivity);MBERRNM(merr);

  /* offset appropriately so that only local ID and not global ID numbers are set for connectivity array */
//  PetscPrintf(PETSC_COMM_SELF, "[%D] first local handle %D\n", rank, vfirst);
  vfirst-=vgid[0]-1;

   /* 3. Loop over elements in 3 nested loops over i, j, k; for each (i,j,k):
         and then set the connectivity for each element appropriately */
  int ecount=0;

  /* create ghosted elements requested by user - below the current plane */
  if (ise[2*dim-2] >= nghost) {
    for (k = (dim==3?ise[4]-nghost:ise[4]); k < (dim==3?ise[4]:std::max(ise[5],1)); k++) {
      for (j = (dim==2?ise[2]-nghost:ise[2]); j < (dim==2?ise[2]:std::max(ise[3],1)); j++) {
        for (i = (dim>1?ise[0]:ise[0]-nghost); i < (dim>1?std::max(ise[1],1):ise[0]); i++, ecount++) {
//          PetscPrintf(PETSC_COMM_SELF, "[%D] creating ghost element - %D, %D, %D\n", rank, i, j, k);
          set_element_connectivity(dim, etype, ecount*vpere, nele, i, j, k, vfirst, connectivity);
        }
      }
    }
  }

  /* create owned elements requested by user */
  for (k = ise[4]; k < std::max(ise[5],1); k++) {
    for (j = ise[2]; j < std::max(ise[3],1); j++) {
      for (i = ise[0]; i < std::max(ise[1],1); i++,ecount++) {
//        PetscPrintf(PETSC_COMM_SELF, "[%D] creating owned element - %D, %D, %D\n", rank, i, j, k);
        set_element_connectivity(dim, etype, ecount*vpere, nele, i, j, k, vfirst, connectivity);
      }
    }
  }

  /* create ghosted elements requested by user - above the current plane */
  if (ise[2*dim-1] <= nele-nghost) {
    for (k = (dim==3?ise[5]:ise[4]); k < (dim==3?ise[5]+nghost:std::max(ise[5],1)); k++) {
      for (j = (dim==2?ise[3]:ise[2]); j < (dim==2?ise[3]+nghost:std::max(ise[3],1)); j++) {
        for (i = (dim>1?ise[0]:ise[1]); i < (dim>1?std::max(ise[1],1):ise[1]+nghost); i++, ecount++) {
//          PetscPrintf(PETSC_COMM_SELF, "[%D] creating ghost element - %D, %D, %D\n", rank, i, j, k);
          set_element_connectivity(dim, etype, ecount*vpere, nele, i, j, k, vfirst, connectivity);
        }
      }
    }
  }

  /* let MOAB build element-to-vertex adjacencies for the new elements */
  merr = readMeshIface->update_adjacencies(efirst,locnele+ghnele,vpere,connectivity);MBERRNM(merr);

  /* 2. Get the vertices and hexes from moab and check their numbers against I*J*K and (I-1)*(J-1)*(K-1), resp.
        first '0' specifies "root set", or entire MOAB instance, second the entity dimension being requested */
  merr = mbiface->get_entities_by_dimension(0, dim, ownedelms);MBERRNM(merr);
  merr = mbiface->add_entities (dmmoab->fileset, ownedelms);MBERRNM(merr);
//  merr = mbiface->get_entities_by_dimension(dmmoab->fileset, dim, ownedelms);MBERRNM(merr);

//  merr = mbiface->unite_meshset(0, dmmoab->fileset);MBERRV(mbiface,merr);

  /* sanity checks: the entity counts must match what was computed above */
  if (locnele+ghnele != (int) ownedelms.size())
    SETERRQ2(PETSC_COMM_WORLD,PETSC_ERR_ARG_OUTOFRANGE,"Created the wrong number of elements! (%D!=%D)",locnele+ghnele,ownedelms.size());
  else if(locnpts+ghnpts != (int) ownedvtx.size())
    SETERRQ2(PETSC_COMM_WORLD,PETSC_ERR_ARG_OUTOFRANGE,"Created the wrong number of vertices! (%D!=%D)",locnpts+ghnpts,ownedvtx.size());
  else
    PetscInfo2(*dm, "Created %D elements and %D vertices.\n", ownedelms.size(), ownedvtx.size());

  /* check the handles */
  merr = pcomm->check_all_shared_handles();MBERRV(mbiface,merr);

#if 0
  std::stringstream sstr;
  sstr << "test_" << pcomm->rank() << ".vtk";
  mbiface->write_mesh(sstr.str().c_str());
#endif

  /* resolve the shared entities by exchanging information to adjacent processors */
  merr = mbiface->get_entities_by_type(dmmoab->fileset,etype,ownedelms,true);MBERRNM(merr);
  merr = pcomm->resolve_shared_ents(dmmoab->fileset,ownedelms,dim,dim-1,&id_tag);MBERRV(mbiface,merr);

  /* Reassign global IDs on all entities. */
  merr = pcomm->assign_global_ids(dmmoab->fileset,dim,1,true,true,true);MBERRNM(merr);

  /* exchange one layer of ghost cells with neighbors (nghost layers were
     created locally above; NOTE(review): the '1' here looks like it should be
     nghost -- confirm against MOAB exchange_ghost_cells semantics) */
  merr = pcomm->exchange_ghost_cells(dim,0,1/*nghost*/,dim,true,false,&dmmoab->fileset);MBERRV(mbiface,merr);

  /* Everything is set up, now just do a tag exchange to update tags
     on all of the ghost vertexes */
  merr = mbiface->get_entities_by_type(dmmoab->fileset,moab::MBVERTEX,ownedvtx,true);MBERRNM(merr);
  merr = mbiface->get_entities_by_dimension(dmmoab->fileset, dim, ownedelms);MBERRNM(merr);

  merr = pcomm->exchange_tags(id_tag,ownedvtx);MBERRV(mbiface,merr);
  merr = pcomm->exchange_tags(id_tag,ownedelms);MBERRV(mbiface,merr);

//  if(rank) {
//    ownedvtx.print(0);
//    ownedelms.print(0);
//  }

  PetscFunctionReturn(0);
}
335 
336 
337 #undef __FUNCT__
338 #define __FUNCT__ "DMMoab_GetReadOptions_Private"
339 PetscErrorCode DMMoab_GetReadOptions_Private(PetscBool by_rank, PetscInt numproc, PetscInt dim, MoabReadMode mode, PetscInt dbglevel, const char* extra_opts, const char** read_opts)
340 {
341   std::ostringstream str;
342 
343   PetscFunctionBegin;
344   // do parallel read unless only one processor
345   if (numproc > 1) {
346     str << "PARALLEL=" << mode << ";PARTITION=PARALLEL_PARTITION;PARTITION_DISTRIBUTE;";
347     str << "PARALLEL_RESOLVE_SHARED_ENTS;PARALLEL_GHOSTS=" << dim << ".0.1;";
348     if (by_rank)
349       str << "PARTITION_BY_RANK;";
350   }
351 
352   if (dbglevel) {
353     str << "CPUTIME;DEBUG_IO=" << dbglevel << ";";
354     if (numproc>1)
355       str << "DEBUG_PIO=" << dbglevel << ";";
356   }
357 
358   if (extra_opts)
359     str << extra_opts;
360 
361   *read_opts = str.str().c_str();
362   PetscFunctionReturn(0);
363 }
364 
365 
366 #undef __FUNCT__
367 #define __FUNCT__ "DMMoabLoadFromFile"
368 PetscErrorCode DMMoabLoadFromFile(MPI_Comm comm,PetscInt dim,const char* filename, const char* usrreadopts, DM *dm)
369 {
370   moab::ErrorCode merr;
371   PetscInt        nprocs,dbglevel=0;
372   PetscBool       part_by_rank=PETSC_FALSE;
373   DM_Moab        *dmmoab;
374   moab::Interface *mbiface;
375   moab::ParallelComm *pcomm;
376   moab::Range verts,elems;
377   const char *readopts;
378   PetscErrorCode ierr;
379 
380   PetscFunctionBegin;
381   PetscValidPointer(dm,5);
382 
383   /* Create the basic DMMoab object and keep the default parameters created by DM impls */
384   ierr = DMMoabCreateMoab(comm, PETSC_NULL, PETSC_NULL, PETSC_NULL, PETSC_NULL, dm);CHKERRQ(ierr);
385 
386   /* get all the necessary handles from the private DM object */
387   dmmoab = (DM_Moab*)(*dm)->data;
388   mbiface = dmmoab->mbiface;
389   pcomm = dmmoab->pcomm;
390   nprocs = pcomm->size();
391   dmmoab->dim = dim;
392 
393   /* TODO: Use command-line options to control by_rank, verbosity, MoabReadMode and extra options */
394   ierr  = PetscOptionsBegin(PETSC_COMM_WORLD, "", "Options for reading/writing MOAB based meshes from file", "DMMoab");
395   ierr  = PetscOptionsInt("-dmmb_rw_dbg", "The verbosity level for reading and writing MOAB meshes", "dmmbutil.cxx", dbglevel, &dbglevel, NULL);CHKERRQ(ierr);
396   ierr  = PetscOptionsBool("-dmmb_part_by_rank", "Use partition by rank when reading MOAB meshes from file", "dmmbutil.cxx", part_by_rank, &part_by_rank, NULL);CHKERRQ(ierr);
397   ierr  = PetscOptionsEnd();
398 
399   /* add mesh loading options specific to the DM */
400   ierr = DMMoab_GetReadOptions_Private(part_by_rank, nprocs, dim, MOAB_PARROPTS_READ_PART, dbglevel, usrreadopts, &readopts);CHKERRQ(ierr);
401 
402   PetscInfo2(*dm, "Reading file %s with options: %s\n",filename,readopts);
403 
404   /* Load the mesh from a file. */
405   merr = mbiface->load_file(filename, &dmmoab->fileset, readopts);MBERRVM(mbiface,"Reading MOAB file failed.", merr);
406 
407   /* Reassign global IDs on all entities. */
408   merr = pcomm->assign_global_ids(dmmoab->fileset,dim,1,true,true,true);MBERRNM(merr);
409 
410   /* load the local vertices */
411   merr = mbiface->get_entities_by_type(dmmoab->fileset, moab::MBVERTEX, verts, true);MBERRNM(merr);
412   /* load the local elements */
413   merr = mbiface->get_entities_by_dimension(dmmoab->fileset, dim, elems, true);MBERRNM(merr);
414 
415   /* Everything is set up, now just do a tag exchange to update tags
416      on all of the ghost vertexes */
417   merr = pcomm->exchange_tags(dmmoab->ltog_tag,verts);MBERRV(mbiface,merr);
418   merr = pcomm->exchange_tags(dmmoab->ltog_tag,elems);MBERRV(mbiface,merr);
419 
420   merr = pcomm->exchange_ghost_cells(dim,0,1,0,true,true,&dmmoab->fileset);MBERRV(mbiface,merr);
421 
422   merr = pcomm->collective_sync_partition();MBERR("Collective sync failed", merr);
423 
424   PetscInfo3(*dm, "MOAB file '%s' was successfully loaded. Found %D vertices and %D elements.\n", filename, verts.size(), elems.size());
425 
426 #if 1
427   std::stringstream sstr;
428   sstr << "test_" << pcomm->rank() << ".vtk";
429   mbiface->write_mesh(sstr.str().c_str());
430 #endif
431 
432   PetscFunctionReturn(0);
433 }
434 
435