#include <private/dmimpl.h>   /*I "petscdm.h" I*/
#include <petscdmredundant.h> /*I "petscdmredundant.h" I*/
#include <petscmat.h>         /*I "petscmat.h" I*/

typedef struct {
  PetscInt rank;              /* owner */
  PetscInt N;                 /* total number of dofs */
  PetscInt n;                 /* owned number of dofs, n=N on owner, n=0 on non-owners */
} DM_Redundant;

#undef __FUNCT__
#define __FUNCT__ "DMCreateMatrix_Redundant"
static PetscErrorCode DMCreateMatrix_Redundant(DM dm,const MatType mtype,Mat *J)
{
  DM_Redundant           *red = (DM_Redundant*)dm->data;
  PetscErrorCode         ierr;
  ISLocalToGlobalMapping ltog,ltogb;
  PetscInt               i,rstart,rend,*cols;
  PetscScalar            *vals;

  PetscFunctionBegin;
  ierr = MatCreate(((PetscObject)dm)->comm,J);CHKERRQ(ierr);
  ierr = MatSetSizes(*J,red->n,red->n,red->N,red->N);CHKERRQ(ierr);
  ierr = MatSetType(*J,mtype);CHKERRQ(ierr);
  /* Redundant dofs are densely coupled: every owned row may touch all N columns */
  ierr = MatSeqAIJSetPreallocation(*J,red->n,PETSC_NULL);CHKERRQ(ierr);
  ierr = MatSeqBAIJSetPreallocation(*J,1,red->n,PETSC_NULL);CHKERRQ(ierr);
  ierr = MatMPIAIJSetPreallocation(*J,red->n,PETSC_NULL,red->N-red->n,PETSC_NULL);CHKERRQ(ierr);
  ierr = MatMPIBAIJSetPreallocation(*J,1,red->n,PETSC_NULL,red->N-red->n,PETSC_NULL);CHKERRQ(ierr);

  ierr = DMGetLocalToGlobalMapping(dm,&ltog);CHKERRQ(ierr);
  ierr = DMGetLocalToGlobalMappingBlock(dm,&ltogb);CHKERRQ(ierr);
  ierr = MatSetLocalToGlobalMapping(*J,ltog,ltog);CHKERRQ(ierr);
  ierr = MatSetLocalToGlobalMappingBlock(*J,ltogb,ltogb);CHKERRQ(ierr);

  /* Insert explicit zeros in every owned row so the dense nonzero pattern is assembled */
  ierr = PetscMalloc2(red->N,PetscInt,&cols,red->N,PetscScalar,&vals);CHKERRQ(ierr);
  for (i=0; i<red->N; i++) {
    cols[i] = i;
    vals[i] = 0.0;
  }
  ierr = MatGetOwnershipRange(*J,&rstart,&rend);CHKERRQ(ierr);
  for (i=rstart; i<rend; i++) {
    ierr = MatSetValues(*J,1,&i,red->N,cols,vals,INSERT_VALUES);CHKERRQ(ierr);
  }
  ierr = PetscFree2(cols,vals);CHKERRQ(ierr);
  ierr = MatAssemblyBegin(*J,MAT_FINAL_ASSEMBLY);CHKERRQ(ierr);
  ierr = MatAssemblyEnd(*J,MAT_FINAL_ASSEMBLY);CHKERRQ(ierr);
  PetscFunctionReturn(0);
}

#undef __FUNCT__
#define __FUNCT__ "DMDestroy_Redundant"
static PetscErrorCode DMDestroy_Redundant(DM dm)
{
  PetscErrorCode ierr;

  PetscFunctionBegin;
  ierr = PetscObjectComposeFunctionDynamic((PetscObject)dm,"DMRedundantSetSize_C","",PETSC_NULL);CHKERRQ(ierr);
  ierr = PetscObjectComposeFunctionDynamic((PetscObject)dm,"DMRedundantGetSize_C","",PETSC_NULL);CHKERRQ(ierr);
  /* Free the DM_Redundant struct allocated in DMCreate_Redundant() */
  ierr = PetscFree(dm->data);CHKERRQ(ierr);
  PetscFunctionReturn(0);
}

#undef __FUNCT__
#define __FUNCT__ "DMCreateGlobalVector_Redundant"
static PetscErrorCode DMCreateGlobalVector_Redundant(DM dm,Vec *gvec)
{
  PetscErrorCode         ierr;
  DM_Redundant           *red = (DM_Redundant*)dm->data;
  ISLocalToGlobalMapping ltog,ltogb;

  PetscFunctionBegin;
  PetscValidHeaderSpecific(dm,DM_CLASSID,1);
  PetscValidPointer(gvec,2);
  *gvec = 0;
  ierr = VecCreate(((PetscObject)dm)->comm,gvec);CHKERRQ(ierr);
  ierr = VecSetSizes(*gvec,red->n,red->N);CHKERRQ(ierr);
  ierr = VecSetType(*gvec,dm->vectype);CHKERRQ(ierr);
  ierr = DMGetLocalToGlobalMapping(dm,&ltog);CHKERRQ(ierr);
  ierr = DMGetLocalToGlobalMappingBlock(dm,&ltogb);CHKERRQ(ierr);
  ierr = VecSetLocalToGlobalMapping(*gvec,ltog);CHKERRQ(ierr);
  ierr = VecSetLocalToGlobalMappingBlock(*gvec,ltogb);CHKERRQ(ierr);
  ierr = PetscObjectCompose((PetscObject)*gvec,"DM",(PetscObject)dm);CHKERRQ(ierr);
  PetscFunctionReturn(0);
}
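
/* For DMREDUNDANT the global vector distributes nothing: all N values live on
   the owning rank (n=N there, n=0 elsewhere).  The local vector below is a
   sequential full-length copy of the N values held by every rank. */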

#undef __FUNCT__
#define __FUNCT__ "DMCreateLocalVector_Redundant"
static PetscErrorCode DMCreateLocalVector_Redundant(DM dm,Vec *lvec)
{
  PetscErrorCode ierr;
  DM_Redundant   *red = (DM_Redundant*)dm->data;

  PetscFunctionBegin;
  PetscValidHeaderSpecific(dm,DM_CLASSID,1);
  PetscValidPointer(lvec,2);
  *lvec = 0;
  ierr = VecCreate(PETSC_COMM_SELF,lvec);CHKERRQ(ierr);
  ierr = VecSetSizes(*lvec,red->N,red->N);CHKERRQ(ierr);
  ierr = VecSetType(*lvec,dm->vectype);CHKERRQ(ierr);
  ierr = PetscObjectCompose((PetscObject)*lvec,"DM",(PetscObject)dm);CHKERRQ(ierr);
  PetscFunctionReturn(0);
}

#undef __FUNCT__
#define __FUNCT__ "DMLocalToGlobalBegin_Redundant"
static PetscErrorCode DMLocalToGlobalBegin_Redundant(DM dm,Vec l,InsertMode imode,Vec g)
{
  PetscErrorCode    ierr;
  DM_Redundant      *red = (DM_Redundant*)dm->data;
  const PetscScalar *lv;
  PetscScalar       *gv;
  PetscMPIInt       rank;

  PetscFunctionBegin;
  ierr = MPI_Comm_rank(((PetscObject)dm)->comm,&rank);CHKERRQ(ierr);
  ierr = VecGetArrayRead(l,&lv);CHKERRQ(ierr);
  ierr = VecGetArray(g,&gv);CHKERRQ(ierr);
  switch (imode) {
  case ADD_VALUES:
  case MAX_VALUES:
  {
    /* Reduce every rank's local copy onto the owning rank */
    void        *source;
    PetscScalar *buffer;
    PetscInt    i;
    if (rank == red->rank) {
#if defined(PETSC_HAVE_MPI_IN_PLACE)
      buffer = gv;
      source = MPI_IN_PLACE;
#else
      ierr   = PetscMalloc(red->N*sizeof(PetscScalar),&buffer);CHKERRQ(ierr);
      source = buffer;
#endif
      if (imode == ADD_VALUES) for (i=0; i<red->N; i++) buffer[i] = gv[i] + lv[i];
#if !defined(PETSC_USE_COMPLEX)
      if (imode == MAX_VALUES) for (i=0; i<red->N; i++) buffer[i] = PetscMax(gv[i],lv[i]);
#endif
    } else {
      source = (void*)lv;
    }
    ierr = MPI_Reduce(source,gv,red->N,MPIU_SCALAR,(imode == ADD_VALUES) ? MPI_SUM : MPI_MAX,red->rank,((PetscObject)dm)->comm);CHKERRQ(ierr);
#if !defined(PETSC_HAVE_MPI_IN_PLACE)
    if (rank == red->rank) {ierr = PetscFree(buffer);CHKERRQ(ierr);}
#endif
  } break;
  case INSERT_VALUES:
    /* Only the owner holds global entries (red->n is 0 on all other ranks) */
    ierr = PetscMemcpy(gv,lv,red->n*sizeof(PetscScalar));CHKERRQ(ierr);
    break;
  default: SETERRQ(((PetscObject)dm)->comm,PETSC_ERR_SUP,"InsertMode not supported");
  }
  ierr = VecRestoreArrayRead(l,&lv);CHKERRQ(ierr);
  ierr = VecRestoreArray(g,&gv);CHKERRQ(ierr);
  PetscFunctionReturn(0);
}

#undef __FUNCT__
#define __FUNCT__ "DMLocalToGlobalEnd_Redundant"
static PetscErrorCode DMLocalToGlobalEnd_Redundant(DM dm,Vec l,InsertMode imode,Vec g)
{
  PetscFunctionBegin;
  PetscFunctionReturn(0);
}

#undef __FUNCT__
#define __FUNCT__ "DMGlobalToLocalBegin_Redundant"
static PetscErrorCode DMGlobalToLocalBegin_Redundant(DM dm,Vec g,InsertMode imode,Vec l)
{
  PetscErrorCode    ierr;
  DM_Redundant      *red = (DM_Redundant*)dm->data;
  const PetscScalar *gv;
  PetscScalar       *lv;

  PetscFunctionBegin;
  ierr = VecGetArrayRead(g,&gv);CHKERRQ(ierr);
  ierr = VecGetArray(l,&lv);CHKERRQ(ierr);
  switch (imode) {
  case INSERT_VALUES:
    /* The owner seeds its local copy, then broadcasts to all other ranks */
    if (red->n) {ierr = PetscMemcpy(lv,gv,red->n*sizeof(PetscScalar));CHKERRQ(ierr);}
    ierr = MPI_Bcast(lv,red->N,MPIU_SCALAR,red->rank,((PetscObject)dm)->comm);CHKERRQ(ierr);
    break;
  default: SETERRQ(((PetscObject)dm)->comm,PETSC_ERR_SUP,"InsertMode not supported");
  }
  ierr = VecRestoreArrayRead(g,&gv);CHKERRQ(ierr);
  ierr = VecRestoreArray(l,&lv);CHKERRQ(ierr);
  PetscFunctionReturn(0);
}

#undef __FUNCT__
#define __FUNCT__ "DMGlobalToLocalEnd_Redundant"
static PetscErrorCode DMGlobalToLocalEnd_Redundant(DM dm,Vec g,InsertMode imode,Vec l)
{
  PetscFunctionBegin;
  PetscFunctionReturn(0);
}
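
/* Both scatters complete synchronously in their Begin routines, so the End
   routines above are empty.  Setup below builds an identity local-to-global
   mapping, since every rank addresses the full range [0,N). */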

#undef __FUNCT__
#define __FUNCT__ "DMSetUp_Redundant"
static PetscErrorCode DMSetUp_Redundant(DM dm)
{
  PetscErrorCode ierr;
  DM_Redundant   *red = (DM_Redundant*)dm->data;
  PetscInt       i,*globals;

  PetscFunctionBegin;
  ierr = PetscMalloc(red->N*sizeof(PetscInt),&globals);CHKERRQ(ierr);
  for (i=0; i<red->N; i++) globals[i] = i;
  ierr = ISLocalToGlobalMappingCreate(PETSC_COMM_SELF,red->N,globals,PETSC_OWN_POINTER,&dm->ltogmap);CHKERRQ(ierr);
  ierr = PetscObjectReference((PetscObject)dm->ltogmap);CHKERRQ(ierr);
  dm->ltogmapb = dm->ltogmap;
  PetscFunctionReturn(0);
}

#undef __FUNCT__
#define __FUNCT__ "DMView_Redundant"
static PetscErrorCode DMView_Redundant(DM dm,PetscViewer viewer)
{
  PetscErrorCode ierr;
  DM_Redundant   *red = (DM_Redundant*)dm->data;
  PetscBool      iascii;

  PetscFunctionBegin;
  ierr = PetscTypeCompare((PetscObject)viewer,PETSCVIEWERASCII,&iascii);CHKERRQ(ierr);
  if (iascii) {
    ierr = PetscViewerASCIIPrintf(viewer,"redundant: rank=%D N=%D\n",red->rank,red->N);CHKERRQ(ierr);
  }
  PetscFunctionReturn(0);
}

#undef __FUNCT__
#define __FUNCT__ "DMCreateColoring_Redundant"
static PetscErrorCode DMCreateColoring_Redundant(DM dm,ISColoringType ctype,const MatType mtype,ISColoring *coloring)
{
  DM_Redundant    *red = (DM_Redundant*)dm->data;
  PetscErrorCode  ierr;
  PetscInt        i,nloc;
  ISColoringValue *colors;

  PetscFunctionBegin;
  switch (ctype) {
  case IS_COLORING_GLOBAL:
    nloc = red->n;
    break;
  case IS_COLORING_GHOSTED:
    nloc = red->N;
    break;
  default: SETERRQ1(((PetscObject)dm)->comm,PETSC_ERR_ARG_WRONG,"Unknown ISColoringType %d",(int)ctype);
  }
  ierr = PetscMalloc(nloc*sizeof(ISColoringValue),&colors);CHKERRQ(ierr);
  for (i=0; i<nloc; i++) colors[i] = i;
  ierr = ISColoringCreate(((PetscObject)dm)->comm,red->N,nloc,colors,coloring);CHKERRQ(ierr);
  ierr = ISColoringSetType(*coloring,ctype);CHKERRQ(ierr);
  PetscFunctionReturn(0);
}

#undef __FUNCT__
#define __FUNCT__ "DMRefine_Redundant"
static PetscErrorCode DMRefine_Redundant(DM dmc,MPI_Comm comm,DM *dmf)
{
  PetscErrorCode ierr;
  PetscMPIInt    flag;
  DM_Redundant   *redc = (DM_Redundant*)dmc->data;

  PetscFunctionBegin;
  ierr = MPI_Comm_compare(((PetscObject)dmc)->comm,comm,&flag);CHKERRQ(ierr);
  if (flag != MPI_CONGRUENT && flag != MPI_IDENT) SETERRQ(((PetscObject)dmc)->comm,PETSC_ERR_SUP,"cannot change communicators");
  ierr = DMRedundantCreate(comm,redc->rank,redc->N,dmf);CHKERRQ(ierr);
  PetscFunctionReturn(0);
}

#undef __FUNCT__
#define __FUNCT__ "DMCoarsen_Redundant"
static PetscErrorCode DMCoarsen_Redundant(DM dmf,MPI_Comm comm,DM *dmc)
{
  PetscErrorCode ierr;
  PetscMPIInt    flag;
  DM_Redundant   *redf = (DM_Redundant*)dmf->data;

  PetscFunctionBegin;
  ierr = MPI_Comm_compare(((PetscObject)dmf)->comm,comm,&flag);CHKERRQ(ierr);
  if (flag != MPI_CONGRUENT && flag != MPI_IDENT) SETERRQ(((PetscObject)dmf)->comm,PETSC_ERR_SUP,"cannot change communicators");
  ierr = DMRedundantCreate(comm,redf->rank,redf->N,dmc);CHKERRQ(ierr);
  PetscFunctionReturn(0);
}
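
/* Refinement and coarsening both reproduce the same redundant layout, so the
   interpolation between a matching coarse/fine pair is simply the identity. */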

#undef __FUNCT__
#define __FUNCT__ "DMCreateInterpolation_Redundant"
static PetscErrorCode DMCreateInterpolation_Redundant(DM dmc,DM dmf,Mat *P,Vec *scale)
{
  PetscErrorCode ierr;
  DM_Redundant   *redc = (DM_Redundant*)dmc->data;
  DM_Redundant   *redf = (DM_Redundant*)dmf->data;
  PetscMPIInt    flag;
  PetscInt       i,rstart,rend;

  PetscFunctionBegin;
  ierr = MPI_Comm_compare(((PetscObject)dmc)->comm,((PetscObject)dmf)->comm,&flag);CHKERRQ(ierr);
  if (flag != MPI_CONGRUENT && flag != MPI_IDENT) SETERRQ(((PetscObject)dmf)->comm,PETSC_ERR_SUP,"cannot change communicators");
  if (redc->rank != redf->rank) SETERRQ(((PetscObject)dmf)->comm,PETSC_ERR_ARG_INCOMP,"Owning rank does not match");
  if (redc->N != redf->N) SETERRQ(((PetscObject)dmf)->comm,PETSC_ERR_ARG_INCOMP,"Global size does not match");
  ierr = MatCreate(((PetscObject)dmc)->comm,P);CHKERRQ(ierr);
  ierr = MatSetSizes(*P,redc->n,redc->n,redc->N,redc->N);CHKERRQ(ierr);
  ierr = MatSetType(*P,MATAIJ);CHKERRQ(ierr);
  ierr = MatSeqAIJSetPreallocation(*P,1,PETSC_NULL);CHKERRQ(ierr);
  ierr = MatMPIAIJSetPreallocation(*P,1,PETSC_NULL,0,PETSC_NULL);CHKERRQ(ierr);
  ierr = MatGetOwnershipRange(*P,&rstart,&rend);CHKERRQ(ierr);
  for (i=rstart; i<rend; i++) {ierr = MatSetValue(*P,i,i,1.0,INSERT_VALUES);CHKERRQ(ierr);}
  ierr = MatAssemblyBegin(*P,MAT_FINAL_ASSEMBLY);CHKERRQ(ierr);
  ierr = MatAssemblyEnd(*P,MAT_FINAL_ASSEMBLY);CHKERRQ(ierr);
  if (scale) {ierr = DMCreateInterpolationScale(dmc,dmf,*P,scale);CHKERRQ(ierr);}
  PetscFunctionReturn(0);
}

#undef __FUNCT__
#define __FUNCT__ "DMRedundantSetSize"
/*@
   DMRedundantSetSize - Sets the size of a densely coupled redundant object

   Collective on DM

   Input Parameters:
+  dm - redundant DM
.  rank - rank of process to own redundant degrees of freedom
-  N - total number of redundant degrees of freedom

   Level: advanced

.seealso DMDestroy(), DMCreateGlobalVector(), DMRedundantCreate(), DMRedundantGetSize()
@*/
PetscErrorCode DMRedundantSetSize(DM dm,PetscInt rank,PetscInt N)
{
  PetscErrorCode ierr;

  PetscFunctionBegin;
  PetscValidHeaderSpecific(dm,DM_CLASSID,1);
  PetscValidType(dm,1);
  PetscValidLogicalCollectiveInt(dm,rank,2);
  PetscValidLogicalCollectiveInt(dm,N,3);
  ierr = PetscTryMethod(dm,"DMRedundantSetSize_C",(DM,PetscInt,PetscInt),(dm,rank,N));CHKERRQ(ierr);
  PetscFunctionReturn(0);
}

#undef __FUNCT__
#define __FUNCT__ "DMRedundantGetSize"
/*@
   DMRedundantGetSize - Gets the size of a densely coupled redundant object

   Not Collective

   Input Parameter:
.  dm - redundant DM

   Output Parameters:
+  rank - rank of process to own redundant degrees of freedom (or PETSC_NULL)
-  N - total number of redundant degrees of freedom (or PETSC_NULL)

   Level: advanced

.seealso DMDestroy(), DMCreateGlobalVector(), DMRedundantCreate(), DMRedundantSetSize()
@*/
PetscErrorCode DMRedundantGetSize(DM dm,PetscInt *rank,PetscInt *N)
{
  PetscErrorCode ierr;

  PetscFunctionBegin;
  PetscValidHeaderSpecific(dm,DM_CLASSID,1);
  PetscValidType(dm,1);
  ierr = PetscUseMethod(dm,"DMRedundantGetSize_C",(DM,PetscInt*,PetscInt*),(dm,rank,N));CHKERRQ(ierr);
  PetscFunctionReturn(0);
}
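
/* Type-specific implementations, registered on the DM in DMCreate_Redundant()
   and dispatched through PetscTryMethod()/PetscUseMethod() above. */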

EXTERN_C_BEGIN
#undef __FUNCT__
#define __FUNCT__ "DMRedundantSetSize_Redundant"
PetscErrorCode DMRedundantSetSize_Redundant(DM dm,PetscInt rank,PetscInt N)
{
  DM_Redundant   *red = (DM_Redundant*)dm->data;
  PetscErrorCode ierr;
  PetscMPIInt    myrank;

  PetscFunctionBegin;
  ierr = MPI_Comm_rank(((PetscObject)dm)->comm,&myrank);CHKERRQ(ierr);
  red->rank = rank;
  red->N    = N;
  red->n    = (myrank == rank) ? N : 0;
  PetscFunctionReturn(0);
}

#undef __FUNCT__
#define __FUNCT__ "DMRedundantGetSize_Redundant"
PetscErrorCode DMRedundantGetSize_Redundant(DM dm,PetscInt *rank,PetscInt *N)
{
  DM_Redundant *red = (DM_Redundant*)dm->data;

  PetscFunctionBegin;
  if (rank) *rank = red->rank;
  if (N)    *N    = red->N;
  PetscFunctionReturn(0);
}
EXTERN_C_END

EXTERN_C_BEGIN
#undef __FUNCT__
#define __FUNCT__ "DMCreate_Redundant"
PetscErrorCode DMCreate_Redundant(DM dm)
{
  PetscErrorCode ierr;
  DM_Redundant   *red;

  PetscFunctionBegin;
  ierr     = PetscNewLog(dm,DM_Redundant,&red);CHKERRQ(ierr);
  dm->data = red;

  ierr = PetscObjectChangeTypeName((PetscObject)dm,DMREDUNDANT);CHKERRQ(ierr);
  dm->ops->setup               = DMSetUp_Redundant;
  dm->ops->view                = DMView_Redundant;
  dm->ops->createglobalvector  = DMCreateGlobalVector_Redundant;
  dm->ops->createlocalvector   = DMCreateLocalVector_Redundant;
  dm->ops->creatematrix        = DMCreateMatrix_Redundant;
  dm->ops->destroy             = DMDestroy_Redundant;
  dm->ops->globaltolocalbegin  = DMGlobalToLocalBegin_Redundant;
  dm->ops->globaltolocalend    = DMGlobalToLocalEnd_Redundant;
  dm->ops->localtoglobalbegin  = DMLocalToGlobalBegin_Redundant;
  dm->ops->localtoglobalend    = DMLocalToGlobalEnd_Redundant;
  dm->ops->refine              = DMRefine_Redundant;
  dm->ops->coarsen             = DMCoarsen_Redundant;
  dm->ops->createinterpolation = DMCreateInterpolation_Redundant;
  dm->ops->getcoloring         = DMCreateColoring_Redundant;
  ierr = PetscStrallocpy(VECSTANDARD,&dm->vectype);CHKERRQ(ierr);
  ierr = PetscObjectComposeFunctionDynamic((PetscObject)dm,"DMRedundantSetSize_C","DMRedundantSetSize_Redundant",DMRedundantSetSize_Redundant);CHKERRQ(ierr);
  ierr = PetscObjectComposeFunctionDynamic((PetscObject)dm,"DMRedundantGetSize_C","DMRedundantGetSize_Redundant",DMRedundantGetSize_Redundant);CHKERRQ(ierr);
  PetscFunctionReturn(0);
}
EXTERN_C_END

#undef __FUNCT__
#define __FUNCT__ "DMRedundantCreate"
/*@C
   DMRedundantCreate - Creates a DM object, used to manage data for dense globally coupled variables

   Collective on MPI_Comm

   Input Parameters:
+  comm - the processors that will share the global vector
.  rank - rank to own the redundant values
-  N - total number of degrees of freedom

   Output Parameter:
.  dm - the redundant DM

   Level: advanced

.seealso DMDestroy(), DMCreateGlobalVector(), DMCreateMatrix(), DMCompositeAddDM()

@*/
PetscErrorCode DMRedundantCreate(MPI_Comm comm,PetscInt rank,PetscInt N,DM *dm)
{
  PetscErrorCode ierr;

  PetscFunctionBegin;
  PetscValidPointer(dm,4);
  ierr = DMCreate(comm,dm);CHKERRQ(ierr);
  ierr = DMSetType(*dm,DMREDUNDANT);CHKERRQ(ierr);
  ierr = DMRedundantSetSize(*dm,rank,N);CHKERRQ(ierr);
  ierr = DMSetUp(*dm);CHKERRQ(ierr);
  PetscFunctionReturn(0);
}
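
/* A minimal usage sketch (illustration only, not part of this file): create a
   redundant DM with 3 dofs owned by rank 0, obtain its global vector, and
   clean up.

     DM  dm;
     Vec g;
     ierr = DMRedundantCreate(PETSC_COMM_WORLD,0,3,&dm);CHKERRQ(ierr);
     ierr = DMCreateGlobalVector(dm,&g);CHKERRQ(ierr);
     ierr = VecDestroy(&g);CHKERRQ(ierr);
     ierr = DMDestroy(&dm);CHKERRQ(ierr);
*/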