xref: /petsc/src/dm/impls/plex/tutorials/dmplexgetrestoreclosureindices.F90 (revision c3871b17014c46196ba3ff79cb8a17fe47b2df8c)
1*c3871b17SNoam T!   Use DMPlexGetClosureIndices to check for shared node DOF
2*c3871b17SNoam T!   The mesh consists of two tetrahedra, sharing a (triangular) face, hence
3*c3871b17SNoam T!   the number of shared DOF equals 3 nodes x 3 dof/node = 9
4*c3871b17SNoam T!
program main
#include <petsc/finclude/petscdmplex.h>
#include <petsc/finclude/petscdm.h>
#include <petsc/finclude/petscsys.h>
#include <petsc/finclude/petsc.h>

  use PETScDM
  use PETScDMplex

  implicit none

  DM :: dm, cdm
  PetscInt :: cStart, cEnd
  PetscInt :: cdim, nIdx, idx, cnt, Nf
  PetscInt, parameter :: sharedNodes = 3, zero = 0
  PetscSection :: gS
  PetscErrorCode :: ierr

  ! idxMatrix(:, c) holds the closure DOF indices of cell c, one column per cell
  PetscInt, allocatable :: idxMatrix(:, :), offsets(:)
  PetscInt, pointer, dimension(:) :: indices

  PetscCallA(PetscInitialize(ierr))

  ! Build the mesh from command-line options (-dm_plex_filename ...)
  PetscCallA(DMCreate(PETSC_COMM_WORLD, dm, ierr))
  PetscCallA(DMSetType(dm, DMPLEX, ierr))
  PetscCallA(DMSetFromOptions(dm, ierr))

  ! Use the coordinate DM/section: each vertex carries cdim coordinate DOFs
  PetscCallA(DMGetCoordinateDM(dm, cdm, ierr))
  PetscCallA(DMGetCoordinateDim(cdm, cdim, ierr))
  PetscCallA(DMGetGlobalSection(cdm, gS, ierr))

  ! Height-0 stratum = cells; cStart/cEnd bound the cell point range
  PetscCallA(DMPlexGetHeightStratum(dm, zero, cStart, cEnd, ierr))
  PetscCallA(PetscSectionGetNumFields(gS, Nf, ierr))
  allocate (offsets(Nf + 1), source=zero)

  ! Indices per cell
  ! first cell (cStart) -> column 1
  PetscCallA(DMPlexGetClosureIndices(cdm, gS, gS, cStart, PETSC_TRUE, nIdx, indices, offsets, PETSC_NULL_SCALAR_POINTER, ierr))
  allocate (idxMatrix(nIdx, cEnd - cStart))
  idxMatrix(1:nIdx, 1) = indices
  ! Check size and content of output field offsets array
  PetscCheckA(size(offsets) == (Nf + 1) .and. offsets(1) == zero, PETSC_COMM_WORLD, PETSC_ERR_PLIB, "Wrong field offsets")
  PetscCallA(DMPlexRestoreClosureIndices(cdm, gS, gS, cStart, PETSC_TRUE, nIdx, indices, offsets, PETSC_NULL_SCALAR_POINTER, ierr))
  ! last cell (cEnd - 1) -> column cEnd - cStart
  PetscCallA(DMPlexGetClosureIndices(cdm, gS, gS, cEnd - 1, PETSC_TRUE, nIdx, indices, offsets, PETSC_NULL_SCALAR_POINTER, ierr))
  ! Both tetrahedra must have the same closure size for the column assignment below
  PetscCheckA(nIdx == size(idxMatrix, 1), PETSC_COMM_WORLD, PETSC_ERR_PLIB, "Closure size mismatch between cells")
  idxMatrix(1:nIdx, cEnd - cStart) = indices
  PetscCallA(DMPlexRestoreClosureIndices(cdm, gS, gS, cEnd - 1, PETSC_TRUE, nIdx, indices, offsets, PETSC_NULL_SCALAR_POINTER, ierr))

  ! Count indices shared between the first and the last cell: the shared
  ! triangular face contributes sharedNodes vertices x cdim DOFs each.
  ! NOTE: the comparison column is cEnd - cStart (where the data was stored),
  ! not cEnd, which is only equal to it when cStart happens to be 0.
  cnt = 0
  do idx = 1, nIdx
    cnt = cnt + count(idxMatrix(idx, 1) == idxMatrix(1:nIdx, cEnd - cStart))
  end do
  PetscCheckA(cnt == sharedNodes*cdim, PETSC_COMM_WORLD, PETSC_ERR_PLIB, "Wrong DOF indices")

  ! Cleanup
  deallocate (offsets)
  deallocate (idxMatrix)
  PetscCallA(DMDestroy(dm, ierr))
  PetscCallA(PetscFinalize(ierr))

end program main
67*c3871b17SNoam T
68*c3871b17SNoam T! /*TEST
69*c3871b17SNoam T!
70*c3871b17SNoam T! test:
71*c3871b17SNoam T!   args: -dm_plex_filename ${wPETSC_DIR}/share/petsc/datafiles/meshes/doublet-tet.msh
72*c3871b17SNoam T!   output_file: output/empty.out
73*c3871b17SNoam T!
74*c3871b17SNoam T! TEST*/
75