xref: /petsc/src/mat/tutorials/ex3.c (revision e00437b97e7e022a766c323f69bd5255763f3ff2)
/* Usage text printed for -help; registered with PETSc via PetscInitialize() */
static char help[] = "Illustration of MatIS using a 1D Laplacian assembly\n\n";
2c4762a1bSJed Brown 
3c4762a1bSJed Brown /*
4c4762a1bSJed Brown   MatIS means that the matrix is not assembled. The easiest way to think of this (for me) is that processes do not have
5c4762a1bSJed Brown   to hold full matrix rows. One process can hold part of row i, and another process can hold another part. However, there
6c4762a1bSJed Brown   are still the same number of global rows. The local size here is not the size of the local IS block, which we call the
7c4762a1bSJed Brown   overlap size, since that is a property only of MatIS. It is the size of the local piece of the vector you multiply in
8c4762a1bSJed Brown   MatMult(). This allows PETSc to understand the parallel layout of the Vec, and how it matches the Mat. If you only know
9c4762a1bSJed Brown   the overlap size when assembling, it is best to use PETSC_DECIDE for the local size in the creation routine, so that PETSc
10c4762a1bSJed Brown   automatically partitions the unknowns.
11c4762a1bSJed Brown 
12c4762a1bSJed Brown   Each P_1 element matrix for a cell will be
13c4762a1bSJed Brown 
14c4762a1bSJed Brown     /  1 -1 \
15c4762a1bSJed Brown     \ -1  1 /
16c4762a1bSJed Brown 
17c4762a1bSJed Brown   so that the assembled matrix has a tridiagonal [-1, 2, -1] pattern. We will use 1 cell per process for illustration,
18c4762a1bSJed Brown   and allow PETSc to decide the ownership.
19c4762a1bSJed Brown */
20c4762a1bSJed Brown 
21c4762a1bSJed Brown #include <petscmat.h>
22c4762a1bSJed Brown 
23c4762a1bSJed Brown int main(int argc, char **argv) {
24c4762a1bSJed Brown   MPI_Comm               comm;
25c4762a1bSJed Brown   Mat                    A;
26c4762a1bSJed Brown   Vec                    x, y;
27c4762a1bSJed Brown   ISLocalToGlobalMapping map;
28c4762a1bSJed Brown   PetscScalar            elemMat[4] = {1.0, -1.0, -1.0, 1.0};
29c4762a1bSJed Brown   PetscReal              error;
30c4762a1bSJed Brown   PetscInt               overlapSize = 2, globalIdx[2];
31c4762a1bSJed Brown   PetscMPIInt            rank, size;
32c4762a1bSJed Brown 
339566063dSJacob Faibussowitsch   PetscCall(PetscInitialize(&argc, &argv, NULL, help));
34c4762a1bSJed Brown   comm = PETSC_COMM_WORLD;
359566063dSJacob Faibussowitsch   PetscCallMPI(MPI_Comm_rank(comm, &rank));
369566063dSJacob Faibussowitsch   PetscCallMPI(MPI_Comm_size(comm, &size));
37c4762a1bSJed Brown   /* Create local-to-global map */
38c4762a1bSJed Brown   globalIdx[0] = rank;
39c4762a1bSJed Brown   globalIdx[1] = rank+1;
409566063dSJacob Faibussowitsch   PetscCall(ISLocalToGlobalMappingCreate(comm, 1, overlapSize, globalIdx, PETSC_COPY_VALUES, &map));
41c4762a1bSJed Brown   /* Create matrix */
429566063dSJacob Faibussowitsch   PetscCall(MatCreateIS(comm, 1, PETSC_DECIDE, PETSC_DECIDE, size+1, size+1, map, map, &A));
439566063dSJacob Faibussowitsch   PetscCall(PetscObjectSetName((PetscObject) A, "A"));
449566063dSJacob Faibussowitsch   PetscCall(ISLocalToGlobalMappingDestroy(&map));
459566063dSJacob Faibussowitsch   PetscCall(MatISSetPreallocation(A, overlapSize, NULL, overlapSize, NULL));
469566063dSJacob Faibussowitsch   PetscCall(MatSetValues(A, 2, globalIdx, 2, globalIdx, elemMat, ADD_VALUES));
479566063dSJacob Faibussowitsch   PetscCall(MatAssemblyBegin(A, MAT_FINAL_ASSEMBLY));
489566063dSJacob Faibussowitsch   PetscCall(MatAssemblyEnd(A, MAT_FINAL_ASSEMBLY));
49c4762a1bSJed Brown   /* Check that the constant vector is in the nullspace */
509566063dSJacob Faibussowitsch   PetscCall(MatCreateVecs(A, &x, &y));
519566063dSJacob Faibussowitsch   PetscCall(VecSet(x, 1.0));
529566063dSJacob Faibussowitsch   PetscCall(PetscObjectSetName((PetscObject) x, "x"));
539566063dSJacob Faibussowitsch   PetscCall(VecViewFromOptions(x, NULL, "-x_view"));
549566063dSJacob Faibussowitsch   PetscCall(MatMult(A, x, y));
559566063dSJacob Faibussowitsch   PetscCall(PetscObjectSetName((PetscObject) y, "y"));
569566063dSJacob Faibussowitsch   PetscCall(VecViewFromOptions(y, NULL, "-y_view"));
579566063dSJacob Faibussowitsch   PetscCall(VecNorm(y, NORM_2, &error));
58*e00437b9SBarry Smith   PetscCheck(error <= PETSC_SMALL,comm, PETSC_ERR_ARG_WRONG, "Invalid output, x should be in the nullspace of A");
59c4762a1bSJed Brown   /* Check that an interior unit vector gets mapped to something of 1-norm 4 */
60c4762a1bSJed Brown   if (size > 1) {
619566063dSJacob Faibussowitsch     PetscCall(VecSet(x, 0.0));
629566063dSJacob Faibussowitsch     PetscCall(VecSetValue(x, 1, 1.0, INSERT_VALUES));
639566063dSJacob Faibussowitsch     PetscCall(VecAssemblyBegin(x));
649566063dSJacob Faibussowitsch     PetscCall(VecAssemblyEnd(x));
659566063dSJacob Faibussowitsch     PetscCall(MatMult(A, x, y));
669566063dSJacob Faibussowitsch     PetscCall(VecNorm(y, NORM_1, &error));
67*e00437b9SBarry Smith     PetscCheck(PetscAbsReal(error - 4) <= PETSC_SMALL,comm, PETSC_ERR_ARG_WRONG, "Invalid output for matrix multiply");
68c4762a1bSJed Brown   }
69c4762a1bSJed Brown   /* Cleanup */
709566063dSJacob Faibussowitsch   PetscCall(MatDestroy(&A));
719566063dSJacob Faibussowitsch   PetscCall(VecDestroy(&x));
729566063dSJacob Faibussowitsch   PetscCall(VecDestroy(&y));
739566063dSJacob Faibussowitsch   PetscCall(PetscFinalize());
74b122ec5aSJacob Faibussowitsch   return 0;
75c4762a1bSJed Brown }
76c4762a1bSJed Brown 
77c4762a1bSJed Brown /*TEST
78c4762a1bSJed Brown 
79c4762a1bSJed Brown   test:
80c4762a1bSJed Brown     suffix: 0
81c4762a1bSJed Brown     requires:
82c4762a1bSJed Brown     args:
83c4762a1bSJed Brown 
84c4762a1bSJed Brown   test:
85c4762a1bSJed Brown     suffix: 1
86c4762a1bSJed Brown     nsize: 3
87c4762a1bSJed Brown     args:
88c4762a1bSJed Brown 
89c4762a1bSJed Brown   test:
90c4762a1bSJed Brown     suffix: 2
91c4762a1bSJed Brown     nsize: 7
92c4762a1bSJed Brown     args:
93c4762a1bSJed Brown 
94c4762a1bSJed Brown TEST*/
95