xref: /petsc/src/mat/tests/ex42.c (revision 28b400f66ebc7ae0049166a2294dfcd3df27e64b)
1 
static char help[] = "Tests MatIncreaseOverlap() and MatCreateSubmatrices() for the parallel case.\n\
This example is similar to ex40.c; here the index sets used are random.\n\
Input arguments are:\n\
  -f <input_file> : file to load.  For example see $PETSC_DIR/share/petsc/datafiles/matrices\n\
  -nd <size>      : > 0  no of domains per processor \n\
  -ov <overlap>   : >=0  amount of overlap between domains\n\n";
9 #include <petscmat.h>
10 
11 int main(int argc,char **args)
12 {
13   PetscErrorCode ierr;
14   PetscInt       nd = 2,ov=1,i,j,lsize,m,n,*idx,bs;
15   PetscMPIInt    rank, size;
16   PetscBool      flg;
17   Mat            A,B,*submatA,*submatB;
18   char           file[PETSC_MAX_PATH_LEN];
19   PetscViewer    fd;
20   IS             *is1,*is2;
21   PetscRandom    r;
22   PetscBool      test_unsorted = PETSC_FALSE;
23   PetscScalar    rand;
24 
25   ierr = PetscInitialize(&argc,&args,(char*)0,help);if (ierr) return ierr;
26   CHKERRMPI(MPI_Comm_size(PETSC_COMM_WORLD,&size));
27   CHKERRMPI(MPI_Comm_rank(PETSC_COMM_WORLD,&rank));
28   CHKERRQ(PetscOptionsGetString(NULL,NULL,"-f",file,sizeof(file),NULL));
29   CHKERRQ(PetscOptionsGetInt(NULL,NULL,"-nd",&nd,NULL));
30   CHKERRQ(PetscOptionsGetInt(NULL,NULL,"-ov",&ov,NULL));
31   CHKERRQ(PetscOptionsGetBool(NULL,NULL,"-test_unsorted",&test_unsorted,NULL));
32 
33   /* Read matrix A and RHS */
34   CHKERRQ(PetscViewerBinaryOpen(PETSC_COMM_WORLD,file,FILE_MODE_READ,&fd));
35   CHKERRQ(MatCreate(PETSC_COMM_WORLD,&A));
36   CHKERRQ(MatSetType(A,MATAIJ));
37   CHKERRQ(MatSetFromOptions(A));
38   CHKERRQ(MatLoad(A,fd));
39   CHKERRQ(PetscViewerDestroy(&fd));
40 
41   /* Read the same matrix as a seq matrix B */
42   CHKERRQ(PetscViewerBinaryOpen(PETSC_COMM_SELF,file,FILE_MODE_READ,&fd));
43   CHKERRQ(MatCreate(PETSC_COMM_SELF,&B));
44   CHKERRQ(MatSetType(B,MATSEQAIJ));
45   CHKERRQ(MatSetFromOptions(B));
46   CHKERRQ(MatLoad(B,fd));
47   CHKERRQ(PetscViewerDestroy(&fd));
48 
49   CHKERRQ(MatGetBlockSize(A,&bs));
50 
51   /* Create the Random no generator */
52   CHKERRQ(MatGetSize(A,&m,&n));
53   CHKERRQ(PetscRandomCreate(PETSC_COMM_SELF,&r));
54   CHKERRQ(PetscRandomSetFromOptions(r));
55 
56   /* Create the IS corresponding to subdomains */
57   CHKERRQ(PetscMalloc1(nd,&is1));
58   CHKERRQ(PetscMalloc1(nd,&is2));
59   CHKERRQ(PetscMalloc1(m ,&idx));
60   for (i = 0; i < m; i++) {idx[i] = i;}
61 
62   /* Create the random Index Sets */
63   for (i=0; i<nd; i++) {
64     /* Skip a few,so that the IS on different procs are diffeent*/
65     for (j=0; j<rank; j++) {
66       CHKERRQ(PetscRandomGetValue(r,&rand));
67     }
68     CHKERRQ(PetscRandomGetValue(r,&rand));
69     lsize = (PetscInt)(rand*(m/bs));
70     /* shuffle */
71     for (j=0; j<lsize; j++) {
72       PetscInt k, swap, l;
73 
74       CHKERRQ(PetscRandomGetValue(r,&rand));
75       k      = j + (PetscInt)(rand*((m/bs)-j));
76       for (l = 0; l < bs; l++) {
77         swap        = idx[bs*j+l];
78         idx[bs*j+l] = idx[bs*k+l];
79         idx[bs*k+l] = swap;
80       }
81     }
82     if (!test_unsorted) CHKERRQ(PetscSortInt(lsize*bs,idx));
83     CHKERRQ(ISCreateGeneral(PETSC_COMM_SELF,lsize*bs,idx,PETSC_COPY_VALUES,is1+i));
84     CHKERRQ(ISCreateGeneral(PETSC_COMM_SELF,lsize*bs,idx,PETSC_COPY_VALUES,is2+i));
85     CHKERRQ(ISSetBlockSize(is1[i],bs));
86     CHKERRQ(ISSetBlockSize(is2[i],bs));
87   }
88 
89   if (!test_unsorted) {
90     CHKERRQ(MatIncreaseOverlap(A,nd,is1,ov));
91     CHKERRQ(MatIncreaseOverlap(B,nd,is2,ov));
92 
93     for (i=0; i<nd; ++i) {
94       CHKERRQ(ISSort(is1[i]));
95       CHKERRQ(ISSort(is2[i]));
96     }
97   }
98 
99   CHKERRQ(MatCreateSubMatrices(A,nd,is1,is1,MAT_INITIAL_MATRIX,&submatA));
100   CHKERRQ(MatCreateSubMatrices(B,nd,is2,is2,MAT_INITIAL_MATRIX,&submatB));
101 
102   /* Now see if the serial and parallel case have the same answers */
103   for (i=0; i<nd; ++i) {
104     CHKERRQ(MatEqual(submatA[i],submatB[i],&flg));
105     PetscCheck(flg,PETSC_COMM_SELF,PETSC_ERR_PLIB,"%" PetscInt_FMT "-th paralle submatA != seq submatB",i);
106   }
107 
108   /* Free Allocated Memory */
109   for (i=0; i<nd; ++i) {
110     CHKERRQ(ISDestroy(&is1[i]));
111     CHKERRQ(ISDestroy(&is2[i]));
112   }
113   CHKERRQ(MatDestroySubMatrices(nd,&submatA));
114   CHKERRQ(MatDestroySubMatrices(nd,&submatB));
115 
116   CHKERRQ(PetscRandomDestroy(&r));
117   CHKERRQ(PetscFree(is1));
118   CHKERRQ(PetscFree(is2));
119   CHKERRQ(MatDestroy(&A));
120   CHKERRQ(MatDestroy(&B));
121   CHKERRQ(PetscFree(idx));
122   ierr = PetscFinalize();
123   return ierr;
124 }
125 
/*TEST

   build:
      requires: !complex

   test:
      nsize: 3
      requires: datafilespath double !defined(PETSC_USE_64BIT_INDICES) !complex
      args: -f ${DATAFILESPATH}/matrices/arco1 -nd 5 -ov 2

   test:
      suffix: 2
      args: -f ${DATAFILESPATH}/matrices/arco1 -nd 8 -ov 2
      requires: datafilespath double !defined(PETSC_USE_64BIT_INDICES) !complex

   test:
      suffix: unsorted_baij_mpi
      nsize: 3
      requires: datafilespath double !defined(PETSC_USE_64BIT_INDICES) !complex
      args: -f ${DATAFILESPATH}/matrices/cfd.1.10 -nd 8 -mat_type baij -test_unsorted

   test:
      suffix: unsorted_baij_seq
      requires: datafilespath double !defined(PETSC_USE_64BIT_INDICES) !complex
      args: -f ${DATAFILESPATH}/matrices/cfd.1.10 -nd 8 -mat_type baij -test_unsorted

   test:
      suffix: unsorted_mpi
      nsize: 3
      requires: datafilespath double !defined(PETSC_USE_64BIT_INDICES) !complex
      args: -f ${DATAFILESPATH}/matrices/arco1 -nd 8 -test_unsorted

   test:
      suffix: unsorted_seq
      requires: datafilespath double !defined(PETSC_USE_64BIT_INDICES) !complex
      args: -f ${DATAFILESPATH}/matrices/arco1 -nd 8 -test_unsorted

TEST*/
164