/*
   Include file for the matrix component of PETSc
*/
/* NOTE(review): the guard name "__PETSCMAT_H" uses a leading double underscore,
   which the C standard reserves for the implementation; kept as-is because all
   PETSc headers of this generation follow the same convention. */
#ifndef __PETSCMAT_H
#define __PETSCMAT_H
#include "petscvec.h"
PETSC_EXTERN_CXX_BEGIN

/*S
     Mat - Abstract PETSc matrix object

   Level: beginner

  Concepts: matrix; linear operator

.seealso:  MatCreate(), MatType, MatSetType()
S*/
typedef struct _p_Mat* Mat;

/*J
    MatType - String with the name of a PETSc matrix or the creation function
       with an optional dynamic library name, for example
       http://www.mcs.anl.gov/petsc/lib.a:mymatcreate()

   Level: beginner

.seealso: MatSetType(), Mat, MatSolverPackage
J*/
/* NOTE(review): since MatType is a macro for "char*", a parameter declared
   "const MatType" expands to "char *const" (const POINTER), not "const char*";
   the const in the prototypes below therefore does not protect the string
   contents. A later API revision would be needed to fix this. */
#define MatType char*
#define MATSAME            "same"
#define MATMAIJ            "maij"
#define MATSEQMAIJ         "seqmaij"
#define MATMPIMAIJ         "mpimaij"
#define MATIS              "is"
#define MATAIJ             "aij"
#define MATSEQAIJ          "seqaij"
#define MATSEQAIJPTHREAD   "seqaijpthread"
#define MATAIJPTHREAD      "aijpthread"
#define MATMPIAIJ          "mpiaij"
#define MATAIJCRL          "aijcrl"
#define MATSEQAIJCRL       "seqaijcrl"
#define MATMPIAIJCRL       "mpiaijcrl"
#define MATAIJCUSP         "aijcusp"
#define MATSEQAIJCUSP      "seqaijcusp"
#define MATMPIAIJCUSP      "mpiaijcusp"
#define MATAIJCUSPARSE     "aijcusparse"
#define MATSEQAIJCUSPARSE  "seqaijcusparse"
#define MATMPIAIJCUSPARSE  "mpiaijcusparse"
#define MATAIJPERM         "aijperm"
#define MATSEQAIJPERM      "seqaijperm"
#define MATMPIAIJPERM      "mpiaijperm"
#define MATSHELL           "shell"
#define MATDENSE           "dense"
#define MATSEQDENSE        "seqdense"
#define MATMPIDENSE        "mpidense"
#define MATBAIJ            "baij"
#define MATSEQBAIJ         "seqbaij"
#define MATMPIBAIJ         "mpibaij"
#define MATMPIADJ          "mpiadj"
#define MATSBAIJ           "sbaij"
#define MATSEQSBAIJ        "seqsbaij"
#define MATMPISBAIJ        "mpisbaij"
#define MATSEQBSTRM        "seqbstrm"
#define MATMPIBSTRM        "mpibstrm"
#define MATBSTRM           "bstrm"
#define MATSEQSBSTRM       "seqsbstrm"
#define MATMPISBSTRM       "mpisbstrm"
#define MATSBSTRM          "sbstrm"
#define MATDAAD            "daad"
#define MATMFFD            "mffd"
#define MATNORMAL          "normal"
#define MATLRC             "lrc"
#define MATSCATTER         "scatter"
#define MATBLOCKMAT        "blockmat"
#define MATCOMPOSITE       "composite"
#define MATFFT             "fft"
#define MATFFTW            "fftw"
#define MATSEQCUFFT        "seqcufft"
#define MATTRANSPOSEMAT    "transpose"
#define MATSCHURCOMPLEMENT "schurcomplement"
#define MATPYTHON          "python"
#define MATHYPRESTRUCT     "hyprestruct"
#define MATHYPRESSTRUCT    "hypresstruct"
#define MATSUBMATRIX       "submatrix"
#define MATLOCALREF        "localref"
#define MATNEST            "nest"
#define MATIJ              "ij"

/*J
    MatSolverPackage - String with the name of a PETSc matrix solver type.

    For example: "petsc" indicates what PETSc provides, "superlu" indicates either
       SuperLU or SuperLU_Dist etc.

   Level: beginner

.seealso: MatGetFactor(), Mat, MatSetType(), MatType
J*/
#define MatSolverPackage char*
#define MATSOLVERSPOOLES      "spooles"
#define MATSOLVERSUPERLU      "superlu"
#define MATSOLVERSUPERLU_DIST "superlu_dist"
#define MATSOLVERUMFPACK      "umfpack"
#define MATSOLVERCHOLMOD      "cholmod"
#define MATSOLVERESSL         "essl"
#define MATSOLVERLUSOL        "lusol"
#define MATSOLVERMUMPS        "mumps"
#define MATSOLVERPASTIX       "pastix"
#define MATSOLVERMATLAB       "matlab"
#define MATSOLVERPETSC        "petsc"
#define MATSOLVERPLAPACK      "plapack"
#define MATSOLVERBAS          "bas"
#define MATSOLVERCUSPARSE     "cusparse"
#define MATSOLVERBSTRM        "bstrm"
#define MATSOLVERSBSTRM       "sbstrm"

/*E
    MatFactorType - indicates what type of factorization is requested

    Level: beginner

   Any additions/changes here MUST also be made in include/finclude/petscmat.h

.seealso: MatSolverPackage, MatGetFactor()
E*/
typedef enum {MAT_FACTOR_NONE, MAT_FACTOR_LU, MAT_FACTOR_CHOLESKY, MAT_FACTOR_ILU, MAT_FACTOR_ICC,MAT_FACTOR_ILUDT} MatFactorType;
extern const char *const MatFactorTypes[];

extern PetscErrorCode MatGetFactor(Mat,const MatSolverPackage,MatFactorType,Mat*);
extern PetscErrorCode MatGetFactorAvailable(Mat,const MatSolverPackage,MatFactorType,PetscBool *);
extern PetscErrorCode MatFactorGetSolverPackage(Mat,const MatSolverPackage*);
extern PetscErrorCode MatGetFactorType(Mat,MatFactorType*);

/* Logging support */
#define MAT_FILE_CLASSID 1211216    /* used to indicate matrices in binary files */
extern PetscClassId MAT_CLASSID;
extern PetscClassId MAT_FDCOLORING_CLASSID;
extern PetscClassId MAT_TRANSPOSECOLORING_CLASSID;
extern PetscClassId MAT_PARTITIONING_CLASSID;
extern PetscClassId MAT_COARSEN_CLASSID;
extern PetscClassId MAT_NULLSPACE_CLASSID;
extern PetscClassId MATMFFD_CLASSID;

/*E
    MatReuse - Indicates if matrices obtained from a previous call to MatGetSubMatrices()
     or MatGetSubMatrix() are to be reused to store the new matrix values. For MatConvert() it is
     used to indicate that the input matrix is to be replaced with the converted matrix.

    Level: beginner

   Any additions/changes here MUST also be made in include/finclude/petscmat.h

.seealso: MatGetSubMatrices(), MatGetSubMatrix(), MatDestroyMatrices(), MatConvert()
E*/
typedef enum {MAT_INITIAL_MATRIX,MAT_REUSE_MATRIX,MAT_IGNORE_MATRIX} MatReuse;

/*E
    MatGetSubMatrixOption - Indicates if matrices obtained from a call to MatGetSubMatrices()
     include the matrix values. Currently it is only used by MatGetSeqNonzeroStructure().

    Level: beginner

.seealso: MatGetSeqNonzeroStructure()
E*/
typedef enum {MAT_DO_NOT_GET_VALUES,MAT_GET_VALUES} MatGetSubMatrixOption;

extern PetscErrorCode MatInitializePackage(const char[]);

extern PetscErrorCode MatCreate(MPI_Comm,Mat*);
extern PetscErrorCode MatSetSizes(Mat,PetscInt,PetscInt,PetscInt,PetscInt);
extern PetscErrorCode MatSetType(Mat,const MatType);
extern PetscErrorCode MatSetFromOptions(Mat);
extern PetscErrorCode MatRegisterAll(const char[]);
extern PetscErrorCode MatRegister(const char[],const char[],const char[],PetscErrorCode(*)(Mat));
extern PetscErrorCode MatRegisterBaseName(const char[],const char[],const char[]);
extern PetscErrorCode MatSetOptionsPrefix(Mat,const char[]);
extern PetscErrorCode MatAppendOptionsPrefix(Mat,const char[]);
extern PetscErrorCode MatGetOptionsPrefix(Mat,const char*[]);

/*MC
   MatRegisterDynamic - Adds a new matrix type

   Synopsis:
   PetscErrorCode MatRegisterDynamic(const char *name,const char *path,const char *name_create,PetscErrorCode (*routine_create)(Mat))

   Not Collective

   Input Parameters:
+  name - name of a new user-defined matrix type
.  path - path (either absolute or relative) the library containing this solver
.  name_create - name of routine to create method context
-  routine_create - routine to create method context

   Notes:
   MatRegisterDynamic() may be called multiple times to add several user-defined solvers.

   If dynamic libraries are used, then the fourth input argument (routine_create)
   is ignored.
   Sample usage:
.vb
   MatRegisterDynamic("my_mat",/home/username/my_lib/lib/libO/solaris/mylib.a,
               "MyMatCreate",MyMatCreate);
.ve

   Then, your solver can be chosen with the procedural interface via
$     MatSetType(Mat,"my_mat")
   or at runtime via the option
$     -mat_type my_mat

   Level: advanced

   Notes: ${PETSC_ARCH} occurring in pathname will be replaced with appropriate values.
         If your function is not being put into a shared library then use MatRegister() instead

  .keywords: Mat, register

.seealso: MatRegisterAll(), MatRegisterDestroy()

M*/
#if defined(PETSC_USE_DYNAMIC_LIBRARIES)
#define MatRegisterDynamic(a,b,c,d) MatRegister(a,b,c,0)
#else
#define MatRegisterDynamic(a,b,c,d) MatRegister(a,b,c,d)
#endif

extern PetscBool MatRegisterAllCalled;
extern PetscFList MatList;
extern PetscFList MatColoringList;
extern PetscFList MatPartitioningList;
extern PetscFList MatCoarsenList;

/*E
    MatStructure - Indicates if the matrix has the same nonzero structure

    Level: beginner

   Any additions/changes here MUST also be made in include/finclude/petscmat.h

.seealso: MatCopy(), KSPSetOperators(), PCSetOperators()
E*/
typedef enum {DIFFERENT_NONZERO_PATTERN,SUBSET_NONZERO_PATTERN,SAME_NONZERO_PATTERN,SAME_PRECONDITIONER} MatStructure;

extern PetscErrorCode MatCreateSeqDense(MPI_Comm,PetscInt,PetscInt,PetscScalar[],Mat*);
extern PetscErrorCode MatCreateDense(MPI_Comm,PetscInt,PetscInt,PetscInt,PetscInt,PetscScalar[],Mat*);
extern PetscErrorCode MatCreateSeqAIJ(MPI_Comm,PetscInt,PetscInt,PetscInt,const PetscInt[],Mat*);
extern PetscErrorCode MatCreateAIJ(MPI_Comm,PetscInt,PetscInt,PetscInt,PetscInt,PetscInt,const PetscInt[],PetscInt,const PetscInt[],Mat*);
extern PetscErrorCode MatCreateMPIAIJWithArrays(MPI_Comm,PetscInt,PetscInt,PetscInt,PetscInt,const PetscInt[],const PetscInt[],const PetscScalar[],Mat *);
extern PetscErrorCode MatCreateMPIAIJWithSplitArrays(MPI_Comm,PetscInt,PetscInt,PetscInt,PetscInt,PetscInt[],PetscInt[],PetscScalar[],PetscInt[],PetscInt[],PetscScalar[],Mat*);

extern PetscErrorCode MatCreateSeqBAIJ(MPI_Comm,PetscInt,PetscInt,PetscInt,PetscInt,const PetscInt[],Mat*);
extern PetscErrorCode MatCreateBAIJ(MPI_Comm,PetscInt,PetscInt,PetscInt,PetscInt,PetscInt,PetscInt,const PetscInt[],PetscInt,const PetscInt[],Mat*);
extern PetscErrorCode MatCreateMPIBAIJWithArrays(MPI_Comm,PetscInt,PetscInt,PetscInt,PetscInt,PetscInt,const PetscInt[],const PetscInt[],const PetscScalar[],Mat*);

extern PetscErrorCode MatCreateMPIAdj(MPI_Comm,PetscInt,PetscInt,PetscInt[],PetscInt[],PetscInt[],Mat*);
extern PetscErrorCode MatCreateSeqSBAIJ(MPI_Comm,PetscInt,PetscInt,PetscInt,PetscInt,const PetscInt[],Mat*);

extern PetscErrorCode MatCreateSBAIJ(MPI_Comm,PetscInt,PetscInt,PetscInt,PetscInt,PetscInt,PetscInt,const PetscInt[],PetscInt,const PetscInt[],Mat*);
extern PetscErrorCode MatCreateMPISBAIJWithArrays(MPI_Comm,PetscInt,PetscInt,PetscInt,PetscInt,PetscInt,const PetscInt[],const PetscInt[],const PetscScalar[],Mat *);
extern PetscErrorCode MatMPISBAIJSetPreallocationCSR(Mat,PetscInt,const PetscInt[],const PetscInt[],const PetscScalar[]);
extern PetscErrorCode MatXAIJSetPreallocation(Mat,PetscInt,const PetscInt*,const PetscInt*,const PetscInt*,const PetscInt*);

extern PetscErrorCode MatCreateShell(MPI_Comm,PetscInt,PetscInt,PetscInt,PetscInt,void *,Mat*);
extern PetscErrorCode MatCreateAdic(MPI_Comm,PetscInt,PetscInt,PetscInt,PetscInt,PetscInt,void (*)(void),Mat*);
extern PetscErrorCode MatCreateNormal(Mat,Mat*);
extern PetscErrorCode MatCreateLRC(Mat,Mat,Mat,Mat*);
extern PetscErrorCode MatCreateIS(MPI_Comm,PetscInt,PetscInt,PetscInt,PetscInt,ISLocalToGlobalMapping,Mat*);
extern PetscErrorCode MatCreateSeqAIJCRL(MPI_Comm,PetscInt,PetscInt,PetscInt,const PetscInt[],Mat*);
extern PetscErrorCode MatCreateMPIAIJCRL(MPI_Comm,PetscInt,PetscInt,PetscInt,const PetscInt[],PetscInt,const PetscInt[],Mat*);

extern PetscErrorCode MatCreateSeqBSTRM(MPI_Comm,PetscInt,PetscInt,PetscInt,PetscInt,const PetscInt[],Mat*);
extern PetscErrorCode MatCreateMPIBSTRM(MPI_Comm,PetscInt,PetscInt,PetscInt,PetscInt,PetscInt,PetscInt,const PetscInt[],PetscInt,const PetscInt[],Mat*);
extern PetscErrorCode MatCreateSeqSBSTRM(MPI_Comm,PetscInt,PetscInt,PetscInt,PetscInt,const PetscInt[],Mat*);
extern PetscErrorCode MatCreateMPISBSTRM(MPI_Comm,PetscInt,PetscInt,PetscInt,PetscInt,PetscInt,PetscInt,const PetscInt[],PetscInt,const PetscInt[],Mat*);

extern PetscErrorCode MatCreateScatter(MPI_Comm,VecScatter,Mat*);
extern PetscErrorCode MatScatterSetVecScatter(Mat,VecScatter);
extern PetscErrorCode MatScatterGetVecScatter(Mat,VecScatter*);
extern PetscErrorCode MatCreateBlockMat(MPI_Comm,PetscInt,PetscInt,PetscInt,PetscInt,PetscInt*,Mat*);
extern PetscErrorCode MatCompositeAddMat(Mat,Mat);
extern PetscErrorCode MatCompositeMerge(Mat);
extern PetscErrorCode MatCreateComposite(MPI_Comm,PetscInt,const Mat*,Mat*);
typedef enum {MAT_COMPOSITE_ADDITIVE,MAT_COMPOSITE_MULTIPLICATIVE} MatCompositeType;
extern PetscErrorCode MatCompositeSetType(Mat,MatCompositeType);

extern PetscErrorCode MatCreateFFT(MPI_Comm,PetscInt,const PetscInt[],const MatType,Mat*);
extern PetscErrorCode MatCreateSeqCUFFT(MPI_Comm,PetscInt,const PetscInt[],Mat*);

extern PetscErrorCode MatCreateTranspose(Mat,Mat*);
extern PetscErrorCode MatCreateSubMatrix(Mat,IS,IS,Mat*);
extern PetscErrorCode MatSubMatrixUpdate(Mat,Mat,IS,IS);
extern PetscErrorCode MatCreateLocalRef(Mat,IS,IS,Mat*);

extern PetscErrorCode MatPythonSetType(Mat,const char[]);

extern PetscErrorCode MatSetUp(Mat);
extern PetscErrorCode MatDestroy(Mat*);

extern PetscErrorCode MatConjugate(Mat);
extern PetscErrorCode
MatRealPart(Mat);
extern PetscErrorCode MatImaginaryPart(Mat);
extern PetscErrorCode MatGetDiagonalBlock(Mat,Mat*);
extern PetscErrorCode MatGetTrace(Mat,PetscScalar*);
extern PetscErrorCode MatInvertBlockDiagonal(Mat,PetscScalar **);

/* ------------------------------------------------------------*/
extern PetscErrorCode MatSetValues(Mat,PetscInt,const PetscInt[],PetscInt,const PetscInt[],const PetscScalar[],InsertMode);
extern PetscErrorCode MatSetValuesBlocked(Mat,PetscInt,const PetscInt[],PetscInt,const PetscInt[],const PetscScalar[],InsertMode);
extern PetscErrorCode MatSetValuesRow(Mat,PetscInt,const PetscScalar[]);
extern PetscErrorCode MatSetValuesRowLocal(Mat,PetscInt,const PetscScalar[]);
extern PetscErrorCode MatSetValuesBatch(Mat,PetscInt,PetscInt,PetscInt[],const PetscScalar[]);

/*S
     MatStencil - Data structure (C struct) for storing information about a single row or
        column of a matrix as indexed on an associated grid.

     Fortran usage is different, see MatSetValuesStencil() for details.

   Level: beginner

  Concepts: matrix; linear operator

.seealso:  MatSetValuesStencil(), MatSetStencil(), MatSetValuesBlockedStencil()
S*/
typedef struct {
  PetscInt k,j,i,c;
} MatStencil;

extern PetscErrorCode MatSetValuesStencil(Mat,PetscInt,const MatStencil[],PetscInt,const MatStencil[],const PetscScalar[],InsertMode);
extern PetscErrorCode MatSetValuesBlockedStencil(Mat,PetscInt,const MatStencil[],PetscInt,const MatStencil[],const PetscScalar[],InsertMode);
extern PetscErrorCode MatSetStencil(Mat,PetscInt,const PetscInt[],const PetscInt[],PetscInt);

extern PetscErrorCode MatSetColoring(Mat,ISColoring);
extern PetscErrorCode MatSetValuesAdic(Mat,void*);
extern PetscErrorCode MatSetValuesAdifor(Mat,PetscInt,void*);

/*E
    MatAssemblyType - Indicates if the matrix is now to be used, or if you plan
     to continue to add values to it

    Level: beginner

.seealso: MatAssemblyBegin(), MatAssemblyEnd()
E*/
/* NOTE(review): the numeric values (FLUSH=1, FINAL=0) are part of the public
   API and intentionally preserved. */
typedef enum {MAT_FLUSH_ASSEMBLY=1,MAT_FINAL_ASSEMBLY=0} MatAssemblyType;
extern PetscErrorCode MatAssemblyBegin(Mat,MatAssemblyType);
extern PetscErrorCode MatAssemblyEnd(Mat,MatAssemblyType);
extern PetscErrorCode MatAssembled(Mat,PetscBool *);

/*E
    MatOption - Options that may be set for a matrix and its behavior or storage

    Level: beginner

   Any additions/changes here MUST also be made in include/finclude/petscmat.h

.seealso: MatSetOption()
E*/
typedef enum {MAT_ROW_ORIENTED,MAT_NEW_NONZERO_LOCATIONS,
              MAT_SYMMETRIC,
              MAT_STRUCTURALLY_SYMMETRIC,
              MAT_NEW_DIAGONALS,MAT_IGNORE_OFF_PROC_ENTRIES,
              MAT_NEW_NONZERO_LOCATION_ERR,
              MAT_NEW_NONZERO_ALLOCATION_ERR,MAT_USE_HASH_TABLE,
              MAT_KEEP_NONZERO_PATTERN,MAT_IGNORE_ZERO_ENTRIES,
              MAT_USE_INODES,
              MAT_HERMITIAN,
              MAT_SYMMETRY_ETERNAL,
              MAT_CHECK_COMPRESSED_ROW,
              MAT_IGNORE_LOWER_TRIANGULAR,MAT_ERROR_LOWER_TRIANGULAR,
              MAT_GETROW_UPPERTRIANGULAR,MAT_UNUSED_NONZERO_LOCATION_ERR,
              MAT_SPD,MAT_NO_OFF_PROC_ENTRIES,MAT_NO_OFF_PROC_ZERO_ROWS,
              NUM_MAT_OPTIONS} MatOption;
extern const char *MatOptions[];
extern PetscErrorCode MatSetOption(Mat,MatOption,PetscBool );
extern PetscErrorCode MatGetType(Mat,const MatType*);

extern PetscErrorCode MatGetValues(Mat,PetscInt,const PetscInt[],PetscInt,const PetscInt[],PetscScalar[]);
extern PetscErrorCode MatGetRow(Mat,PetscInt,PetscInt *,const PetscInt *[],const PetscScalar*[]);
extern PetscErrorCode MatRestoreRow(Mat,PetscInt,PetscInt *,const PetscInt *[],const PetscScalar*[]);
extern PetscErrorCode MatGetRowUpperTriangular(Mat);
extern PetscErrorCode MatRestoreRowUpperTriangular(Mat);
extern PetscErrorCode MatGetColumn(Mat,PetscInt,PetscInt *,const PetscInt *[],const PetscScalar*[]);
extern PetscErrorCode MatRestoreColumn(Mat,PetscInt,PetscInt *,const PetscInt *[],const PetscScalar*[]);
extern PetscErrorCode MatGetColumnVector(Mat,Vec,PetscInt);
extern PetscErrorCode MatGetArray(Mat,PetscScalar *[]);
extern PetscErrorCode MatRestoreArray(Mat,PetscScalar *[]);
extern PetscErrorCode MatGetBlockSize(Mat,PetscInt *);
extern PetscErrorCode MatSetBlockSize(Mat,PetscInt);
extern PetscErrorCode MatGetBlockSizes(Mat,PetscInt *,PetscInt *);
extern PetscErrorCode MatSetBlockSizes(Mat,PetscInt,PetscInt);
extern PetscErrorCode MatSetNThreads(Mat,PetscInt);
extern PetscErrorCode MatGetNThreads(Mat,PetscInt*);

extern PetscErrorCode MatMult(Mat,Vec,Vec);
extern PetscErrorCode MatMultDiagonalBlock(Mat,Vec,Vec);
extern PetscErrorCode MatMultAdd(Mat,Vec,Vec,Vec);
extern PetscErrorCode MatMultTranspose(Mat,Vec,Vec);
extern PetscErrorCode MatMultHermitianTranspose(Mat,Vec,Vec);
extern PetscErrorCode MatIsTranspose(Mat,Mat,PetscReal,PetscBool *);
extern PetscErrorCode MatIsHermitianTranspose(Mat,Mat,PetscReal,PetscBool *);
extern PetscErrorCode
MatMultTransposeAdd(Mat,Vec,Vec,Vec);
extern PetscErrorCode MatMultHermitianTransposeAdd(Mat,Vec,Vec,Vec);
extern PetscErrorCode MatMultConstrained(Mat,Vec,Vec);
extern PetscErrorCode MatMultTransposeConstrained(Mat,Vec,Vec);
extern PetscErrorCode MatMatSolve(Mat,Mat,Mat);

/*E
    MatDuplicateOption - Indicates if a duplicated sparse matrix should have
  its numerical values copied over or just its nonzero structure.

    Level: beginner

   Any additions/changes here MUST also be made in include/finclude/petscmat.h

$   MAT_SHARE_NONZERO_PATTERN - the i and j arrays in the new matrix will be shared with the original matrix
$                               this also triggers the MAT_DO_NOT_COPY_VALUES option. This is used when you
$                               have several matrices with the same nonzero pattern.

.seealso: MatDuplicate()
E*/
typedef enum {MAT_DO_NOT_COPY_VALUES,MAT_COPY_VALUES,MAT_SHARE_NONZERO_PATTERN} MatDuplicateOption;

extern PetscErrorCode MatConvert(Mat,const MatType,MatReuse,Mat*);
extern PetscErrorCode MatDuplicate(Mat,MatDuplicateOption,Mat*);

extern PetscErrorCode MatCopy(Mat,Mat,MatStructure);
extern PetscErrorCode MatView(Mat,PetscViewer);
extern PetscErrorCode MatIsSymmetric(Mat,PetscReal,PetscBool *);
extern PetscErrorCode MatIsStructurallySymmetric(Mat,PetscBool *);
extern PetscErrorCode MatIsHermitian(Mat,PetscReal,PetscBool *);
extern PetscErrorCode MatIsSymmetricKnown(Mat,PetscBool *,PetscBool *);
extern PetscErrorCode MatIsHermitianKnown(Mat,PetscBool *,PetscBool *);
extern PetscErrorCode MatMissingDiagonal(Mat,PetscBool *,PetscInt *);
extern PetscErrorCode MatLoad(Mat, PetscViewer);

extern PetscErrorCode MatGetRowIJ(Mat,PetscInt,PetscBool ,PetscBool ,PetscInt*,PetscInt *[],PetscInt *[],PetscBool *);
extern PetscErrorCode MatRestoreRowIJ(Mat,PetscInt,PetscBool ,PetscBool ,PetscInt *,PetscInt *[],PetscInt *[],PetscBool *);
extern PetscErrorCode MatGetColumnIJ(Mat,PetscInt,PetscBool ,PetscBool ,PetscInt*,PetscInt *[],PetscInt *[],PetscBool *);
extern PetscErrorCode MatRestoreColumnIJ(Mat,PetscInt,PetscBool ,PetscBool ,PetscInt *,PetscInt *[],PetscInt *[],PetscBool *);

/*S
     MatInfo - Context of matrix information, used with MatGetInfo()

   In Fortran this is simply a double precision array of dimension MAT_INFO_SIZE

   Level: intermediate

  Concepts: matrix^nonzero information

.seealso:  MatGetInfo(), MatInfoType
S*/
typedef struct {
  PetscLogDouble block_size;                         /* block size */
  PetscLogDouble nz_allocated,nz_used,nz_unneeded;   /* number of nonzeros */
  PetscLogDouble memory;                             /* memory allocated */
  PetscLogDouble assemblies;                         /* number of matrix assemblies called */
  PetscLogDouble mallocs;                            /* number of mallocs during MatSetValues() */
  PetscLogDouble fill_ratio_given,fill_ratio_needed; /* fill ratio for LU/ILU */
  PetscLogDouble factor_mallocs;                     /* number of mallocs during factorization */
} MatInfo;

/*E
    MatInfoType - Indicates if you want information about the local part of the matrix,
     the entire parallel matrix or the maximum over all the local parts.
    Level: beginner

   Any additions/changes here MUST also be made in include/finclude/petscmat.h

.seealso: MatGetInfo(), MatInfo
E*/
typedef enum {MAT_LOCAL=1,MAT_GLOBAL_MAX=2,MAT_GLOBAL_SUM=3} MatInfoType;
extern PetscErrorCode MatGetInfo(Mat,MatInfoType,MatInfo*);
extern PetscErrorCode MatGetDiagonal(Mat,Vec);
extern PetscErrorCode MatGetRowMax(Mat,Vec,PetscInt[]);
extern PetscErrorCode MatGetRowMin(Mat,Vec,PetscInt[]);
extern PetscErrorCode MatGetRowMaxAbs(Mat,Vec,PetscInt[]);
extern PetscErrorCode MatGetRowMinAbs(Mat,Vec,PetscInt[]);
extern PetscErrorCode MatGetRowSum(Mat,Vec);
extern PetscErrorCode MatTranspose(Mat,MatReuse,Mat*);
extern PetscErrorCode MatHermitianTranspose(Mat,MatReuse,Mat*);
extern PetscErrorCode MatPermute(Mat,IS,IS,Mat *);
extern PetscErrorCode MatDiagonalScale(Mat,Vec,Vec);
extern PetscErrorCode MatDiagonalSet(Mat,Vec,InsertMode);
extern PetscErrorCode MatEqual(Mat,Mat,PetscBool *);
extern PetscErrorCode MatMultEqual(Mat,Mat,PetscInt,PetscBool *);
extern PetscErrorCode MatMultAddEqual(Mat,Mat,PetscInt,PetscBool *);
extern PetscErrorCode MatMultTransposeEqual(Mat,Mat,PetscInt,PetscBool *);
extern PetscErrorCode MatMultTransposeAddEqual(Mat,Mat,PetscInt,PetscBool *);

extern PetscErrorCode MatNorm(Mat,NormType,PetscReal *);
extern PetscErrorCode MatGetColumnNorms(Mat,NormType,PetscReal *);
extern PetscErrorCode MatZeroEntries(Mat);
extern PetscErrorCode MatZeroRows(Mat,PetscInt,const PetscInt [],PetscScalar,Vec,Vec);
extern PetscErrorCode MatZeroRowsIS(Mat,IS,PetscScalar,Vec,Vec);
extern PetscErrorCode MatZeroRowsStencil(Mat,PetscInt,const MatStencil [],PetscScalar,Vec,Vec);
extern PetscErrorCode MatZeroRowsColumnsStencil(Mat,PetscInt,const MatStencil[],PetscScalar,Vec,Vec);
extern PetscErrorCode MatZeroRowsColumns(Mat,PetscInt,const PetscInt [],PetscScalar,Vec,Vec);
extern PetscErrorCode MatZeroRowsColumnsIS(Mat,IS,PetscScalar,Vec,Vec);

extern PetscErrorCode MatUseScaledForm(Mat,PetscBool );
extern PetscErrorCode MatScaleSystem(Mat,Vec,Vec);
extern PetscErrorCode MatUnScaleSystem(Mat,Vec,Vec);

extern PetscErrorCode MatGetSize(Mat,PetscInt*,PetscInt*);
extern PetscErrorCode MatGetLocalSize(Mat,PetscInt*,PetscInt*);
extern PetscErrorCode MatGetOwnershipRange(Mat,PetscInt*,PetscInt*);
extern PetscErrorCode MatGetOwnershipRanges(Mat,const PetscInt**);
extern PetscErrorCode MatGetOwnershipRangeColumn(Mat,PetscInt*,PetscInt*);
extern PetscErrorCode MatGetOwnershipRangesColumn(Mat,const PetscInt**);

extern PetscErrorCode MatGetSubMatrices(Mat,PetscInt,const IS[],const IS[],MatReuse,Mat *[]);
extern PetscErrorCode MatGetSubMatricesParallel(Mat,PetscInt,const IS[],const IS[],MatReuse,Mat *[]);
extern PetscErrorCode MatDestroyMatrices(PetscInt,Mat *[]);
extern PetscErrorCode MatGetSubMatrix(Mat,IS,IS,MatReuse,Mat *);
extern PetscErrorCode MatGetLocalSubMatrix(Mat,IS,IS,Mat*);
extern PetscErrorCode MatRestoreLocalSubMatrix(Mat,IS,IS,Mat*);
extern PetscErrorCode MatGetSeqNonzeroStructure(Mat,Mat*);
extern PetscErrorCode MatDestroySeqNonzeroStructure(Mat*);

extern PetscErrorCode MatCreateMPIAIJConcatenateSeqAIJ(MPI_Comm,Mat,PetscInt,MatReuse,Mat*);
extern PetscErrorCode MatCreateMPIAIJConcatenateSeqAIJSymbolic(MPI_Comm,Mat,PetscInt,Mat*);
extern PetscErrorCode MatCreateMPIAIJConcatenateSeqAIJNumeric(MPI_Comm,Mat,PetscInt,Mat);
extern PetscErrorCode MatCreateMPIAIJSumSeqAIJ(MPI_Comm,Mat,PetscInt,PetscInt,MatReuse,Mat*);
extern PetscErrorCode MatCreateMPIAIJSumSeqAIJSymbolic(MPI_Comm,Mat,PetscInt,PetscInt,Mat*);
extern PetscErrorCode MatCreateMPIAIJSumSeqAIJNumeric(Mat,Mat);
extern PetscErrorCode MatMPIAIJGetLocalMat(Mat,MatReuse,Mat*);
extern PetscErrorCode MatMPIAIJGetLocalMatCondensed(Mat,MatReuse,IS*,IS*,Mat*);
extern PetscErrorCode MatGetBrowsOfAcols(Mat,Mat,MatReuse,IS*,IS*,Mat*);
#if defined (PETSC_USE_CTABLE)
extern PetscErrorCode MatGetCommunicationStructs(Mat, Vec *, PetscTable *, VecScatter *);
#else
extern PetscErrorCode MatGetCommunicationStructs(Mat, Vec *, PetscInt *[], VecScatter *);
#endif
extern PetscErrorCode MatGetGhosts(Mat, PetscInt *,const PetscInt *[]);

extern PetscErrorCode MatIncreaseOverlap(Mat,PetscInt,IS[],PetscInt);

extern PetscErrorCode MatMatMult(Mat,Mat,MatReuse,PetscReal,Mat*);
extern PetscErrorCode MatMatMultSymbolic(Mat,Mat,PetscReal,Mat*);
extern PetscErrorCode MatMatMultNumeric(Mat,Mat,Mat);

extern PetscErrorCode MatPtAP(Mat,Mat,MatReuse,PetscReal,Mat*);
extern PetscErrorCode MatPtAPSymbolic(Mat,Mat,PetscReal,Mat*);
extern PetscErrorCode MatPtAPNumeric(Mat,Mat,Mat);
extern PetscErrorCode MatRARt(Mat,Mat,MatReuse,PetscReal,Mat*);
extern PetscErrorCode MatRARtSymbolic(Mat,Mat,PetscReal,Mat*);
extern PetscErrorCode MatRARtNumeric(Mat,Mat,Mat);

extern PetscErrorCode MatTransposeMatMult(Mat,Mat,MatReuse,PetscReal,Mat*);
/* NOTE(review): the two declarations below are spelled "Transposet" (sic),
   inconsistent with MatTransposeMatMult() above; renaming them in this header
   would break the source files that define them, so the inconsistency is only
   flagged here. */
extern PetscErrorCode MatTransposetMatMultSymbolic(Mat,Mat,PetscReal,Mat*);
extern PetscErrorCode MatTransposetMatMultNumeric(Mat,Mat,Mat);
extern PetscErrorCode MatMatTransposeMult(Mat,Mat,MatReuse,PetscReal,Mat*);
extern PetscErrorCode MatMatTransposeMultSymbolic(Mat,Mat,PetscReal,Mat*);
extern PetscErrorCode MatMatTransposeMultNumeric(Mat,Mat,Mat);

extern PetscErrorCode MatAXPY(Mat,PetscScalar,Mat,MatStructure);
extern PetscErrorCode MatAYPX(Mat,PetscScalar,Mat,MatStructure);

extern PetscErrorCode MatScale(Mat,PetscScalar);
extern PetscErrorCode MatShift(Mat,PetscScalar);

extern PetscErrorCode MatSetLocalToGlobalMapping(Mat,ISLocalToGlobalMapping,ISLocalToGlobalMapping);
extern PetscErrorCode MatSetLocalToGlobalMappingBlock(Mat,ISLocalToGlobalMapping,ISLocalToGlobalMapping);
extern PetscErrorCode MatGetLocalToGlobalMapping(Mat,ISLocalToGlobalMapping*,ISLocalToGlobalMapping*);
extern PetscErrorCode MatGetLocalToGlobalMappingBlock(Mat,ISLocalToGlobalMapping*,ISLocalToGlobalMapping*);
extern PetscErrorCode MatZeroRowsLocal(Mat,PetscInt,const PetscInt [],PetscScalar,Vec,Vec);
extern PetscErrorCode MatZeroRowsLocalIS(Mat,IS,PetscScalar,Vec,Vec);
extern PetscErrorCode MatZeroRowsColumnsLocal(Mat,PetscInt,const PetscInt [],PetscScalar,Vec,Vec);
extern PetscErrorCode MatZeroRowsColumnsLocalIS(Mat,IS,PetscScalar,Vec,Vec);
extern PetscErrorCode MatSetValuesLocal(Mat,PetscInt,const PetscInt[],PetscInt,const PetscInt[],const PetscScalar[],InsertMode);
extern PetscErrorCode MatSetValuesBlockedLocal(Mat,PetscInt,const PetscInt[],PetscInt,const PetscInt[],const PetscScalar[],InsertMode);

extern PetscErrorCode MatStashSetInitialSize(Mat,PetscInt,PetscInt);
extern PetscErrorCode MatStashGetInfo(Mat,PetscInt*,PetscInt*,PetscInt*,PetscInt*);

extern PetscErrorCode MatInterpolate(Mat,Vec,Vec);
extern PetscErrorCode MatInterpolateAdd(Mat,Vec,Vec,Vec);
extern PetscErrorCode MatRestrict(Mat,Vec,Vec);
extern PetscErrorCode MatGetVecs(Mat,Vec*,Vec*);
extern PetscErrorCode MatGetRedundantMatrix(Mat,PetscInt,MPI_Comm,PetscInt,MatReuse,Mat*);
extern PetscErrorCode MatGetMultiProcBlock(Mat,MPI_Comm,MatReuse,Mat*);
extern PetscErrorCode MatFindZeroDiagonals(Mat,IS*);

/*MC
   MatSetValue - Set a single entry into a matrix.

   Not collective

   Input Parameters:
+  m - the matrix
.  row - the row location of the entry
.  col - the column location of the entry
.  value - the value to insert
-  mode - either INSERT_VALUES or ADD_VALUES

   Notes:
   For efficiency one should use MatSetValues() and set several or many
   values simultaneously if possible.
   Level: beginner

.seealso: MatSetValues(), MatSetValueLocal()
M*/
PETSC_STATIC_INLINE PetscErrorCode MatSetValue(Mat v,PetscInt i,PetscInt j,PetscScalar va,InsertMode mode) {return MatSetValues(v,1,&i,1,&j,&va,mode);}

/* Get a single entry (row i, column j) of the matrix; the value is written to
   *va. Single-entry convenience wrapper around MatGetValues(). */
PETSC_STATIC_INLINE PetscErrorCode MatGetValue(Mat v,PetscInt i,PetscInt j,PetscScalar *va) {return MatGetValues(v,1,&i,1,&j,va);}

/* Set a single entry using LOCAL row/column numbering; single-entry
   convenience wrapper around MatSetValuesLocal(). */
PETSC_STATIC_INLINE PetscErrorCode MatSetValueLocal(Mat v,PetscInt i,PetscInt j,PetscScalar va,InsertMode mode) {return MatSetValuesLocal(v,1,&i,1,&j,&va,mode);}

/*MC
   MatPreallocateInitialize - Begins the block of code that will count the number of nonzeros per
       row in a matrix providing the data that one can use to correctly preallocate the matrix.

   Synopsis:
   PetscErrorCode MatPreallocateInitialize(MPI_Comm comm, PetscInt nrows, PetscInt ncols, PetscInt *dnz, PetscInt *onz)

   Collective on MPI_Comm

   Input Parameters:
+  comm - the communicator that will share the eventually allocated matrix
.  nrows - the number of LOCAL rows in the matrix
-  ncols - the number of LOCAL columns in the matrix

   Output Parameters:
+  dnz - the array that will be passed to the matrix preallocation routines
-  onz - the other array passed to the matrix preallocation routines

   Level: intermediate

   Notes:
   See the <A href="../../docs/manual.pdf#nameddest=ch_performance">Hints for Performance Improvement</A> chapter in the users manual for more details.

   Do not malloc or free dnz and onz, that is handled internally by these routines

   Use MatPreallocateInitializeSymmetric() for symmetric matrices (MPISBAIJ matrices)

   This is a MACRO not a function because it has a leading { that is closed by MatPreallocateFinalize().
  Concepts: preallocation^Matrix

.seealso: MatPreallocateFinalize(), MatPreallocateSet(), MatPreallocateSymmetricSet(), MatPreallocateSetLocal(),
          MatPreallocateInitializeSymmetric(), MatPreallocateSymmetricSetLocal()
M*/
/* Opens a scope (closed by MatPreallocateFinalize()); allocates and zeroes the
   dnz/onz counting arrays and computes this rank's row/column ownership ranges
   (__rstart, __start, __end) via MPI_Scan.  _4_ierr and the __-prefixed
   locals are shared with the other MatPreallocate* macros below. */
#define MatPreallocateInitialize(comm,nrows,ncols,dnz,onz) 0; \
{ \
  PetscErrorCode _4_ierr; PetscInt __nrows = (nrows),__ctmp = (ncols),__rstart,__start,__end; \
  _4_ierr = PetscMalloc2(__nrows,PetscInt,&dnz,__nrows,PetscInt,&onz);CHKERRQ(_4_ierr); \
  _4_ierr = PetscMemzero(dnz,__nrows*sizeof(PetscInt));CHKERRQ(_4_ierr);\
  _4_ierr = PetscMemzero(onz,__nrows*sizeof(PetscInt));CHKERRQ(_4_ierr); __start = 0; __end = __start; \
  _4_ierr = MPI_Scan(&__ctmp,&__end,1,MPIU_INT,MPI_SUM,comm);CHKERRQ(_4_ierr); __start = __end - __ctmp;\
  _4_ierr = MPI_Scan(&__nrows,&__rstart,1,MPIU_INT,MPI_SUM,comm);CHKERRQ(_4_ierr); __rstart = __rstart - __nrows;

/*MC
   MatPreallocateSetLocal - Indicates the locations (rows and columns) in the matrix where nonzeros will be
       inserted using a local number of the rows and columns

   Synopsis:
   PetscErrorCode MatPreallocateSetLocal(ISLocalToGlobalMapping rmap,PetscInt nrows,PetscInt *rows,ISLocalToGlobalMapping cmap,PetscInt ncols,PetscInt *cols,PetscInt *dnz,PetscInt *onz)

   Not Collective

   Input Parameters:
+  rmap - the row mapping from local numbering to global numbering
.  nrows - the number of rows indicated
.  rows - the indices of the rows
.  cmap - the column mapping from local to global numbering
.  ncols - the number of columns in the matrix
.  cols - the columns indicated
.  dnz - the array that will be passed to the matrix preallocation routines
-  onz - the other array passed to the matrix preallocation routines

   Level: intermediate

   Notes:
   See the <A href="../../docs/manual.pdf#nameddest=ch_performance">Hints for Performance Improvement</A> chapter in the users manual for more details.
684 685 Do not malloc or free dnz and onz, that is handled internally by these routines 686 687 Concepts: preallocation^Matrix 688 689 .seealso: MatPreallocateFinalize(), MatPreallocateSet(), MatPreallocateSymmetricSet(), MatPreallocateInitialize(), 690 MatPreallocateInitialize(), MatPreallocateSymmetricSetLocal() 691 M*/ 692 #define MatPreallocateSetLocal(rmap,nrows,rows,cmap,ncols,cols,dnz,onz) 0; \ 693 {\ 694 PetscInt __l;\ 695 _4_ierr = ISLocalToGlobalMappingApply(rmap,nrows,rows,rows);CHKERRQ(_4_ierr);\ 696 _4_ierr = ISLocalToGlobalMappingApply(cmap,ncols,cols,cols);CHKERRQ(_4_ierr);\ 697 for (__l=0;__l<nrows;__l++) {\ 698 _4_ierr = MatPreallocateSet((rows)[__l],ncols,cols,dnz,onz);CHKERRQ(_4_ierr);\ 699 }\ 700 } 701 702 /*MC 703 MatPreallocateSymmetricSetLocal - Indicates the locations (rows and columns) in the matrix where nonzeros will be 704 inserted using a local number of the rows and columns 705 706 Synopsis: 707 PetscErrorCode MatPreallocateSymmetricSetLocal(ISLocalToGlobalMapping map,PetscInt nrows, PetscInt *rows,PetscInt ncols, PetscInt *cols,PetscInt *dnz, PetscInt *onz) 708 709 Not Collective 710 711 Input Parameters: 712 + map - the mapping between local numbering and global numbering 713 . nrows - the number of rows indicated 714 . rows - the indices of the rows 715 . ncols - the number of columns in the matrix 716 . cols - the columns indicated 717 . dnz - the array that will be passed to the matrix preallocation routines 718 - onz - the other array passed to the matrix preallocation routines 719 720 721 Level: intermediate 722 723 Notes: 724 See the <A href="../../docs/manual.pdf#nameddest=ch_performance">Hints for Performance Improvement</A> chapter in the users manual for more details.
725 726 Do not malloc or free dnz and onz that is handled internally by these routines 727 728 Concepts: preallocation^Matrix 729 730 .seealso: MatPreallocateFinalize(), MatPreallocateSet(), MatPreallocateSymmetricSet(), MatPreallocateInitialize(), 731 MatPreallocateInitialize(), MatPreallocateSymmetricSetLocal(), MatPreallocateSetLocal() 732 M*/ 733 #define MatPreallocateSymmetricSetLocal(map,nrows,rows,ncols,cols,dnz,onz) 0;\ 734 {\ 735 PetscInt __l;\ 736 _4_ierr = ISLocalToGlobalMappingApply(map,nrows,rows,rows);CHKERRQ(_4_ierr);\ 737 _4_ierr = ISLocalToGlobalMappingApply(map,ncols,cols,cols);CHKERRQ(_4_ierr);\ 738 for (__l=0;__l<nrows;__l++) {\ 739 _4_ierr = MatPreallocateSymmetricSet((rows)[__l],ncols,cols,dnz,onz);CHKERRQ(_4_ierr);\ 740 }\ 741 } 742 743 /*MC 744 MatPreallocateSet - Indicates the locations (rows and columns) in the matrix where nonzeros will be 745 inserted using a local number of the rows and columns 746 747 Synopsis: 748 PetscErrorCode MatPreallocateSet(PetscInt row,PetscInt ncols,PetscInt *cols,PetscInt *dnz,PetscInt *onz) 749 750 Not Collective 751 752 Input Parameters: 753 + row - the row 754 . ncols - the number of columns in the matrix 755 - cols - the columns indicated 756 757 Output Parameters: 758 + dnz - the array that will be passed to the matrix preallocation routines 759 - onz - the other array passed to the matrix preallocation routines 760 761 762 Level: intermediate 763 764 Notes: 765 See the <A href="../../docs/manual.pdf#nameddest=ch_performance">Hints for Performance Improvement</A> chapter in the users manual for more details. 766 767 Do not malloc or free dnz and onz that is handled internally by these routines 768 769 This is a MACRO not a function because it uses variables declared in MatPreallocateInitialize().
770 771 Concepts: preallocation^Matrix 772 773 .seealso: MatPreallocateFinalize(), MatPreallocateSet(), MatPreallocateSymmetricSet(), MatPreallocateInitialize(), 774 MatPreallocateInitialize(), MatPreallocateSymmetricSetLocal(), MatPreallocateSetLocal() 775 M*/ 776 #define MatPreallocateSet(row,nc,cols,dnz,onz) 0;\ 777 { PetscInt __i; \ 778 if (row < __rstart) SETERRQ2(PETSC_COMM_SELF,PETSC_ERR_ARG_OUTOFRANGE,"Trying to set preallocation for row %D less than first local row %D",row,__rstart);\ 779 if (row >= __rstart+__nrows) SETERRQ2(PETSC_COMM_SELF,PETSC_ERR_ARG_OUTOFRANGE,"Trying to set preallocation for row %D greater than last local row %D",row,__rstart+__nrows-1);\ 780 for (__i=0; __i<nc; __i++) {\ 781 if ((cols)[__i] < __start || (cols)[__i] >= __end) onz[row - __rstart]++; \ 782 else dnz[row - __rstart]++;\ 783 }\ 784 } 785 786 /*MC 787 MatPreallocateSymmetricSet - Indicates the locations (rows and columns) in the matrix where nonzeros will be 788 inserted using a local number of the rows and columns 789 790 Synopsis: 791 PetscErrorCode MatPreallocateSymmetricSet(PetscInt row,PetscInt ncols,PetscInt *cols,PetscInt *dnz,PetscInt *onz) 792 793 Not Collective 794 795 Input Parameters: 796 + row - the row 797 . ncols - the number of columns in the matrix 798 . cols - the columns indicated 799 . dnz - the array that will be passed to the matrix preallocation routines 800 - onz - the other array passed to the matrix preallocation routines 801 802 803 804 Level: intermediate 805 806 Notes: 807 See the <A href="../../docs/manual.pdf#nameddest=ch_performance">Hints for Performance Improvement</A> chapter in the users manual for more details. 808 809 Do not malloc or free dnz and onz that is handled internally by these routines 810 811 This is a MACRO not a function because it uses variables declared in MatPreallocateInitialize().
812 813 Concepts: preallocation^Matrix 814 815 .seealso: MatPreallocateFinalize(), MatPreallocateSet(), MatPreallocateSymmetricSet(), MatPreallocateInitialize(), 816 MatPreallocateInitialize(), MatPreallocateSymmetricSetLocal(), MatPreallocateSetLocal() 817 M*/ 818 #define MatPreallocateSymmetricSet(row,nc,cols,dnz,onz) 0;\ 819 { PetscInt __i; \ 820 for (__i=0; __i<nc; __i++) {\ 821 if (cols[__i] >= __end) onz[row - __rstart]++; \ 822 else if (cols[__i] >= row) dnz[row - __rstart]++;\ 823 }\ 824 } 825 826 /*MC 827 MatPreallocateLocation - An alternative to MatPreallocateSet() that puts the nonzero locations into the matrix if it exists 828 829 Synopsis: 830 PetscErrorCode MatPreallocateLocation(Mat A,PetscInt row,PetscInt ncols,PetscInt *cols,PetscInt *dnz,PetscInt *onz) 831 832 Not Collective 833 834 Input Parameters: 835 . A - matrix 836 . row - row where values exist (must be local to this process) 837 . ncols - number of columns 838 . cols - columns with nonzeros 839 . dnz - the array that will be passed to the matrix preallocation routines 840 - onz - the other array passed to the matrix preallocation routines 841 842 843 Level: intermediate 844 845 Notes: 846 See the <A href="../../docs/manual.pdf#nameddest=ch_performance">Hints for Performance Improvement</A> chapter in the users manual for more details. 847 848 Do not malloc or free dnz and onz that is handled internally by these routines 849 850 This is a MACRO not a function because it uses a bunch of variables private to the MatPreallocate.... routines.
851 852 Concepts: preallocation^Matrix 853 854 .seealso: MatPreallocateInitialize(), MatPreallocateSet(), MatPreallocateSymmetricSet(), MatPreallocateSetLocal(), 855 MatPreallocateSymmetricSetLocal() 856 M*/ 857 /* NOTE(review): unlike the other MatPreallocate macros (which use _4_ierr declared by MatPreallocateInitialize()), this macro assigns to 'ierr', so a PetscErrorCode ierr must already be in scope at the call site -- confirm this is intended */ #define MatPreallocateLocation(A,row,ncols,cols,dnz,onz) 0;if (A) {ierr = MatSetValues(A,1,&row,ncols,cols,PETSC_NULL,INSERT_VALUES);CHKERRQ(ierr);} else {ierr = MatPreallocateSet(row,ncols,cols,dnz,onz);CHKERRQ(ierr);} 858 859 860 /*MC 861 MatPreallocateFinalize - Ends the block of code that will count the number of nonzeros per 862 row in a matrix providing the data that one can use to correctly preallocate the matrix. 863 864 Synopsis: 865 PetscErrorCode MatPreallocateFinalize(PetscInt *dnz, PetscInt *onz) 866 867 Collective on MPI_Comm 868 869 Input Parameters: 870 + dnz - the array that was passed to the matrix preallocation routines 871 - onz - the other array passed to the matrix preallocation routines 872 873 874 Level: intermediate 875 876 Notes: 877 See the <A href="../../docs/manual.pdf#nameddest=ch_performance">Hints for Performance Improvement</A> chapter in the users manual for more details. 878 879 Do not malloc or free dnz and onz that is handled internally by these routines 880 881 This is a MACRO not a function because it closes the { started in MatPreallocateInitialize().
882 883 Concepts: preallocation^Matrix 884 885 .seealso: MatPreallocateInitialize(), MatPreallocateSet(), MatPreallocateSymmetricSet(), MatPreallocateSetLocal(), 886 MatPreallocateSymmetricSetLocal() 887 M*/ 888 #define MatPreallocateFinalize(dnz,onz) 0;_4_ierr = PetscFree2(dnz,onz);CHKERRQ(_4_ierr);} 889 890 891 892 /* Routines unique to particular data structures */ 893 extern PetscErrorCode MatShellGetContext(Mat,void *); 894 895 extern PetscErrorCode MatInodeAdjustForInodes(Mat,IS*,IS*); 896 extern PetscErrorCode MatInodeGetInodeSizes(Mat,PetscInt *,PetscInt *[],PetscInt *); 897 898 extern PetscErrorCode MatSeqAIJSetColumnIndices(Mat,PetscInt[]); 899 extern PetscErrorCode MatSeqBAIJSetColumnIndices(Mat,PetscInt[]); 900 extern PetscErrorCode MatCreateSeqAIJWithArrays(MPI_Comm,PetscInt,PetscInt,PetscInt[],PetscInt[],PetscScalar[],Mat*); 901 extern PetscErrorCode MatCreateSeqBAIJWithArrays(MPI_Comm,PetscInt,PetscInt,PetscInt,PetscInt[],PetscInt[],PetscScalar[],Mat*); 902 extern PetscErrorCode MatCreateSeqSBAIJWithArrays(MPI_Comm,PetscInt,PetscInt,PetscInt,PetscInt[],PetscInt[],PetscScalar[],Mat*); 903 extern PetscErrorCode MatCreateSeqAIJFromTriple(MPI_Comm,PetscInt,PetscInt,PetscInt[],PetscInt[],PetscScalar[],Mat*,PetscInt,PetscBool); 904 905 #define MAT_SKIP_ALLOCATION -4 906 907 extern PetscErrorCode MatSeqBAIJSetPreallocation(Mat,PetscInt,PetscInt,const PetscInt[]); 908 extern PetscErrorCode MatSeqSBAIJSetPreallocation(Mat,PetscInt,PetscInt,const PetscInt[]); 909 extern PetscErrorCode MatSeqAIJSetPreallocation(Mat,PetscInt,const PetscInt[]); 910 911 extern PetscErrorCode MatMPIBAIJSetPreallocation(Mat,PetscInt,PetscInt,const PetscInt[],PetscInt,const PetscInt[]); 912 extern PetscErrorCode MatMPISBAIJSetPreallocation(Mat,PetscInt,PetscInt,const PetscInt[],PetscInt,const PetscInt[]); 913 extern PetscErrorCode MatMPIAIJSetPreallocation(Mat,PetscInt,const PetscInt[],PetscInt,const PetscInt[]); 914 extern PetscErrorCode MatSeqAIJSetPreallocationCSR(Mat,const 
PetscInt [],const PetscInt [],const PetscScalar []); 915 extern PetscErrorCode MatSeqBAIJSetPreallocationCSR(Mat,PetscInt,const PetscInt[],const PetscInt[],const PetscScalar[]); 916 extern PetscErrorCode MatMPIAIJSetPreallocationCSR(Mat,const PetscInt[],const PetscInt[],const PetscScalar[]); 917 extern PetscErrorCode MatMPIBAIJSetPreallocationCSR(Mat,PetscInt,const PetscInt[],const PetscInt[],const PetscScalar[]); 918 extern PetscErrorCode MatMPIAdjSetPreallocation(Mat,PetscInt[],PetscInt[],PetscInt[]); 919 extern PetscErrorCode MatMPIDenseSetPreallocation(Mat,PetscScalar[]); 920 extern PetscErrorCode MatSeqDenseSetPreallocation(Mat,PetscScalar[]); 921 extern PetscErrorCode MatMPIAIJGetSeqAIJ(Mat,Mat*,Mat*,PetscInt*[]); 922 extern PetscErrorCode MatMPIBAIJGetSeqBAIJ(Mat,Mat*,Mat*,PetscInt*[]); 923 extern PetscErrorCode MatAdicSetLocalFunction(Mat,void (*)(void)); 924 extern PetscErrorCode MatMPIAdjCreateNonemptySubcommMat(Mat,Mat*); 925 926 extern PetscErrorCode MatSeqDenseSetLDA(Mat,PetscInt); 927 extern PetscErrorCode MatDenseGetLocalMatrix(Mat,Mat*); 928 929 extern PetscErrorCode MatStoreValues(Mat); 930 extern PetscErrorCode MatRetrieveValues(Mat); 931 932 extern PetscErrorCode MatDAADSetCtx(Mat,void*); 933 934 extern PetscErrorCode MatFindNonzeroRows(Mat,IS*); 935 /* 936 These routines are not usually accessed directly, rather solving is 937 done through the KSP and PC interfaces. 
938 */ 939 940 /*J 941 MatOrderingType - String with the name of a PETSc matrix ordering or the creation function 942 with an optional dynamic library name, for example 943 http://www.mcs.anl.gov/petsc/lib.a:orderingcreate() 944 945 Level: beginner 946 947 Cannot use const because the PC objects manipulate the string 948 949 .seealso: MatGetOrdering() 950 J*/ 951 #define MatOrderingType char* 952 #define MATORDERINGNATURAL "natural" 953 #define MATORDERINGND "nd" 954 #define MATORDERING1WD "1wd" 955 #define MATORDERINGRCM "rcm" 956 #define MATORDERINGQMD "qmd" 957 #define MATORDERINGROWLENGTH "rowlength" 958 #define MATORDERINGAMD "amd" /* only works if UMFPACK is installed with PETSc */ 959 960 extern PetscErrorCode MatGetOrdering(Mat,const MatOrderingType,IS*,IS*); 961 extern PetscErrorCode MatGetOrderingList(PetscFList *list); 962 extern PetscErrorCode MatOrderingRegister(const char[],const char[],const char[],PetscErrorCode(*)(Mat,const MatOrderingType,IS*,IS*)); 963 964 /*MC 965 MatOrderingRegisterDynamic - Adds a new sparse matrix ordering to the matrix package. 966 967 Synopsis: 968 PetscErrorCode MatOrderingRegisterDynamic(const char *name_ordering,const char *path,const char *name_create,PetscErrorCode (*routine_create)(MatOrdering)) 969 970 Not Collective 971 972 Input Parameters: 973 + sname - name of ordering (for example MATORDERINGND) 974 . path - location of library where creation routine is 975 . name - name of function that creates the ordering type,a string 976 - function - function pointer that creates the ordering 977 978 Level: developer 979 980 If dynamic libraries are used, then the fourth input argument (function) 981 is ignored. 
982 983 Sample usage: 984 .vb 985 MatOrderingRegisterDynamic("my_order",/home/username/my_lib/lib/libO/solaris/mylib.a, 986 "MyOrder",MyOrder); 987 .ve 988 989 Then, your ordering can be chosen with the procedural interface via 990 $ MatOrderingSetType(part,"my_order") 991 or at runtime via the option 992 $ -pc_factor_mat_ordering_type my_order 993 994 ${PETSC_ARCH} occurring in pathname will be replaced with appropriate values. 995 996 .keywords: matrix, ordering, register 997 998 .seealso: MatOrderingRegisterDestroy(), MatOrderingRegisterAll() 999 M*/ 1000 #if defined(PETSC_USE_DYNAMIC_LIBRARIES) 1001 #define MatOrderingRegisterDynamic(a,b,c,d) MatOrderingRegister(a,b,c,0) 1002 #else 1003 #define MatOrderingRegisterDynamic(a,b,c,d) MatOrderingRegister(a,b,c,d) 1004 #endif 1005 1006 extern PetscErrorCode MatOrderingRegisterDestroy(void); 1007 extern PetscErrorCode MatOrderingRegisterAll(const char[]); 1008 extern PetscBool MatOrderingRegisterAllCalled; 1009 extern PetscFList MatOrderingList; 1010 1011 extern PetscErrorCode MatReorderForNonzeroDiagonal(Mat,PetscReal,IS,IS); 1012 1013 /*S 1014 MatFactorShiftType - Numeric Shift. 1015 1016 Level: beginner 1017 1018 S*/ 1019 typedef enum {MAT_SHIFT_NONE,MAT_SHIFT_NONZERO,MAT_SHIFT_POSITIVE_DEFINITE,MAT_SHIFT_INBLOCKS} MatFactorShiftType; 1020 extern const char *MatFactorShiftTypes[]; 1021 1022 /*S 1023 MatFactorInfo - Data passed into the matrix factorization routines 1024 1025 In Fortran these are simply double precision arrays of size MAT_FACTORINFO_SIZE, that is use 1026 $ MatFactorInfo info(MAT_FACTORINFO_SIZE) 1027 1028 Notes: These are not usually directly used by users, instead use PC type of LU, ILU, CHOLESKY or ICC. 1029 1030 You can use MatFactorInfoInitialize() to set default values.
1031 1032 Level: developer 1033 1034 .seealso: MatLUFactorSymbolic(), MatILUFactorSymbolic(), MatCholeskyFactorSymbolic(), MatICCFactorSymbolic(), MatICCFactor(), 1035 MatFactorInfoInitialize() 1036 1037 S*/ 1038 typedef struct { 1039 PetscReal diagonal_fill; /* force diagonal to fill in if initially not filled */ 1040 PetscReal usedt; 1041 PetscReal dt; /* drop tolerance */ 1042 PetscReal dtcol; /* tolerance for pivoting */ 1043 PetscReal dtcount; /* maximum nonzeros to be allowed per row */ 1044 PetscReal fill; /* expected fill, nonzeros in factored matrix/nonzeros in original matrix */ 1045 PetscReal levels; /* ICC/ILU(levels) */ 1046 PetscReal pivotinblocks; /* for BAIJ and SBAIJ matrices pivot in factorization on blocks, default 1.0 1047 factorization may be faster if do not pivot */ 1048 PetscReal zeropivot; /* pivot is called zero if less than this */ 1049 PetscReal shifttype; /* type of shift added to matrix factor to prevent zero pivots */ 1050 PetscReal shiftamount; /* how large the shift is */ 1051 } MatFactorInfo; 1052 1053 extern PetscErrorCode MatFactorInfoInitialize(MatFactorInfo*); 1054 extern PetscErrorCode MatCholeskyFactor(Mat,IS,const MatFactorInfo*); 1055 extern PetscErrorCode MatCholeskyFactorSymbolic(Mat,Mat,IS,const MatFactorInfo*); 1056 extern PetscErrorCode MatCholeskyFactorNumeric(Mat,Mat,const MatFactorInfo*); 1057 extern PetscErrorCode MatLUFactor(Mat,IS,IS,const MatFactorInfo*); 1058 extern PetscErrorCode MatILUFactor(Mat,IS,IS,const MatFactorInfo*); 1059 extern PetscErrorCode MatLUFactorSymbolic(Mat,Mat,IS,IS,const MatFactorInfo*); 1060 extern PetscErrorCode MatILUFactorSymbolic(Mat,Mat,IS,IS,const MatFactorInfo*); 1061 extern PetscErrorCode MatICCFactorSymbolic(Mat,Mat,IS,const MatFactorInfo*); 1062 extern PetscErrorCode MatICCFactor(Mat,IS,const MatFactorInfo*); 1063 extern PetscErrorCode MatLUFactorNumeric(Mat,Mat,const MatFactorInfo*); 1064 extern PetscErrorCode MatGetInertia(Mat,PetscInt*,PetscInt*,PetscInt*); 1065 extern 
PetscErrorCode MatSolve(Mat,Vec,Vec); 1066 extern PetscErrorCode MatForwardSolve(Mat,Vec,Vec); 1067 extern PetscErrorCode MatBackwardSolve(Mat,Vec,Vec); 1068 extern PetscErrorCode MatSolveAdd(Mat,Vec,Vec,Vec); 1069 extern PetscErrorCode MatSolveTranspose(Mat,Vec,Vec); 1070 extern PetscErrorCode MatSolveTransposeAdd(Mat,Vec,Vec,Vec); 1071 extern PetscErrorCode MatSolves(Mat,Vecs,Vecs); 1072 1073 extern PetscErrorCode MatSetUnfactored(Mat); 1074 1075 /*E 1076 MatSORType - What type of (S)SOR to perform 1077 1078 Level: beginner 1079 1080 May be bitwise ORd together 1081 1082 Any additions/changes here MUST also be made in include/finclude/petscmat.h 1083 1084 MatSORType may be bitwise ORd together, so do not change the numbers 1085 1086 .seealso: MatSOR() 1087 E*/ 1088 typedef enum {SOR_FORWARD_SWEEP=1,SOR_BACKWARD_SWEEP=2,SOR_SYMMETRIC_SWEEP=3, 1089 SOR_LOCAL_FORWARD_SWEEP=4,SOR_LOCAL_BACKWARD_SWEEP=8, 1090 SOR_LOCAL_SYMMETRIC_SWEEP=12,SOR_ZERO_INITIAL_GUESS=16, 1091 SOR_EISENSTAT=32,SOR_APPLY_UPPER=64,SOR_APPLY_LOWER=128} MatSORType; 1092 extern PetscErrorCode MatSOR(Mat,Vec,PetscReal,MatSORType,PetscReal,PetscInt,PetscInt,Vec); 1093 1094 /* 1095 These routines are for efficiently computing Jacobians via finite differences. 
1096 */ 1097 1098 /*J 1099 MatColoringType - String with the name of a PETSc matrix coloring or the creation function 1100 with an optional dynamic library name, for example 1101 http://www.mcs.anl.gov/petsc/lib.a:coloringcreate() 1102 1103 Level: beginner 1104 1105 .seealso: MatGetColoring() 1106 J*/ 1107 #define MatColoringType char* 1108 #define MATCOLORINGNATURAL "natural" 1109 #define MATCOLORINGSL "sl" 1110 #define MATCOLORINGLF "lf" 1111 #define MATCOLORINGID "id" 1112 1113 extern PetscErrorCode MatGetColoring(Mat,const MatColoringType,ISColoring*); 1114 extern PetscErrorCode MatColoringRegister(const char[],const char[],const char[],PetscErrorCode(*)(Mat,MatColoringType,ISColoring *)); 1115 1116 /*MC 1117 MatColoringRegisterDynamic - Adds a new sparse matrix coloring to the 1118 matrix package. 1119 1120 Synopsis: 1121 PetscErrorCode MatColoringRegisterDynamic(const char *name_coloring,const char *path,const char *name_create,PetscErrorCode (*routine_create)(MatColoring)) 1122 1123 Not Collective 1124 1125 Input Parameters: 1126 + sname - name of Coloring (for example MATCOLORINGSL) 1127 . path - location of library where creation routine is 1128 . name - name of function that creates the Coloring type, a string 1129 - function - function pointer that creates the coloring 1130 1131 Level: developer 1132 1133 If dynamic libraries are used, then the fourth input argument (function) 1134 is ignored. 1135 1136 Sample usage: 1137 .vb 1138 MatColoringRegisterDynamic("my_color",/home/username/my_lib/lib/libO/solaris/mylib.a, 1139 "MyColor",MyColor); 1140 .ve 1141 1142 Then, your partitioner can be chosen with the procedural interface via 1143 $ MatColoringSetType(part,"my_color") 1144 or at runtime via the option 1145 $ -mat_coloring_type my_color 1146 1147 $PETSC_ARCH occuring in pathname will be replaced with appropriate values. 
1148 1149 .keywords: matrix, Coloring, register 1150 1151 .seealso: MatColoringRegisterDestroy(), MatColoringRegisterAll() 1152 M*/ 1153 #if defined(PETSC_USE_DYNAMIC_LIBRARIES) 1154 #define MatColoringRegisterDynamic(a,b,c,d) MatColoringRegister(a,b,c,0) 1155 #else 1156 #define MatColoringRegisterDynamic(a,b,c,d) MatColoringRegister(a,b,c,d) 1157 #endif 1158 1159 extern PetscBool MatColoringRegisterAllCalled; 1160 1161 extern PetscErrorCode MatColoringRegisterAll(const char[]); 1162 extern PetscErrorCode MatColoringRegisterDestroy(void); 1163 extern PetscErrorCode MatColoringPatch(Mat,PetscInt,PetscInt,ISColoringValue[],ISColoring*); 1164 1165 /*S 1166 MatFDColoring - Object for computing a sparse Jacobian via finite differences 1167 and coloring 1168 1169 Level: beginner 1170 1171 Concepts: coloring, sparse Jacobian, finite differences 1172 1173 .seealso: MatFDColoringCreate() 1174 S*/ 1175 typedef struct _p_MatFDColoring* MatFDColoring; 1176 1177 extern PetscErrorCode MatFDColoringCreate(Mat,ISColoring,MatFDColoring *); 1178 extern PetscErrorCode MatFDColoringDestroy(MatFDColoring*); 1179 extern PetscErrorCode MatFDColoringView(MatFDColoring,PetscViewer); 1180 extern PetscErrorCode MatFDColoringSetFunction(MatFDColoring,PetscErrorCode (*)(void),void*); 1181 extern PetscErrorCode MatFDColoringGetFunction(MatFDColoring,PetscErrorCode (**)(void),void**); 1182 extern PetscErrorCode MatFDColoringSetParameters(MatFDColoring,PetscReal,PetscReal); 1183 extern PetscErrorCode MatFDColoringSetFromOptions(MatFDColoring); 1184 extern PetscErrorCode MatFDColoringApply(Mat,MatFDColoring,Vec,MatStructure*,void *); 1185 extern PetscErrorCode MatFDColoringSetF(MatFDColoring,Vec); 1186 extern PetscErrorCode MatFDColoringGetPerturbedColumns(MatFDColoring,PetscInt*,PetscInt*[]); 1187 1188 /*S 1189 MatTransposeColoring - Object for computing a sparse matrix product C=A*B^T via coloring 1190 1191 Level: beginner 1192 1193 Concepts: coloring, sparse matrix product 1194 1195 .seealso: 
MatTransposeColoringCreate() 1196 S*/ 1197 typedef struct _p_MatTransposeColoring* MatTransposeColoring; 1198 1199 extern PetscErrorCode MatTransposeColoringCreate(Mat,ISColoring,MatTransposeColoring *); 1200 extern PetscErrorCode MatTransColoringApplySpToDen(MatTransposeColoring,Mat,Mat); 1201 extern PetscErrorCode MatTransColoringApplyDenToSp(MatTransposeColoring,Mat,Mat); 1202 extern PetscErrorCode MatTransposeColoringDestroy(MatTransposeColoring*); 1203 1204 /* 1205 These routines are for partitioning matrices: currently used only 1206 for adjacency matrix, MatCreateMPIAdj(). 1207 */ 1208 1209 /*S 1210 MatPartitioning - Object for managing the partitioning of a matrix or graph 1211 1212 Level: beginner 1213 1214 Concepts: partitioning 1215 1216 .seealso: MatPartitioningCreate(), MatPartitioningType 1217 S*/ 1218 typedef struct _p_MatPartitioning* MatPartitioning; 1219 1220 /*J 1221 MatPartitioningType - String with the name of a PETSc matrix partitioning or the creation function 1222 with an optional dynamic library name, for example 1223 http://www.mcs.anl.gov/petsc/lib.a:partitioningcreate() 1224 1225 Level: beginner 1226 1227 .seealso: MatPartitioningCreate(), MatPartitioning 1228 J*/ 1229 #define MatPartitioningType char* 1230 #define MATPARTITIONINGCURRENT "current" 1231 #define MATPARTITIONINGSQUARE "square" 1232 #define MATPARTITIONINGPARMETIS "parmetis" 1233 #define MATPARTITIONINGCHACO "chaco" 1234 #define MATPARTITIONINGPARTY "party" 1235 #define MATPARTITIONINGPTSCOTCH "ptscotch" 1236 1237 1238 extern PetscErrorCode MatPartitioningCreate(MPI_Comm,MatPartitioning*); 1239 extern PetscErrorCode MatPartitioningSetType(MatPartitioning,const MatPartitioningType); 1240 extern PetscErrorCode MatPartitioningSetNParts(MatPartitioning,PetscInt); 1241 extern PetscErrorCode MatPartitioningSetAdjacency(MatPartitioning,Mat); 1242 extern PetscErrorCode MatPartitioningSetVertexWeights(MatPartitioning,const PetscInt[]); 1243 extern PetscErrorCode
MatPartitioningSetPartitionWeights(MatPartitioning,const PetscReal []); 1244 extern PetscErrorCode MatPartitioningApply(MatPartitioning,IS*); 1245 extern PetscErrorCode MatPartitioningDestroy(MatPartitioning*); 1246 1247 extern PetscErrorCode MatPartitioningRegister(const char[],const char[],const char[],PetscErrorCode (*)(MatPartitioning)); 1248 1249 /*MC 1250 MatPartitioningRegisterDynamic - Adds a new sparse matrix partitioning to the 1251 matrix package. 1252 1253 Synopsis: 1254 PetscErrorCode MatPartitioningRegisterDynamic(const char *name_partitioning,const char *path,const char *name_create,PetscErrorCode (*routine_create)(MatPartitioning)) 1255 1256 Not Collective 1257 1258 Input Parameters: 1259 + sname - name of partitioning (for example MATPARTITIONINGCURRENT) or parmetis 1260 . path - location of library where creation routine is 1261 . name - name of function that creates the partitioning type, a string 1262 - function - function pointer that creates the partitioning type 1263 1264 Level: developer 1265 1266 If dynamic libraries are used, then the fourth input argument (function) 1267 is ignored. 1268 1269 Sample usage: 1270 .vb 1271 MatPartitioningRegisterDynamic("my_part",/home/username/my_lib/lib/libO/solaris/mylib.a, 1272 "MyPartCreate",MyPartCreate); 1273 .ve 1274 1275 Then, your partitioner can be chosen with the procedural interface via 1276 $ MatPartitioningSetType(part,"my_part") 1277 or at runtime via the option 1278 $ -mat_partitioning_type my_part 1279 1280 $PETSC_ARCH occuring in pathname will be replaced with appropriate values. 
1281 1282 .keywords: matrix, partitioning, register 1283 1284 .seealso: MatPartitioningRegisterDestroy(), MatPartitioningRegisterAll() 1285 M*/ 1286 #if defined(PETSC_USE_DYNAMIC_LIBRARIES) 1287 #define MatPartitioningRegisterDynamic(a,b,c,d) MatPartitioningRegister(a,b,c,0) 1288 #else 1289 #define MatPartitioningRegisterDynamic(a,b,c,d) MatPartitioningRegister(a,b,c,d) 1290 #endif 1291 1292 extern PetscBool MatPartitioningRegisterAllCalled; 1293 1294 extern PetscErrorCode MatPartitioningRegisterAll(const char[]); 1295 extern PetscErrorCode MatPartitioningRegisterDestroy(void); 1296 1297 extern PetscErrorCode MatPartitioningView(MatPartitioning,PetscViewer); 1298 extern PetscErrorCode MatPartitioningSetFromOptions(MatPartitioning); 1299 extern PetscErrorCode MatPartitioningGetType(MatPartitioning,const MatPartitioningType*); 1300 1301 extern PetscErrorCode MatPartitioningParmetisSetCoarseSequential(MatPartitioning); 1302 extern PetscErrorCode MatPartitioningParmetisGetEdgeCut(MatPartitioning, PetscInt *); 1303 1304 typedef enum { MP_CHACO_MULTILEVEL=1,MP_CHACO_SPECTRAL=2,MP_CHACO_LINEAR=4,MP_CHACO_RANDOM=5,MP_CHACO_SCATTERED=6 } MPChacoGlobalType; 1305 extern const char *MPChacoGlobalTypes[]; 1306 typedef enum { MP_CHACO_KERNIGHAN=1,MP_CHACO_NONE=2 } MPChacoLocalType; 1307 extern const char *MPChacoLocalTypes[]; 1308 typedef enum { MP_CHACO_LANCZOS=0,MP_CHACO_RQI=1 } MPChacoEigenType; 1309 extern const char *MPChacoEigenTypes[]; 1310 1311 extern PetscErrorCode MatPartitioningChacoSetGlobal(MatPartitioning,MPChacoGlobalType); 1312 extern PetscErrorCode MatPartitioningChacoGetGlobal(MatPartitioning,MPChacoGlobalType*); 1313 extern PetscErrorCode MatPartitioningChacoSetLocal(MatPartitioning,MPChacoLocalType); 1314 extern PetscErrorCode MatPartitioningChacoGetLocal(MatPartitioning,MPChacoLocalType*); 1315 extern PetscErrorCode MatPartitioningChacoSetCoarseLevel(MatPartitioning,PetscReal); 1316 extern PetscErrorCode 
MatPartitioningChacoSetEigenSolver(MatPartitioning,MPChacoEigenType); 1317 extern PetscErrorCode MatPartitioningChacoGetEigenSolver(MatPartitioning,MPChacoEigenType*); 1318 extern PetscErrorCode MatPartitioningChacoSetEigenTol(MatPartitioning,PetscReal); 1319 extern PetscErrorCode MatPartitioningChacoGetEigenTol(MatPartitioning,PetscReal*); 1320 extern PetscErrorCode MatPartitioningChacoSetEigenNumber(MatPartitioning,PetscInt); 1321 extern PetscErrorCode MatPartitioningChacoGetEigenNumber(MatPartitioning,PetscInt*); 1322 1323 #define MP_PARTY_OPT "opt" 1324 #define MP_PARTY_LIN "lin" 1325 #define MP_PARTY_SCA "sca" 1326 #define MP_PARTY_RAN "ran" 1327 #define MP_PARTY_GBF "gbf" 1328 #define MP_PARTY_GCF "gcf" 1329 #define MP_PARTY_BUB "bub" 1330 #define MP_PARTY_DEF "def" 1331 extern PetscErrorCode MatPartitioningPartySetGlobal(MatPartitioning,const char*); 1332 #define MP_PARTY_HELPFUL_SETS "hs" 1333 #define MP_PARTY_KERNIGHAN_LIN "kl" 1334 #define MP_PARTY_NONE "no" 1335 extern PetscErrorCode MatPartitioningPartySetLocal(MatPartitioning,const char*); 1336 extern PetscErrorCode MatPartitioningPartySetCoarseLevel(MatPartitioning,PetscReal); 1337 extern PetscErrorCode MatPartitioningPartySetBipart(MatPartitioning,PetscBool); 1338 extern PetscErrorCode MatPartitioningPartySetMatchOptimization(MatPartitioning,PetscBool); 1339 1340 typedef enum { MP_PTSCOTCH_QUALITY,MP_PTSCOTCH_SPEED,MP_PTSCOTCH_BALANCE,MP_PTSCOTCH_SAFETY,MP_PTSCOTCH_SCALABILITY } MPPTScotchStrategyType; 1341 extern const char *MPPTScotchStrategyTypes[]; 1342 1343 extern PetscErrorCode MatPartitioningPTScotchSetImbalance(MatPartitioning,PetscReal); 1344 extern PetscErrorCode MatPartitioningPTScotchGetImbalance(MatPartitioning,PetscReal*); 1345 extern PetscErrorCode MatPartitioningPTScotchSetStrategy(MatPartitioning,MPPTScotchStrategyType); 1346 extern PetscErrorCode MatPartitioningPTScotchGetStrategy(MatPartitioning,MPPTScotchStrategyType*); 1347 1348 /* 1349 These routines are for coarsening matrices: 
1350 1351 1352 /*S 1353 MatCoarsen - Object for managing the coarsening of a graph (symmetric matrix) 1354 1355 Level: beginner 1356 1357 Concepts: coarsen 1358 1359 .seealso: MatCoarsenCreate(), MatCoarsenType 1360 S*/ 1361 typedef struct _p_MatCoarsen* MatCoarsen; 1362 1363 /*J 1364 MatCoarsenType - String with the name of a PETSc matrix coarsen or the creation function 1365 with an optional dynamic library name, for example 1366 http://www.mcs.anl.gov/petsc/lib.a:coarsencreate() 1367 1368 Level: beginner 1369 1370 .seealso: MatCoarsenCreate(), MatCoarsen 1371 J*/ 1372 #define MatCoarsenType char* 1373 #define MATCOARSENMIS "mis" 1374 #define MATCOARSENHEM "hem" 1375 1376 /* linked list for aggregates */ 1377 typedef struct _PetscCDIntNd{ 1378 struct _PetscCDIntNd *next; 1379 PetscInt gid; 1380 }PetscCDIntNd; 1381 1382 /* only used by node pool */ 1383 typedef struct _PetscCDArrNd{ 1384 struct _PetscCDArrNd *next; 1385 struct _PetscCDIntNd *array; 1386 }PetscCDArrNd; 1387 1388 typedef struct _PetscCoarsenData{ 1389 /* node pool */ 1390 PetscCDArrNd pool_list; 1391 PetscCDIntNd *new_node; 1392 PetscInt new_left; 1393 PetscInt chk_sz; 1394 PetscCDIntNd *extra_nodes; 1395 /* Array of lists */ 1396 PetscCDIntNd **array; 1397 PetscInt size; 1398 /* cache a Mat for communication data */ 1399 Mat mat; 1400 /* cache IS of removed equations */ 1401 IS removedIS; 1402 }PetscCoarsenData; 1403 1404 extern PetscErrorCode MatCoarsenCreate(MPI_Comm,MatCoarsen*); 1405 extern PetscErrorCode MatCoarsenSetType(MatCoarsen,const MatCoarsenType); 1406 extern PetscErrorCode MatCoarsenSetAdjacency(MatCoarsen,Mat); 1407 extern PetscErrorCode MatCoarsenSetGreedyOrdering(MatCoarsen,const IS); 1408 extern PetscErrorCode MatCoarsenSetStrictAggs(MatCoarsen,PetscBool); 1409 extern PetscErrorCode MatCoarsenSetVerbose(MatCoarsen,PetscInt); 1410 extern PetscErrorCode MatCoarsenGetData( MatCoarsen, PetscCoarsenData ** ); 1411 extern PetscErrorCode MatCoarsenApply(MatCoarsen); 1412 extern
PetscErrorCode MatCoarsenDestroy(MatCoarsen*); 1413 1414 extern PetscErrorCode MatCoarsenRegister(const char[],const char[],const char[],PetscErrorCode (*)(MatCoarsen)); 1415 1416 /*MC 1417 MatCoarsenRegisterDynamic - Adds a new sparse matrix coarsen to the 1418 matrix package. 1419 1420 Synopsis: 1421 PetscErrorCode MatCoarsenRegisterDynamic(const char *name_coarsen,const char *path,const char *name_create,PetscErrorCode (*routine_create)(MatCoarsen)) 1422 1423 Not Collective 1424 1425 Input Parameters: 1426 + sname - name of coarsen (for example MATCOARSENMIS) 1427 . path - location of library where creation routine is 1428 . name - name of function that creates the coarsen type, a string 1429 - function - function pointer that creates the coarsen type 1430 1431 Level: developer 1432 1433 If dynamic libraries are used, then the fourth input argument (function) 1434 is ignored. 1435 1436 Sample usage: 1437 .vb 1438 MatCoarsenRegisterDynamic("my_agg",/home/username/my_lib/lib/libO/solaris/mylib.a, 1439 "MyAggCreate",MyAggCreate); 1440 .ve 1441 1442 Then, your aggregator can be chosen with the procedural interface via 1443 $ MatCoarsenSetType(agg,"my_agg") 1444 or at runtime via the option 1445 $ -mat_coarsen_type my_agg 1446 1447 $PETSC_ARCH occuring in pathname will be replaced with appropriate values. 
1448 1449 .keywords: matrix, coarsen, register 1450 1451 .seealso: MatCoarsenRegisterDestroy(), MatCoarsenRegisterAll() 1452 M*/ 1453 #if defined(PETSC_USE_DYNAMIC_LIBRARIES) 1454 #define MatCoarsenRegisterDynamic(a,b,c,d) MatCoarsenRegister(a,b,c,0) 1455 #else 1456 #define MatCoarsenRegisterDynamic(a,b,c,d) MatCoarsenRegister(a,b,c,d) 1457 #endif 1458 1459 extern PetscBool MatCoarsenRegisterAllCalled; 1460 1461 extern PetscErrorCode MatCoarsenRegisterAll(const char[]); 1462 extern PetscErrorCode MatCoarsenRegisterDestroy(void); 1463 1464 extern PetscErrorCode MatCoarsenView(MatCoarsen,PetscViewer); 1465 extern PetscErrorCode MatCoarsenSetFromOptions(MatCoarsen); 1466 extern PetscErrorCode MatCoarsenGetType(MatCoarsen,const MatCoarsenType*); 1467 1468 1469 extern PetscErrorCode MatMeshToVertexGraph(Mat,PetscInt,Mat*); 1470 extern PetscErrorCode MatMeshToCellGraph(Mat,PetscInt,Mat*); 1471 1472 /* 1473 If you add entries here you must also add them to finclude/petscmat.h 1474 */ 1475 typedef enum { MATOP_SET_VALUES=0, 1476 MATOP_GET_ROW=1, 1477 MATOP_RESTORE_ROW=2, 1478 MATOP_MULT=3, 1479 MATOP_MULT_ADD=4, 1480 MATOP_MULT_TRANSPOSE=5, 1481 MATOP_MULT_TRANSPOSE_ADD=6, 1482 MATOP_SOLVE=7, 1483 MATOP_SOLVE_ADD=8, 1484 MATOP_SOLVE_TRANSPOSE=9, 1485 MATOP_SOLVE_TRANSPOSE_ADD=10, 1486 MATOP_LUFACTOR=11, 1487 MATOP_CHOLESKYFACTOR=12, 1488 MATOP_SOR=13, 1489 MATOP_TRANSPOSE=14, 1490 MATOP_GETINFO=15, 1491 MATOP_EQUAL=16, 1492 MATOP_GET_DIAGONAL=17, 1493 MATOP_DIAGONAL_SCALE=18, 1494 MATOP_NORM=19, 1495 MATOP_ASSEMBLY_BEGIN=20, 1496 MATOP_ASSEMBLY_END=21, 1497 MATOP_SET_OPTION=22, 1498 MATOP_ZERO_ENTRIES=23, 1499 MATOP_ZERO_ROWS=24, 1500 MATOP_LUFACTOR_SYMBOLIC=25, 1501 MATOP_LUFACTOR_NUMERIC=26, 1502 MATOP_CHOLESKY_FACTOR_SYMBOLIC=27, 1503 MATOP_CHOLESKY_FACTOR_NUMERIC=28, 1504 MATOP_SETUP_PREALLOCATION=29, 1505 MATOP_ILUFACTOR_SYMBOLIC=30, 1506 MATOP_ICCFACTOR_SYMBOLIC=31, 1507 MATOP_GET_ARRAY=32, 1508 MATOP_RESTORE_ARRAY=33, 1509 MATOP_DUPLICATE=34, 1510 
MATOP_FORWARD_SOLVE=35, 1511 MATOP_BACKWARD_SOLVE=36, 1512 MATOP_ILUFACTOR=37, 1513 MATOP_ICCFACTOR=38, 1514 MATOP_AXPY=39, 1515 MATOP_GET_SUBMATRICES=40, 1516 MATOP_INCREASE_OVERLAP=41, 1517 MATOP_GET_VALUES=42, 1518 MATOP_COPY=43, 1519 MATOP_GET_ROW_MAX=44, 1520 MATOP_SCALE=45, 1521 MATOP_SHIFT=46, 1522 MATOP_DIAGONAL_SET=47, 1523 MATOP_ILUDT_FACTOR=48, 1524 MATOP_SET_BLOCK_SIZE=49, 1525 MATOP_GET_ROW_IJ=50, 1526 MATOP_RESTORE_ROW_IJ=51, 1527 MATOP_GET_COLUMN_IJ=52, 1528 MATOP_RESTORE_COLUMN_IJ=53, 1529 MATOP_FDCOLORING_CREATE=54, 1530 MATOP_COLORING_PATCH=55, 1531 MATOP_SET_UNFACTORED=56, 1532 MATOP_PERMUTE=57, 1533 MATOP_SET_VALUES_BLOCKED=58, 1534 MATOP_GET_SUBMATRIX=59, 1535 MATOP_DESTROY=60, 1536 MATOP_VIEW=61, 1537 MATOP_CONVERT_FROM=62, 1538 MATOP_USE_SCALED_FORM=63, 1539 MATOP_SCALE_SYSTEM=64, 1540 MATOP_UNSCALE_SYSTEM=65, 1541 MATOP_SET_LOCAL_TO_GLOBAL_MAP=66, 1542 MATOP_SET_VALUES_LOCAL=67, 1543 MATOP_ZERO_ROWS_LOCAL=68, 1544 MATOP_GET_ROW_MAX_ABS=69, 1545 MATOP_GET_ROW_MIN_ABS=70, 1546 MATOP_CONVERT=71, 1547 MATOP_SET_COLORING=72, 1548 MATOP_SET_VALUES_ADIC=73, 1549 MATOP_SET_VALUES_ADIFOR=74, 1550 MATOP_FD_COLORING_APPLY=75, 1551 MATOP_SET_FROM_OPTIONS=76, 1552 MATOP_MULT_CON=77, 1553 MATOP_MULT_TRANSPOSE_CON=78, 1554 MATOP_PERMUTE_SPARSIFY=79, 1555 MATOP_MULT_MULTIPLE=80, 1556 MATOP_SOLVE_MULTIPLE=81, 1557 MATOP_GET_INERTIA=82, 1558 MATOP_LOAD=83, 1559 MATOP_IS_SYMMETRIC=84, 1560 MATOP_IS_HERMITIAN=85, 1561 MATOP_IS_STRUCTURALLY_SYMMETRIC=86, 1562 MATOP_DUMMY=87, 1563 MATOP_GET_VECS=88, 1564 MATOP_MAT_MULT=89, 1565 MATOP_MAT_MULT_SYMBOLIC=90, 1566 MATOP_MAT_MULT_NUMERIC=91, 1567 MATOP_PTAP=92, 1568 MATOP_PTAP_SYMBOLIC=93, 1569 MATOP_PTAP_NUMERIC=94, 1570 MATOP_MAT_MULTTRANSPOSE=95, 1571 MATOP_MAT_MULTTRANSPOSE_SYM=96, 1572 MATOP_MAT_MULTTRANSPOSE_NUM=97, 1573 MATOP_PTAP_SYMBOLIC_SEQAIJ=98, 1574 MATOP_PTAP_NUMERIC_SEQAIJ=99, 1575 MATOP_PTAP_SYMBOLIC_MPIAIJ=100, 1576 MATOP_PTAP_NUMERIC_MPIAIJ=101, 1577 MATOP_CONJUGATE=102, 1578 MATOP_SET_SIZES=103, 
1579 MATOP_SET_VALUES_ROW=104, 1580 MATOP_REAL_PART=105, 1581 MATOP_IMAG_PART=106, 1582 MATOP_GET_ROW_UTRIANGULAR=107, 1583 MATOP_RESTORE_ROW_UTRIANGULAR=108, 1584 MATOP_MATSOLVE=109, 1585 MATOP_GET_REDUNDANTMATRIX=110, 1586 MATOP_GET_ROW_MIN=111, 1587 MATOP_GET_COLUMN_VEC=112, 1588 MATOP_MISSING_DIAGONAL=113, 1589 MATOP_MATGETSEQNONZEROSTRUCTURE=114, 1590 MATOP_CREATE=115, 1591 MATOP_GET_GHOSTS=116, 1592 MATOP_GET_LOCALSUBMATRIX=117, 1593 MATOP_RESTORE_LOCALSUBMATRIX=118, 1594 MATOP_MULT_DIAGONAL_BLOCK=119, 1595 MATOP_HERMITIANTRANSPOSE=120, 1596 MATOP_MULTHERMITIANTRANSPOSE=121, 1597 MATOP_MULTHERMITIANTRANSPOSEADD=122, 1598 MATOP_GETMULTIPROCBLOCK=123, 1599 MATOP_GETCOLUMNNORMS=125, 1600 MATOP_GET_SUBMATRICES_PARALLEL=128, 1601 MATOP_SET_VALUES_BATCH=129, 1602 MATOP_TRANSPOSEMATMULT=130, 1603 MATOP_TRANSPOSEMATMULT_SYMBOLIC=131, 1604 MATOP_TRANSPOSEMATMULT_NUMERIC=132, 1605 MATOP_TRANSPOSECOLORING_CREATE=133, 1606 MATOP_TRANSCOLORING_APPLY_SPTODEN=134, 1607 MATOP_TRANSCOLORING_APPLY_DENTOSP=135, 1608 MATOP_RARt=136, 1609 MATOP_RARt_SYMBOLIC=137, 1610 MATOP_RARt_NUMERIC=138, 1611 MATOP_SET_BLOCK_SIZES=139 1612 } MatOperation; 1613 extern PetscErrorCode MatHasOperation(Mat,MatOperation,PetscBool *); 1614 extern PetscErrorCode MatShellSetOperation(Mat,MatOperation,void(*)(void)); 1615 extern PetscErrorCode MatShellGetOperation(Mat,MatOperation,void(**)(void)); 1616 extern PetscErrorCode MatShellSetContext(Mat,void*); 1617 1618 /* 1619 Codes for matrices stored on disk. By default they are 1620 stored in a universal format. By changing the format with 1621 PetscViewerSetFormat(viewer,PETSC_VIEWER_NATIVE); the matrices will 1622 be stored in a way natural for the matrix, for example dense matrices 1623 would be stored as dense. Matrices stored this way may only be 1624 read into matrices of the same type. 
1625 */ 1626 #define MATRIX_BINARY_FORMAT_DENSE -1 1627 1628 extern PetscErrorCode MatMPIBAIJSetHashTableFactor(Mat,PetscReal); 1629 extern PetscErrorCode MatISGetLocalMat(Mat,Mat*); 1630 extern PetscErrorCode MatISSetLocalMat(Mat,Mat); 1631 1632 /*S 1633 MatNullSpace - Object that removes a null space from a vector, i.e. 1634 orthogonalizes the vector to a subsapce 1635 1636 Level: advanced 1637 1638 Concepts: matrix; linear operator, null space 1639 1640 Users manual sections: 1641 . sec_singular 1642 1643 .seealso: MatNullSpaceCreate() 1644 S*/ 1645 typedef struct _p_MatNullSpace* MatNullSpace; 1646 1647 extern PetscErrorCode MatNullSpaceCreate(MPI_Comm,PetscBool ,PetscInt,const Vec[],MatNullSpace*); 1648 extern PetscErrorCode MatNullSpaceSetFunction(MatNullSpace,PetscErrorCode (*)(MatNullSpace,Vec,void*),void*); 1649 extern PetscErrorCode MatNullSpaceDestroy(MatNullSpace*); 1650 extern PetscErrorCode MatNullSpaceRemove(MatNullSpace,Vec,Vec*); 1651 extern PetscErrorCode MatGetNullSpace(Mat, MatNullSpace *); 1652 extern PetscErrorCode MatSetNullSpace(Mat,MatNullSpace); 1653 extern PetscErrorCode MatSetNearNullSpace(Mat,MatNullSpace); 1654 extern PetscErrorCode MatGetNearNullSpace(Mat,MatNullSpace*); 1655 extern PetscErrorCode MatNullSpaceTest(MatNullSpace,Mat,PetscBool *); 1656 extern PetscErrorCode MatNullSpaceView(MatNullSpace,PetscViewer); 1657 extern PetscErrorCode MatNullSpaceGetVecs(MatNullSpace,PetscBool*,PetscInt*,const Vec**); 1658 extern PetscErrorCode MatNullSpaceCreateRigidBody(Vec,MatNullSpace*); 1659 1660 extern PetscErrorCode MatReorderingSeqSBAIJ(Mat,IS); 1661 extern PetscErrorCode MatMPISBAIJSetHashTableFactor(Mat,PetscReal); 1662 extern PetscErrorCode MatSeqSBAIJSetColumnIndices(Mat,PetscInt *); 1663 extern PetscErrorCode MatSeqBAIJInvertBlockDiagonal(Mat); 1664 1665 extern PetscErrorCode MatCreateMAIJ(Mat,PetscInt,Mat*); 1666 extern PetscErrorCode MatMAIJRedimension(Mat,PetscInt,Mat*); 1667 extern PetscErrorCode MatMAIJGetAIJ(Mat,Mat*); 1668 
1669 extern PetscErrorCode MatComputeExplicitOperator(Mat,Mat*); 1670 1671 extern PetscErrorCode MatDiagonalScaleLocal(Mat,Vec); 1672 1673 extern PetscErrorCode MatCreateMFFD(MPI_Comm,PetscInt,PetscInt,PetscInt,PetscInt,Mat*); 1674 extern PetscErrorCode MatMFFDSetBase(Mat,Vec,Vec); 1675 extern PetscErrorCode MatMFFDSetFunction(Mat,PetscErrorCode(*)(void*,Vec,Vec),void*); 1676 extern PetscErrorCode MatMFFDSetFunctioni(Mat,PetscErrorCode (*)(void*,PetscInt,Vec,PetscScalar*)); 1677 extern PetscErrorCode MatMFFDSetFunctioniBase(Mat,PetscErrorCode (*)(void*,Vec)); 1678 extern PetscErrorCode MatMFFDAddNullSpace(Mat,MatNullSpace); 1679 extern PetscErrorCode MatMFFDSetHHistory(Mat,PetscScalar[],PetscInt); 1680 extern PetscErrorCode MatMFFDResetHHistory(Mat); 1681 extern PetscErrorCode MatMFFDSetFunctionError(Mat,PetscReal); 1682 extern PetscErrorCode MatMFFDSetPeriod(Mat,PetscInt); 1683 extern PetscErrorCode MatMFFDGetH(Mat,PetscScalar *); 1684 extern PetscErrorCode MatMFFDSetOptionsPrefix(Mat,const char[]); 1685 extern PetscErrorCode MatMFFDCheckPositivity(void*,Vec,Vec,PetscScalar*); 1686 extern PetscErrorCode MatMFFDSetCheckh(Mat,PetscErrorCode (*)(void*,Vec,Vec,PetscScalar*),void*); 1687 1688 /*S 1689 MatMFFD - A data structured used to manage the computation of the h differencing parameter for matrix-free 1690 Jacobian vector products 1691 1692 Notes: MATMFFD is a specific MatType which uses the MatMFFD data structure 1693 1694 MatMFFD*() methods actually take the Mat as their first argument. 
Not a MatMFFD data structure 1695 1696 Level: developer 1697 1698 .seealso: MATMFFD, MatCreateMFFD(), MatMFFDSetFuction(), MatMFFDSetType(), MatMFFDRegister() 1699 S*/ 1700 typedef struct _p_MatMFFD* MatMFFD; 1701 1702 /*J 1703 MatMFFDType - algorithm used to compute the h used in computing matrix-vector products via differencing of the function 1704 1705 Level: beginner 1706 1707 .seealso: MatMFFDSetType(), MatMFFDRegister() 1708 J*/ 1709 #define MatMFFDType char* 1710 #define MATMFFD_DS "ds" 1711 #define MATMFFD_WP "wp" 1712 1713 extern PetscErrorCode MatMFFDSetType(Mat,const MatMFFDType); 1714 extern PetscErrorCode MatMFFDRegister(const char[],const char[],const char[],PetscErrorCode (*)(MatMFFD)); 1715 1716 /*MC 1717 MatMFFDRegisterDynamic - Adds a method to the MatMFFD registry. 1718 1719 Synopsis: 1720 PetscErrorCode MatMFFDRegisterDynamic(const char *name_solver,const char *path,const char *name_create,PetscErrorCode (*routine_create)(MatMFFD)) 1721 1722 Not Collective 1723 1724 Input Parameters: 1725 + name_solver - name of a new user-defined compute-h module 1726 . path - path (either absolute or relative) the library containing this solver 1727 . name_create - name of routine to create method context 1728 - routine_create - routine to create method context 1729 1730 Level: developer 1731 1732 Notes: 1733 MatMFFDRegisterDynamic() may be called multiple times to add several user-defined solvers. 1734 1735 If dynamic libraries are used, then the fourth input argument (routine_create) 1736 is ignored. 
1737 1738 Sample usage: 1739 .vb 1740 MatMFFDRegisterDynamic("my_h",/home/username/my_lib/lib/libO/solaris/mylib.a, 1741 "MyHCreate",MyHCreate); 1742 .ve 1743 1744 Then, your solver can be chosen with the procedural interface via 1745 $ MatMFFDSetType(mfctx,"my_h") 1746 or at runtime via the option 1747 $ -snes_mf_type my_h 1748 1749 .keywords: MatMFFD, register 1750 1751 .seealso: MatMFFDRegisterAll(), MatMFFDRegisterDestroy() 1752 M*/ 1753 #if defined(PETSC_USE_DYNAMIC_LIBRARIES) 1754 #define MatMFFDRegisterDynamic(a,b,c,d) MatMFFDRegister(a,b,c,0) 1755 #else 1756 #define MatMFFDRegisterDynamic(a,b,c,d) MatMFFDRegister(a,b,c,d) 1757 #endif 1758 1759 extern PetscErrorCode MatMFFDRegisterAll(const char[]); 1760 extern PetscErrorCode MatMFFDRegisterDestroy(void); 1761 extern PetscErrorCode MatMFFDDSSetUmin(Mat,PetscReal); 1762 extern PetscErrorCode MatMFFDWPSetComputeNormU(Mat,PetscBool ); 1763 1764 1765 extern PetscErrorCode PetscViewerMathematicaPutMatrix(PetscViewer, PetscInt, PetscInt, PetscReal *); 1766 extern PetscErrorCode PetscViewerMathematicaPutCSRMatrix(PetscViewer, PetscInt, PetscInt, PetscInt *, PetscInt *, PetscReal *); 1767 1768 /* 1769 PETSc interface to MUMPS 1770 */ 1771 #ifdef PETSC_HAVE_MUMPS 1772 extern PetscErrorCode MatMumpsSetIcntl(Mat,PetscInt,PetscInt); 1773 #endif 1774 1775 /* 1776 PETSc interface to SUPERLU 1777 */ 1778 #ifdef PETSC_HAVE_SUPERLU 1779 extern PetscErrorCode MatSuperluSetILUDropTol(Mat,PetscReal); 1780 #endif 1781 1782 #if defined PETSC_HAVE_CUDA 1783 #define GPUStorageFormat char* 1784 #define CSR "csr" 1785 #define DIA "dia" 1786 #define ELL "ell" 1787 #define HYB "hyb" 1788 #endif 1789 1790 #if defined PETSC_HAVE_TXPETSCGPU 1791 extern PetscErrorCode MatCreateSeqAIJCUSPARSE(MPI_Comm,PetscInt,PetscInt,PetscInt,const PetscInt[],Mat*); 1792 extern PetscErrorCode MatCreateAIJCUSPARSE(MPI_Comm,PetscInt,PetscInt,PetscInt,PetscInt,PetscInt,const PetscInt[],PetscInt,const PetscInt[],Mat*); 1793 extern PetscErrorCode 
MatAIJCUSPARSESetGPUStorageFormatForMatMult(Mat,const GPUStorageFormat); 1794 extern PetscErrorCode MatAIJCUSPARSESetGPUStorageFormatForMatSolve(Mat,const GPUStorageFormat); 1795 #endif 1796 1797 #if defined(PETSC_HAVE_CUSP) 1798 extern PetscErrorCode MatCreateSeqAIJCUSP(MPI_Comm,PetscInt,PetscInt,PetscInt,const PetscInt[],Mat*); 1799 extern PetscErrorCode MatCreateAIJCUSP(MPI_Comm,PetscInt,PetscInt,PetscInt,PetscInt,PetscInt,const PetscInt[],PetscInt,const PetscInt[],Mat*); 1800 extern PetscErrorCode MatAIJCUSPSetGPUStorageFormatForMatMult(Mat,const GPUStorageFormat); 1801 #endif 1802 1803 /* 1804 PETSc interface to FFTW 1805 */ 1806 #if defined(PETSC_HAVE_FFTW) 1807 extern PetscErrorCode VecScatterPetscToFFTW(Mat,Vec,Vec); 1808 extern PetscErrorCode VecScatterFFTWToPetsc(Mat,Vec,Vec); 1809 extern PetscErrorCode MatGetVecsFFTW(Mat,Vec*,Vec*,Vec*); 1810 #endif 1811 1812 extern PetscErrorCode MatCreateNest(MPI_Comm,PetscInt,const IS[],PetscInt,const IS[],const Mat[],Mat*); 1813 extern PetscErrorCode MatNestGetSize(Mat,PetscInt*,PetscInt*); 1814 extern PetscErrorCode MatNestGetISs(Mat,IS[],IS[]); 1815 extern PetscErrorCode MatNestGetLocalISs(Mat,IS[],IS[]); 1816 extern PetscErrorCode MatNestGetSubMats(Mat,PetscInt*,PetscInt*,Mat***); 1817 extern PetscErrorCode MatNestGetSubMat(Mat,PetscInt,PetscInt,Mat*); 1818 extern PetscErrorCode MatNestSetVecType(Mat,const VecType); 1819 extern PetscErrorCode MatNestSetSubMats(Mat,PetscInt,const IS[],PetscInt,const IS[],const Mat[]); 1820 extern PetscErrorCode MatNestSetSubMat(Mat,PetscInt,PetscInt,Mat); 1821 1822 /* 1823 MatIJ: 1824 An unweighted directed pseudograph 1825 An interpretation of this matrix as a (pseudo)graph allows us to define additional operations on it: 1826 A MatIJ can act on sparse arrays: arrays of indices, or index arrays of integers, scalars, or integer-scalar pairs 1827 by mapping the indices to the indices connected to them by the (pseudo)graph ed 1828 */ 1829 typedef enum {MATIJ_LOCAL, MATIJ_GLOBAL} 
MatIJIndexType; 1830 extern PetscErrorCode MatIJSetMultivalued(Mat, PetscBool); 1831 extern PetscErrorCode MatIJGetMultivalued(Mat, PetscBool*); 1832 extern PetscErrorCode MatIJSetEdges(Mat, PetscInt, const PetscInt*, const PetscInt*); 1833 extern PetscErrorCode MatIJGetEdges(Mat, PetscInt *, PetscInt **, PetscInt **); 1834 extern PetscErrorCode MatIJSetEdgesIS(Mat, IS, IS); 1835 extern PetscErrorCode MatIJGetEdgesIS(Mat, IS*, IS*); 1836 extern PetscErrorCode MatIJGetRowSizes(Mat, MatIJIndexType, PetscInt, const PetscInt *, PetscInt **); 1837 extern PetscErrorCode MatIJGetMinRowSize(Mat, PetscInt *); 1838 extern PetscErrorCode MatIJGetMaxRowSize(Mat, PetscInt *); 1839 extern PetscErrorCode MatIJGetSupport(Mat, PetscInt *, PetscInt **); 1840 extern PetscErrorCode MatIJGetSupportIS(Mat, IS *); 1841 extern PetscErrorCode MatIJGetImage(Mat, PetscInt*, PetscInt**); 1842 extern PetscErrorCode MatIJGetImageIS(Mat, IS *); 1843 extern PetscErrorCode MatIJGetSupportSize(Mat, PetscInt *); 1844 extern PetscErrorCode MatIJGetImageSize(Mat, PetscInt *); 1845 1846 extern PetscErrorCode MatIJBinRenumber(Mat, Mat*); 1847 1848 extern PetscErrorCode MatIJMap(Mat, MatIJIndexType, PetscInt,const PetscInt*,const PetscInt*,const PetscScalar*, MatIJIndexType,PetscInt*,PetscInt**,PetscInt**,PetscScalar**,PetscInt**); 1849 extern PetscErrorCode MatIJBin(Mat, MatIJIndexType, PetscInt,const PetscInt*,const PetscInt*,const PetscScalar*,PetscInt*,PetscInt**,PetscInt**,PetscScalar**,PetscInt**); 1850 extern PetscErrorCode MatIJBinMap(Mat,Mat, MatIJIndexType,PetscInt,const PetscInt*,const PetscInt*,const PetscScalar*,MatIJIndexType,PetscInt*,PetscInt**,PetscInt**,PetscScalar**,PetscInt**); 1851 1852 PETSC_EXTERN_CXX_END 1853 #endif 1854