#include <petsc/private/pcisimpl.h> /*I "petscpc.h" I*/

static PetscErrorCode PCISSetUseStiffnessScaling_IS(PC pc, PetscBool use)
{
  PC_IS *pcis;

  PetscFunctionBegin;
  /* record whether the partition of unity is to be built from the local matrix diagonal */
  pcis                        = (PC_IS *)pc->data;
  pcis->use_stiffness_scaling = use;
  PetscFunctionReturn(PETSC_SUCCESS);
}

/*@
  PCISSetUseStiffnessScaling - Tells `PCIS` to construct partition of unity using
  the local matrices' diagonal entries

  Logically Collective

  Input Parameters:
+ pc  - the preconditioning context
- use - whether or not it should use matrix diagonal to build partition of unity.

  Level: intermediate

.seealso: [](ch_ksp), `PCBDDC`, `PCNN`, `PCISSetSubdomainDiagonalScaling()`, `PCISScatterArrayNToVecB()`,
          `PCISSetSubdomainScalingFactor()`,
          `PCISReset()`, `PCISInitialize()`, `PCISApplyInvSchur()`, `PCISApplySchur()`
@*/
PetscErrorCode PCISSetUseStiffnessScaling(PC pc, PetscBool use)
{
  PetscFunctionBegin;
  PetscValidHeaderSpecific(pc, PC_CLASSID, 1);
  PetscValidLogicalCollectiveBool(pc, use, 2);
  /* dispatches to PCISSetUseStiffnessScaling_IS() if composed by PCISInitialize(); no-op otherwise */
  PetscTryMethod(pc, "PCISSetUseStiffnessScaling_C", (PC, PetscBool), (pc, use));
  PetscFunctionReturn(PETSC_SUCCESS);
}

static PetscErrorCode PCISSetSubdomainDiagonalScaling_IS(PC pc, Vec scaling_factors)
{
  PC_IS *pcis = (PC_IS *)pc->data;

  PetscFunctionBegin;
  /* take ownership of the user vector; reference before destroying in case it is pcis->D itself */
  PetscCall(PetscObjectReference((PetscObject)scaling_factors));
  PetscCall(VecDestroy(&pcis->D));
  pcis->D = scaling_factors;
  if (pc->setupcalled) {
    PetscInt vsize;

    PetscCall(VecGetSize(pcis->D, &vsize));
    if (vsize != pcis->n) {
      /* only the interface size n_B is acceptable besides the full local size n */
      PetscCheck(vsize == pcis->n_B, PETSC_COMM_SELF, PETSC_ERR_ARG_INCOMP, "Invalid size for scaling vector. Expected %" PetscInt_FMT " (or full %" PetscInt_FMT "), found %" PetscInt_FMT, pcis->n_B, pcis->n, vsize);
    } else {
      /* full-size vector given: restrict it to the interface and keep the restriction */
      PetscCall(VecScatterBegin(pcis->N_to_B, pcis->D, pcis->vec1_B, INSERT_VALUES, SCATTER_FORWARD));
      PetscCall(VecScatterEnd(pcis->N_to_B, pcis->D, pcis->vec1_B, INSERT_VALUES, SCATTER_FORWARD));
      PetscCall(VecDestroy(&pcis->D));
      PetscCall(VecDuplicate(pcis->vec1_B, &pcis->D));
      PetscCall(VecCopy(pcis->vec1_B, pcis->D));
    }
  }
  PetscFunctionReturn(PETSC_SUCCESS);
}

/*@
  PCISSetSubdomainDiagonalScaling - Set diagonal scaling for `PCIS`.

  Logically Collective

  Input Parameters:
+ pc              - the preconditioning context
- scaling_factors - scaling factors for the subdomain

  Level: intermediate

  Note:
  Intended for use with jumping coefficients cases.

.seealso: [](ch_ksp), `PCBDDC`, `PCNN`, `PCISScatterArrayNToVecB()`,
          `PCISSetSubdomainScalingFactor()`, `PCISSetUseStiffnessScaling()`,
          `PCISReset()`, `PCISInitialize()`, `PCISApplyInvSchur()`, `PCISApplySchur()`
@*/
PetscErrorCode PCISSetSubdomainDiagonalScaling(PC pc, Vec scaling_factors)
{
  PetscFunctionBegin;
  PetscValidHeaderSpecific(pc, PC_CLASSID, 1);
  PetscValidHeaderSpecific(scaling_factors, VEC_CLASSID, 2);
  /* dispatches to PCISSetSubdomainDiagonalScaling_IS() if composed by PCISInitialize(); no-op otherwise */
  PetscTryMethod(pc, "PCISSetSubdomainDiagonalScaling_C", (PC, Vec), (pc, scaling_factors));
  PetscFunctionReturn(PETSC_SUCCESS);
}

static PetscErrorCode PCISSetSubdomainScalingFactor_IS(PC pc, PetscScalar scal)
{
  PC_IS *pcis = (PC_IS *)pc->data;

  PetscFunctionBegin;
  /* remember the constant scaling factor and, if the scaling vector already exists, refill it */
  pcis->scaling_factor = scal;
  if (pcis->D) PetscCall(VecSet(pcis->D, scal));
  PetscFunctionReturn(PETSC_SUCCESS);
}

/*@
  PCISSetSubdomainScalingFactor - Set scaling factor for `PCIS`.

  Not Collective

  Input Parameters:
+ pc   - the preconditioning context
- scal - scaling factor for the subdomain

  Level: intermediate

  Note:
  Intended for use with the jumping coefficients cases.

.seealso: [](ch_ksp), `PCBDDC`, `PCNN`, `PCISScatterArrayNToVecB()`,
          `PCISSetSubdomainDiagonalScaling()`, `PCISSetUseStiffnessScaling()`,
          `PCISReset()`, `PCISInitialize()`, `PCISApplyInvSchur()`, `PCISApplySchur()`
@*/
PetscErrorCode PCISSetSubdomainScalingFactor(PC pc, PetscScalar scal)
{
  PetscFunctionBegin;
  PetscValidHeaderSpecific(pc, PC_CLASSID, 1);
  /* dispatches to PCISSetSubdomainScalingFactor_IS() if composed by PCISInitialize(); no-op otherwise */
  PetscTryMethod(pc, "PCISSetSubdomainScalingFactor_C", (PC, PetscScalar), (pc, scal));
  PetscFunctionReturn(PETSC_SUCCESS);
}

/*@
  PCISSetUp - sets up the `PC_IS` portion of `PCNN` and `PCBDDC` preconditioner context as part of their setup process

  Input Parameters:
+ pc              - the `PC` object, must be of type `PCNN` or `PCBDDC`
. computematrices - Extract the blocks `A_II`, `A_BI`, `A_IB` and `A_BB` from the matrix
- computesolvers  - Create the `KSP` for the local Dirichlet and Neumann problems

  Level: advanced

.seealso: [](ch_ksp), `PCBDDC`, `PCNN`, `PCISSetUseStiffnessScaling()`, `PCISSetSubdomainDiagonalScaling()`, `PCISScatterArrayNToVecB()`,
          `PCISSetSubdomainScalingFactor()`,
          `PCISReset()`, `PCISApplySchur()`, `PCISApplyInvSchur()`
@*/
PetscErrorCode PCISSetUp(PC pc, PetscBool computematrices, PetscBool computesolvers)
{
  PC_IS    *pcis = (PC_IS *)pc->data;
  Mat_IS   *matis;
  MatReuse  reuse;
  PetscBool flg, issbaij;

  PetscFunctionBegin;
  PetscCall(PetscObjectTypeCompare((PetscObject)pc->pmat, MATIS, &flg));
  PetscCheck(flg, PetscObjectComm((PetscObject)pc), PETSC_ERR_ARG_WRONG, "Requires matrix for constructing the preconditioner of type MATIS");
  matis = (Mat_IS *)pc->pmat->data;
  if (pc->useAmat) {
    PetscCall(PetscObjectTypeCompare((PetscObject)pc->mat, MATIS, &flg));
    PetscCheck(flg, PetscObjectComm((PetscObject)pc), PETSC_ERR_ARG_WRONG, "Requires linear system matrix of type MATIS");
  }

  /* first time creation, get info on substructuring */
  if (!pc->setupcalled) {
    PetscInt  n_I;
    PetscInt *idx_I_local, *idx_B_local, *idx_I_global, *idx_B_global, *count;
    PetscInt  i;

    /* get info on mapping */
    PetscCall(PetscObjectReference((PetscObject)matis->rmapping));
    PetscCall(ISLocalToGlobalMappingDestroy(&pcis->mapping));
    pcis->mapping = matis->rmapping;
    PetscCall(ISLocalToGlobalMappingGetSize(pcis->mapping, &pcis->n));
    PetscCall(ISLocalToGlobalMappingGetInfo(pcis->mapping, &pcis->n_neigh, &pcis->neigh, &pcis->n_shared, &pcis->shared));

    /* Identifying interior and interface nodes, in local numbering: a node shared by
       two or more subdomains (count >= 2) is an interface node */
    PetscCall(ISLocalToGlobalMappingGetNodeInfo(pcis->mapping, NULL, &count, NULL));
    PetscCall(PetscMalloc1(pcis->n, &idx_I_local));
    PetscCall(PetscMalloc1(pcis->n, &idx_B_local));
    for (i = 0, pcis->n_B = 0, n_I = 0; i < pcis->n; i++) {
      if (count[i] < 2) {
        idx_I_local[n_I] = i;
        n_I++;
      } else {
        idx_B_local[pcis->n_B] = i;
        pcis->n_B++;
      }
    }
    PetscCall(ISLocalToGlobalMappingRestoreNodeInfo(pcis->mapping, NULL, &count, NULL));

    /* Getting the global numbering */
    idx_B_global = PetscSafePointerPlusOffset(idx_I_local, n_I); /* Just avoiding allocating extra memory, since we have vacant space */
    idx_I_global = PetscSafePointerPlusOffset(idx_B_local, pcis->n_B);
    PetscCall(ISLocalToGlobalMappingApply(pcis->mapping, pcis->n_B, idx_B_local, idx_B_global));
    PetscCall(ISLocalToGlobalMappingApply(pcis->mapping, n_I, idx_I_local, idx_I_global));

    /* Creating the index sets */
    PetscCall(ISCreateGeneral(PETSC_COMM_SELF, pcis->n_B, idx_B_local, PETSC_COPY_VALUES, &pcis->is_B_local));
    PetscCall(ISCreateGeneral(PetscObjectComm((PetscObject)pc), pcis->n_B, idx_B_global, PETSC_COPY_VALUES, &pcis->is_B_global));
    PetscCall(ISCreateGeneral(PETSC_COMM_SELF, n_I, idx_I_local, PETSC_COPY_VALUES, &pcis->is_I_local));
    PetscCall(ISCreateGeneral(PetscObjectComm((PetscObject)pc), n_I, idx_I_global, PETSC_COPY_VALUES, &pcis->is_I_global));

    /* Freeing memory */
    PetscCall(PetscFree(idx_B_local));
    PetscCall(PetscFree(idx_I_local));

    /* Creating work vectors and arrays */
    PetscCall(VecDuplicate(matis->x, &pcis->vec1_N));
    PetscCall(VecDuplicate(pcis->vec1_N, &pcis->vec2_N));
    PetscCall(VecCreate(PETSC_COMM_SELF, &pcis->vec1_D));
    PetscCall(VecSetSizes(pcis->vec1_D, pcis->n - pcis->n_B, PETSC_DECIDE));
    PetscCall(VecSetType(pcis->vec1_D, ((PetscObject)pcis->vec1_N)->type_name));
    PetscCall(VecDuplicate(pcis->vec1_D, &pcis->vec2_D));
    PetscCall(VecDuplicate(pcis->vec1_D, &pcis->vec3_D));
    PetscCall(VecDuplicate(pcis->vec1_D, &pcis->vec4_D));
    PetscCall(VecCreate(PETSC_COMM_SELF, &pcis->vec1_B));
    PetscCall(VecSetSizes(pcis->vec1_B, pcis->n_B, PETSC_DECIDE));
    PetscCall(VecSetType(pcis->vec1_B, ((PetscObject)pcis->vec1_N)->type_name));
    PetscCall(VecDuplicate(pcis->vec1_B, &pcis->vec2_B));
    PetscCall(VecDuplicate(pcis->vec1_B, &pcis->vec3_B));
    PetscCall(MatCreateVecs(pc->pmat, &pcis->vec1_global, NULL));
    PetscCall(PetscMalloc1(pcis->n, &pcis->work_N));
    /* scaling vector */
    if (!pcis->D) { /* it can happen that the user passed in a scaling vector via PCISSetSubdomainDiagonalScaling */
      PetscCall(VecDuplicate(pcis->vec1_B, &pcis->D));
      PetscCall(VecSet(pcis->D, pcis->scaling_factor));
    }

    /* Creating the scatter contexts */
    PetscCall(VecScatterCreate(pcis->vec1_N, pcis->is_I_local, pcis->vec1_D, (IS)0, &pcis->N_to_D));
    PetscCall(VecScatterCreate(pcis->vec1_global, pcis->is_I_global, pcis->vec1_D, (IS)0, &pcis->global_to_D));
    PetscCall(VecScatterCreate(pcis->vec1_N, pcis->is_B_local, pcis->vec1_B, (IS)0, &pcis->N_to_B));
    PetscCall(VecScatterCreate(pcis->vec1_global, pcis->is_B_global, pcis->vec1_B, (IS)0, &pcis->global_to_B));

    /* map from boundary to local */
    PetscCall(ISLocalToGlobalMappingCreateIS(pcis->is_B_local, &pcis->BtoNmap));
  }

  /* If a full-size (n) scaling vector was provided, restrict it to the interface (n_B);
     same normalization as in PCISSetSubdomainDiagonalScaling_IS() */
  {
    PetscInt sn;

    PetscCall(VecGetSize(pcis->D, &sn));
    if (sn == pcis->n) {
      PetscCall(VecScatterBegin(pcis->N_to_B, pcis->D, pcis->vec1_B, INSERT_VALUES, SCATTER_FORWARD));
      PetscCall(VecScatterEnd(pcis->N_to_B, pcis->D, pcis->vec1_B, INSERT_VALUES, SCATTER_FORWARD));
      PetscCall(VecDestroy(&pcis->D));
      PetscCall(VecDuplicate(pcis->vec1_B, &pcis->D));
      PetscCall(VecCopy(pcis->vec1_B, pcis->D));
    } else PetscCheck(sn == pcis->n_B, PETSC_COMM_SELF, PETSC_ERR_ARG_INCOMP, "Invalid size for scaling vector. Expected %" PetscInt_FMT " (or full %" PetscInt_FMT "), found %" PetscInt_FMT, pcis->n_B, pcis->n, sn);
  }

  /*
    Extracting the blocks A_II, A_BI, A_IB and A_BB from A. If the numbering
    is such that interior nodes come first, followed by the interface ones, we have

        [ A_II | A_IB ]
    A = [------+------]
        [ A_BI | A_BB ]
  */
  if (computematrices) {
    PetscBool amat = (PetscBool)(pc->mat != pc->pmat && pc->useAmat);
    PetscInt  bs, ibs;

    /* submatrices can only be reused if they exist and the nonzero pattern is unchanged */
    reuse = MAT_INITIAL_MATRIX;
    if (pcis->reusesubmatrices && pc->setupcalled) {
      if (pc->flag == SAME_NONZERO_PATTERN) {
        reuse = MAT_REUSE_MATRIX;
      } else {
        reuse = MAT_INITIAL_MATRIX;
      }
    }
    if (reuse == MAT_INITIAL_MATRIX) {
      PetscCall(MatDestroy(&pcis->A_II));
      PetscCall(MatDestroy(&pcis->pA_II));
      PetscCall(MatDestroy(&pcis->A_IB));
      PetscCall(MatDestroy(&pcis->A_BI));
      PetscCall(MatDestroy(&pcis->A_BB));
    }

    PetscCall(ISLocalToGlobalMappingGetBlockSize(pcis->mapping, &ibs));
    PetscCall(MatGetBlockSize(matis->A, &bs));
    PetscCall(MatCreateSubMatrix(matis->A, pcis->is_I_local, pcis->is_I_local, reuse, &pcis->pA_II));
    if (amat) {
      Mat_IS *amatis = (Mat_IS *)pc->mat->data;
      PetscCall(MatCreateSubMatrix(amatis->A, pcis->is_I_local, pcis->is_I_local, reuse, &pcis->A_II));
    } else {
      /* operator and preconditioning matrix coincide: share the same interior block */
      PetscCall(PetscObjectReference((PetscObject)pcis->pA_II));
      PetscCall(MatDestroy(&pcis->A_II));
      pcis->A_II = pcis->pA_II;
    }
    PetscCall(MatSetBlockSize(pcis->A_II, bs == ibs ? bs : 1));
    PetscCall(MatSetBlockSize(pcis->pA_II, bs == ibs ? bs : 1));
    PetscCall(MatCreateSubMatrix(matis->A, pcis->is_B_local, pcis->is_B_local, reuse, &pcis->A_BB));
    PetscCall(PetscObjectTypeCompare((PetscObject)matis->A, MATSEQSBAIJ, &issbaij));
    if (!issbaij) {
      PetscCall(MatCreateSubMatrix(matis->A, pcis->is_I_local, pcis->is_B_local, reuse, &pcis->A_IB));
      PetscCall(MatCreateSubMatrix(matis->A, pcis->is_B_local, pcis->is_I_local, reuse, &pcis->A_BI));
    } else {
      /* SBAIJ only stores the upper triangle; convert to BAIJ to extract unsymmetric off-diagonal blocks */
      Mat newmat;

      PetscCall(MatConvert(matis->A, MATSEQBAIJ, MAT_INITIAL_MATRIX, &newmat));
      PetscCall(MatCreateSubMatrix(newmat, pcis->is_I_local, pcis->is_B_local, reuse, &pcis->A_IB));
      PetscCall(MatCreateSubMatrix(newmat, pcis->is_B_local, pcis->is_I_local, reuse, &pcis->A_BI));
      PetscCall(MatDestroy(&newmat));
    }
    PetscCall(MatSetBlockSize(pcis->A_BB, bs == ibs ? bs : 1));
  }

  /* Creating scaling vector D */
  PetscCall(PetscOptionsGetBool(((PetscObject)pc)->options, ((PetscObject)pc)->prefix, "-pc_is_use_stiffness_scaling", &pcis->use_stiffness_scaling, NULL));
  if (pcis->use_stiffness_scaling) {
    PetscScalar *a;
    PetscInt     i, n;

    if (pcis->A_BB) {
      PetscCall(MatGetDiagonal(pcis->A_BB, pcis->D));
    } else {
      PetscCall(MatGetDiagonal(matis->A, pcis->vec1_N));
      PetscCall(VecScatterBegin(pcis->N_to_B, pcis->vec1_N, pcis->D, INSERT_VALUES, SCATTER_FORWARD));
      PetscCall(VecScatterEnd(pcis->N_to_B, pcis->vec1_N, pcis->D, INSERT_VALUES, SCATTER_FORWARD));
    }
    PetscCall(VecAbs(pcis->D));
    /* guard against (near-)zero diagonal entries that would break the pointwise division below */
    PetscCall(VecGetLocalSize(pcis->D, &n));
    PetscCall(VecGetArray(pcis->D, &a));
    for (i = 0; i < n; i++)
      if (PetscAbsScalar(a[i]) < PETSC_SMALL) a[i] = 1.0;
    PetscCall(VecRestoreArray(pcis->D, &a));
  }
  /* normalize D so that the interface contributions sum to one across subdomains (partition of unity) */
  PetscCall(VecSet(pcis->vec1_global, 0.0));
  PetscCall(VecScatterBegin(pcis->global_to_B, pcis->D, pcis->vec1_global, ADD_VALUES, SCATTER_REVERSE));
  PetscCall(VecScatterEnd(pcis->global_to_B, pcis->D, pcis->vec1_global, ADD_VALUES, SCATTER_REVERSE));
  PetscCall(VecScatterBegin(pcis->global_to_B, pcis->vec1_global, pcis->vec1_B, INSERT_VALUES, SCATTER_FORWARD));
  PetscCall(VecScatterEnd(pcis->global_to_B, pcis->vec1_global, pcis->vec1_B, INSERT_VALUES, SCATTER_FORWARD));
  PetscCall(VecPointwiseDivide(pcis->D, pcis->D, pcis->vec1_B));

  /* Creating the KSP contexts for the local Dirichlet and Neumann problems */
  if (computesolvers) {
    PC pc_ctx;

    pcis->pure_neumann = matis->pure_neumann;
    /* Dirichlet */
    PetscCall(KSPCreate(PETSC_COMM_SELF, &pcis->ksp_D));
    PetscCall(KSPSetNestLevel(pcis->ksp_D, pc->kspnestlevel));
    PetscCall(KSPSetErrorIfNotConverged(pcis->ksp_D, pc->erroriffailure));
    PetscCall(PetscObjectIncrementTabLevel((PetscObject)pcis->ksp_D, (PetscObject)pc, 1));
    PetscCall(KSPSetOperators(pcis->ksp_D, pcis->A_II, pcis->A_II));
    PetscCall(KSPSetOptionsPrefix(pcis->ksp_D, "is_localD_"));
    PetscCall(KSPGetPC(pcis->ksp_D, &pc_ctx));
    PetscCall(PCSetType(pc_ctx, PCLU));
    PetscCall(KSPSetType(pcis->ksp_D, KSPPREONLY));
    PetscCall(KSPSetFromOptions(pcis->ksp_D));
    /* the vectors in the following line are dummy arguments, just telling the KSP the vector size. Values are not used */
    PetscCall(KSPSetUp(pcis->ksp_D));
    /* Neumann */
    PetscCall(KSPCreate(PETSC_COMM_SELF, &pcis->ksp_N));
    PetscCall(KSPSetNestLevel(pcis->ksp_N, pc->kspnestlevel));
    PetscCall(KSPSetErrorIfNotConverged(pcis->ksp_N, pc->erroriffailure));
    PetscCall(PetscObjectIncrementTabLevel((PetscObject)pcis->ksp_N, (PetscObject)pc, 1));
    PetscCall(KSPSetOperators(pcis->ksp_N, matis->A, matis->A));
    PetscCall(KSPSetOptionsPrefix(pcis->ksp_N, "is_localN_"));
    PetscCall(KSPGetPC(pcis->ksp_N, &pc_ctx));
    PetscCall(PCSetType(pc_ctx, PCLU));
    PetscCall(KSPSetType(pcis->ksp_N, KSPPREONLY));
    PetscCall(KSPSetFromOptions(pcis->ksp_N));
    {
      PetscBool damp_fixed = PETSC_FALSE, remove_nullspace_fixed = PETSC_FALSE, set_damping_factor_floating = PETSC_FALSE, not_damp_floating = PETSC_FALSE, not_remove_nullspace_floating = PETSC_FALSE;
      PetscReal fixed_factor, floating_factor;

      PetscCall(PetscOptionsGetReal(((PetscObject)pc_ctx)->options, ((PetscObject)pc_ctx)->prefix, "-pc_is_damp_fixed", &fixed_factor, &damp_fixed));
      if (!damp_fixed) fixed_factor = 0.0;
      PetscCall(PetscOptionsGetBool(((PetscObject)pc_ctx)->options, ((PetscObject)pc_ctx)->prefix, "-pc_is_damp_fixed", &damp_fixed, NULL));

      PetscCall(PetscOptionsGetBool(((PetscObject)pc_ctx)->options, ((PetscObject)pc_ctx)->prefix, "-pc_is_remove_nullspace_fixed", &remove_nullspace_fixed, NULL));

      PetscCall(PetscOptionsGetReal(((PetscObject)pc_ctx)->options, ((PetscObject)pc_ctx)->prefix, "-pc_is_set_damping_factor_floating", &floating_factor, &set_damping_factor_floating));
      if (!set_damping_factor_floating) floating_factor = 0.0;
      PetscCall(PetscOptionsGetBool(((PetscObject)pc_ctx)->options, ((PetscObject)pc_ctx)->prefix, "-pc_is_set_damping_factor_floating", &set_damping_factor_floating, NULL));
      if (!set_damping_factor_floating) floating_factor = 1.e-12;

      PetscCall(PetscOptionsGetBool(((PetscObject)pc_ctx)->options, ((PetscObject)pc_ctx)->prefix, "-pc_is_not_damp_floating", &not_damp_floating, NULL));

      PetscCall(PetscOptionsGetBool(((PetscObject)pc_ctx)->options, ((PetscObject)pc_ctx)->prefix, "-pc_is_not_remove_nullspace_floating", &not_remove_nullspace_floating, NULL));

      if (pcis->pure_neumann) { /* floating subdomain */
        if (!(not_damp_floating)) {
          PetscCall(PCFactorSetShiftType(pc_ctx, MAT_SHIFT_NONZERO));
          PetscCall(PCFactorSetShiftAmount(pc_ctx, floating_factor));
        }
        if (!(not_remove_nullspace_floating)) {
          MatNullSpace nullsp;
          PetscCall(MatNullSpaceCreate(PETSC_COMM_SELF, PETSC_TRUE, 0, NULL, &nullsp));
          PetscCall(MatSetNullSpace(matis->A, nullsp));
          PetscCall(MatNullSpaceDestroy(&nullsp));
        }
      } else { /* fixed subdomain */
        if (damp_fixed) {
          /* apply the factor read from -pc_is_damp_fixed; previously floating_factor was
             used here by mistake, leaving fixed_factor dead */
          PetscCall(PCFactorSetShiftType(pc_ctx, MAT_SHIFT_NONZERO));
          PetscCall(PCFactorSetShiftAmount(pc_ctx, fixed_factor));
        }
        if (remove_nullspace_fixed) {
          MatNullSpace nullsp;
          PetscCall(MatNullSpaceCreate(PETSC_COMM_SELF, PETSC_TRUE, 0, NULL, &nullsp));
          PetscCall(MatSetNullSpace(matis->A, nullsp));
          PetscCall(MatNullSpaceDestroy(&nullsp));
        }
      }
    }
    /* the vectors in the following line are dummy arguments, just telling the KSP the vector size. Values are not used */
    PetscCall(KSPSetUp(pcis->ksp_N));
  }
  PetscFunctionReturn(PETSC_SUCCESS);
}

/*@
  PCISReset - Removes all the `PC_IS` parts of the `PC` implementation data structure

  Input Parameter:
. pc - the `PC` object, must be of type `PCNN` or `PCBDDC`

  Level: advanced

.seealso: [](ch_ksp), `PCISSetUseStiffnessScaling()`, `PCISSetSubdomainDiagonalScaling()`, `PCISScatterArrayNToVecB()`, `PCISSetSubdomainScalingFactor()`,
          `PCISInitialize()`, `PCISApplySchur()`, `PCISApplyInvSchur()`
@*/
PetscErrorCode PCISReset(PC pc)
{
  PC_IS    *pcis;
  PetscBool correcttype;

  PetscFunctionBegin;
  /* a NULL pc is tolerated; pc->data must only be read after this guard
     (previously the dereference happened before the NULL check, defeating it) */
  if (!pc) PetscFunctionReturn(PETSC_SUCCESS);
  pcis = (PC_IS *)pc->data;
  PetscCall(PetscObjectTypeCompareAny((PetscObject)pc, &correcttype, PCBDDC, PCNN, ""));
  PetscCheck(correcttype, PetscObjectComm((PetscObject)pc), PETSC_ERR_ARG_WRONGSTATE, "PC must be of type PCNN or PCBDDC");
  PetscCall(ISDestroy(&pcis->is_B_local));
  PetscCall(ISDestroy(&pcis->is_I_local));
  PetscCall(ISDestroy(&pcis->is_B_global));
  PetscCall(ISDestroy(&pcis->is_I_global));
  PetscCall(MatDestroy(&pcis->A_II));
  PetscCall(MatDestroy(&pcis->pA_II));
  PetscCall(MatDestroy(&pcis->A_IB));
  PetscCall(MatDestroy(&pcis->A_BI));
  PetscCall(MatDestroy(&pcis->A_BB));
  PetscCall(VecDestroy(&pcis->D));
  PetscCall(KSPDestroy(&pcis->ksp_N));
  PetscCall(KSPDestroy(&pcis->ksp_D));
  PetscCall(VecDestroy(&pcis->vec1_N));
  PetscCall(VecDestroy(&pcis->vec2_N));
  PetscCall(VecDestroy(&pcis->vec1_D));
  PetscCall(VecDestroy(&pcis->vec2_D));
  PetscCall(VecDestroy(&pcis->vec3_D));
  PetscCall(VecDestroy(&pcis->vec4_D));
  PetscCall(VecDestroy(&pcis->vec1_B));
  PetscCall(VecDestroy(&pcis->vec2_B));
  PetscCall(VecDestroy(&pcis->vec3_B));
  PetscCall(VecDestroy(&pcis->vec1_global));
  PetscCall(VecScatterDestroy(&pcis->global_to_D));
  PetscCall(VecScatterDestroy(&pcis->N_to_B));
  PetscCall(VecScatterDestroy(&pcis->N_to_D));
  PetscCall(VecScatterDestroy(&pcis->global_to_B));
  PetscCall(PetscFree(pcis->work_N));
  /* n_neigh > -1 means PCISSetUp() obtained neighbor info from the mapping; give it back */
  if (pcis->n_neigh > -1) PetscCall(ISLocalToGlobalMappingRestoreInfo(pcis->mapping, &pcis->n_neigh, &pcis->neigh, &pcis->n_shared, &pcis->shared));
  PetscCall(ISLocalToGlobalMappingDestroy(&pcis->mapping));
  PetscCall(ISLocalToGlobalMappingDestroy(&pcis->BtoNmap));
  PetscCall(PetscObjectComposeFunction((PetscObject)pc, "PCISSetUseStiffnessScaling_C", NULL));
  PetscCall(PetscObjectComposeFunction((PetscObject)pc, "PCISSetSubdomainScalingFactor_C", NULL));
  PetscCall(PetscObjectComposeFunction((PetscObject)pc, "PCISSetSubdomainDiagonalScaling_C", NULL));
  PetscFunctionReturn(PETSC_SUCCESS);
}

/*@
  PCISInitialize - initializes the `PC_IS` portion of `PCNN` and `PCBDDC` preconditioner context

  Input Parameter:
. pc - the `PC` object, must be of type `PCNN` or `PCBDDC`

  Level: advanced

  Note:
  There is no `PCIS` preconditioner; the `PCIS`-prefixed routines provide functionality needed by `PCNN` and `PCBDDC`

.seealso: [](ch_ksp), `PCBDDC`, `PCNN`, `PCISSetUseStiffnessScaling()`, `PCISSetSubdomainDiagonalScaling()`, `PCISScatterArrayNToVecB()`,
          `PCISSetSubdomainScalingFactor()`,
          `PCISReset()`, `PCISApplySchur()`, `PCISApplyInvSchur()`
@*/
PetscErrorCode PCISInitialize(PC pc)
{
  PC_IS    *pcis = (PC_IS *)pc->data;
  PetscBool correcttype;

  PetscFunctionBegin;
  PetscCheck(pcis, PetscObjectComm((PetscObject)pc), PETSC_ERR_ARG_WRONGSTATE, "PC_IS context must be created by caller");
  PetscCall(PetscObjectTypeCompareAny((PetscObject)pc, &correcttype, PCBDDC, PCNN, ""));
  PetscCheck(correcttype, PetscObjectComm((PetscObject)pc), PETSC_ERR_ARG_WRONGSTATE, "PC must be of type PCNN or PCBDDC");
  pcis->n_neigh          = -1; /* marks neighbor info as not yet obtained; checked in PCISReset() */
  pcis->scaling_factor   = 1.0;
  pcis->reusesubmatrices = PETSC_TRUE;
  /* compose the setters so the public PCISSet* wrappers can dispatch to them */
  PetscCall(PetscObjectComposeFunction((PetscObject)pc, "PCISSetUseStiffnessScaling_C", PCISSetUseStiffnessScaling_IS));
  PetscCall(PetscObjectComposeFunction((PetscObject)pc, "PCISSetSubdomainScalingFactor_C", PCISSetSubdomainScalingFactor_IS));
  PetscCall(PetscObjectComposeFunction((PetscObject)pc, "PCISSetSubdomainDiagonalScaling_C", PCISSetSubdomainDiagonalScaling_IS));
  PetscFunctionReturn(PETSC_SUCCESS);
}

/*@
  PCISApplySchur - applies the Schur complement arising from the `MATIS` inside the `PCNN` preconditioner

  Input Parameters:
+ pc     - preconditioner context
. v      - vector to which the Schur complement is to be applied (it is NOT modified inside this function, UNLESS vec2_B is null)
. vec1_B - location to store the result of Schur complement applied to chunk
. vec2_B - workspace or `NULL`, `v` is used as workspace in that case
. vec1_D - work space
- vec2_D - work space

  Level: advanced

.seealso: [](ch_ksp), `PCBDDC`, `PCNN`, `PCISSetUseStiffnessScaling()`, `PCISSetSubdomainDiagonalScaling()`, `PCISScatterArrayNToVecB()`,
          `PCISSetSubdomainScalingFactor()`, `PCISApplyInvSchur()`,
          `PCISReset()`, `PCISInitialize()`
@*/
PetscErrorCode PCISApplySchur(PC pc, Vec v, Vec vec1_B, Vec vec2_B, Vec vec1_D, Vec vec2_D)
{
  PC_IS *pcis = (PC_IS *)pc->data;

  PetscFunctionBegin;
  /* with no workspace supplied, v doubles as scratch (and gets overwritten below) */
  if (!vec2_B) vec2_B = v;
  /* vec1_D = A_IB v ; vec1_B = A_BB v (both read v only, so the order is free) */
  PetscCall(MatMult(pcis->A_IB, v, vec1_D));
  PetscCall(MatMult(pcis->A_BB, v, vec1_B));
  /* vec2_D = A_II^{-1} A_IB v */
  PetscCall(KSPSolve(pcis->ksp_D, vec1_D, vec2_D));
  PetscCall(KSPCheckSolve(pcis->ksp_D, pc, vec2_D));
  /* vec1_B = (A_BB - A_BI A_II^{-1} A_IB) v, i.e. the Schur complement applied to v */
  PetscCall(MatMult(pcis->A_BI, vec2_D, vec2_B));
  PetscCall(VecAXPY(vec1_B, -1.0, vec2_B));
  PetscFunctionReturn(PETSC_SUCCESS);
}

/*@
  PCISScatterArrayNToVecB - Scatters interface node values from a big array (of all local nodes, interior or interface,
  including ghosts) into an interface vector, when in `SCATTER_FORWARD` mode, or vice-versa, when in `SCATTER_REVERSE`
  mode.

  Input Parameters:
+ pc      - preconditioner context
. array_N - [when in `SCATTER_FORWARD` mode] Array to be scattered into the vector, otherwise output array
. imode   - insert mode, `ADD_VALUES` or `INSERT_VALUES`
. smode   - scatter mode, `SCATTER_FORWARD` or `SCATTER_REVERSE`
- v_B     - [when in `SCATTER_REVERSE` mode] Vector to be scattered into the array, otherwise output vector

  Level: advanced

  Note:
  The entries in the array that do not correspond to interface nodes remain unaltered.

.seealso: [](ch_ksp), `PCBDDC`, `PCNN`, `PCISSetUseStiffnessScaling()`, `PCISSetSubdomainDiagonalScaling()`,
          `PCISSetSubdomainScalingFactor()`, `PCISApplySchur()`, `PCISApplyInvSchur()`,
          `PCISReset()`, `PCISInitialize()`, `InsertMode`
@*/
PetscErrorCode PCISScatterArrayNToVecB(PC pc, PetscScalar *array_N, Vec v_B, InsertMode imode, ScatterMode smode)
{
  PC_IS          *pcis = (PC_IS *)pc->data;
  PetscScalar    *array_B;
  const PetscInt *bidx; /* local indices of the interface nodes */
  PetscInt        k;

  PetscFunctionBegin;
  PetscCall(VecGetArray(v_B, &array_B));
  PetscCall(ISGetIndices(pcis->is_B_local, &bidx));
  if (smode == SCATTER_FORWARD) {
    /* array_N -> v_B */
    for (k = 0; k < pcis->n_B; k++) {
      if (imode == INSERT_VALUES) array_B[k] = array_N[bidx[k]];
      else array_B[k] += array_N[bidx[k]]; /* ADD_VALUES */
    }
  } else {
    /* SCATTER_REVERSE: v_B -> array_N */
    for (k = 0; k < pcis->n_B; k++) {
      if (imode == INSERT_VALUES) array_N[bidx[k]] = array_B[k];
      else array_N[bidx[k]] += array_B[k]; /* ADD_VALUES */
    }
  }
  PetscCall(ISRestoreIndices(pcis->is_B_local, &bidx));
  PetscCall(VecRestoreArray(v_B, &array_B));
  PetscFunctionReturn(PETSC_SUCCESS);
}

/*@
  PCISApplyInvSchur - Solves the Neumann problem related to applying the inverse of the Schur complement.

  Input Parameters:
+ pc     - preconditioner context
. b      - vector of local interface nodes (including ghosts)
. x      - vector of local interface nodes (including ghosts); returns the application of the inverse of the Schur complement to `b`
. vec1_N - vector of local nodes (interior and interface, including ghosts); used as work space
- vec2_N - vector of local nodes (interior and interface, including ghosts); used as work space

  Level: advanced

  Note:
  Solves the problem
.vb
  [ A_II  A_IB ] [ . ]   [ 0 ]
  [            ] [   ] = [   ]
  [ A_BI  A_BB ] [ x ]   [ b ]
.ve

.seealso: [](ch_ksp), `PCBDDC`, `PCNN`, `PCISSetUseStiffnessScaling()`, `PCISSetSubdomainDiagonalScaling()`, `PCISScatterArrayNToVecB()`,
          `PCISSetSubdomainScalingFactor()`,
          `PCISReset()`, `PCISInitialize()`
@*/
PetscErrorCode PCISApplyInvSchur(PC pc, Vec b, Vec x, Vec vec1_N, Vec vec2_N)
{
  PC_IS *pcis = (PC_IS *)pc->data;

  PetscFunctionBegin;
  /*
    Applying the inverse of the local Schur complement amounts to solving a local
    Neumann problem whose RHS is zero at the interior nodes and b on the interface,
    then extracting the interface part of the solution into x.
  */
  /* Assemble the RHS in vec1_N */
  PetscCall(VecSet(vec1_N, 0.0));
  PetscCall(VecScatterBegin(pcis->N_to_B, b, vec1_N, INSERT_VALUES, SCATTER_REVERSE));
  PetscCall(VecScatterEnd(pcis->N_to_B, b, vec1_N, INSERT_VALUES, SCATTER_REVERSE));
  /* Optional diagnostic: report the per-subdomain average of the RHS entries */
  {
    PetscBool check_rhs = PETSC_FALSE;

    PetscCall(PetscOptionsGetBool(NULL, NULL, "-pc_is_check_consistency", &check_rhs, NULL));
    if (check_rhs) {
      PetscScalar avg;
      PetscViewer ascii;

      PetscCall(PetscViewerASCIIGetStdout(PetscObjectComm((PetscObject)pc), &ascii));
      PetscCall(VecSum(vec1_N, &avg));
      avg = avg / ((PetscReal)pcis->n);
      PetscCall(PetscViewerASCIIPushSynchronized(ascii));
      if (pcis->pure_neumann) {
        PetscCall(PetscViewerASCIISynchronizedPrintf(ascii, "Subdomain %04d is floating. Average = % 1.14e\n", PetscGlobalRank, (double)PetscAbsScalar(avg)));
      } else {
        PetscCall(PetscViewerASCIISynchronizedPrintf(ascii, "Subdomain %04d is fixed.    Average = % 1.14e\n", PetscGlobalRank, (double)PetscAbsScalar(avg)));
      }
      PetscCall(PetscViewerFlush(ascii));
      PetscCall(PetscViewerASCIIPopSynchronized(ascii));
    }
  }
  /* Solve the local Neumann problem into vec2_N */
  PetscCall(KSPSolve(pcis->ksp_N, vec1_N, vec2_N));
  PetscCall(KSPCheckSolve(pcis->ksp_N, pc, vec2_N));
  /* Restrict the solution to the interface */
  PetscCall(VecScatterBegin(pcis->N_to_B, vec2_N, x, INSERT_VALUES, SCATTER_FORWARD));
  PetscCall(VecScatterEnd(pcis->N_to_B, vec2_N, x, INSERT_VALUES, SCATTER_FORWARD));
  PetscFunctionReturn(PETSC_SUCCESS);
}
