Commit 4246111b authored by Thomas Witkowski

Finished first phase of code refactoring to allow for multiple coarse spaces in...

Finished first phase of code refactoring to allow for multiple coarse spaces in domain decomposition methods.
parent 9a682f9d
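In short, the PETSc matrix and vector bookkeeping that each solver previously reached through its petscData member (a ParallelCoarseSpaceMatVec) is folded into ParallelCoarseSpaceMatVec itself, and PetscSolver now derives from that class. The following is only a minimal sketch of the resulting call sequence inside a solver subclass, mirroring the hunks below; the class name MySolver is a hypothetical placeholder, while setCoarseSpaceDofMapping(), createMatVec(), matAssembly(), getMatInterior() and the KSP calls are the ones appearing in this commit.

// Sketch only, not part of the commit. MySolver stands for a PetscSolver
// subclass such as PetscSolverGlobalMatrix; seqMat and kspInterior are the
// usual members of that class.
void MySolver::fillPetscMatrix(Matrix<DOFMatrix*> *seqMat)
{
  // Coarse spaces (if any) are now registered directly on the base class,
  // either for all components (component == -1) or per component, e.g.:
  //   setCoarseSpaceDofMapping(coarseDofMap);      // all components
  //   setCoarseSpaceDofMapping(coarseDofMap0, 0);  // component 0 only

  // Formerly: petscData.init(...); petscData.create(*seqMat);
  createMatVec(*seqMat);

  // ... fill the interior block (and the optional coarse space blocks) ...

  // Formerly: petscData.matAssembly();
  matAssembly();

  // Formerly the operators were taken from petscData.getInteriorMat().
  KSPCreate(mpiCommGlobal, &kspInterior);
  KSPSetOperators(kspInterior, getMatInterior(), getMatInterior(),
                  SAME_NONZERO_PATTERN);
  KSPSetFromOptions(kspInterior);
}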
@@ -19,32 +19,47 @@ namespace AMDiS {
using namespace std;
ParallelCoarseSpaceMatVec::ParallelCoarseSpaceMatVec()
: rStartInterior(0),
nGlobalOverallInterior(0),
: interiorMap(NULL),
lastMeshNnz(0),
alwaysCreateNnzStructure(false)
alwaysCreateNnzStructure(false),
meshDistributor(NULL),
subdomainLevel(0),
rStartInterior(0),
nGlobalOverallInterior(0)
{
Parameters::get("parallel->always create nnz structure",
alwaysCreateNnzStructure);
}
void ParallelCoarseSpaceMatVec::init(ParallelDofMapping *iMap,
map<int, ParallelDofMapping*> cMap,
int sdLevel,
MPI::Intracomm mcLocal,
MPI::Intracomm mcGlobal,
MeshDistributor *meshDist)
void ParallelCoarseSpaceMatVec::setCoarseSpaceDofMapping(ParallelDofMapping *coarseDofs,
int component)
{
FUNCNAME("ParallelCoarseSpaceMatVec::init()");
interiorMap = iMap;
coarseSpaceMap = cMap;
subdomainLevel = sdLevel;
mpiCommLocal = mcLocal;
mpiCommGlobal = mcGlobal;
meshDistributor = meshDist;
FUNCNAME("ParallelCoarseSpaceMatVec::setCoarseSpaceDofMapping()");
TEST_EXIT_DBG(coarseDofs)("Should not happen!\n");
if (component == -1) {
// === Set coarse space for all components. ===
coarseSpaceMap.clear();
int nComponents = coarseDofs->getNumberOfComponents();
for (int i = 0; i < nComponents; i++)
coarseSpaceMap[i] = coarseDofs;
} else {
// === Set coarse space for just one component. ===
coarseSpaceMap[component] = coarseDofs;
}
}
void ParallelCoarseSpaceMatVec::prepare()
{
FUNCNAME("ParallelCoarseSpaceMatVec::prepare()");
// === Create vector of unique pointers to all coarse space maps. ===
uniqueCoarseMap.clear();
if (coarseSpaceMap.size()) {
@@ -58,6 +73,9 @@ namespace AMDiS {
}
}
// === Create pointers to PETSc matrix and vector objects. ===
int nCoarseMap = uniqueCoarseMap.size();
mat.resize(nCoarseMap + 1);
for (int i = 0; i < nCoarseMap + 1; i++)
@@ -66,6 +84,9 @@ namespace AMDiS {
vecSol.resize(nCoarseMap + 1);
vecRhs.resize(nCoarseMap + 1);
// === Create map from component number to its coarse space map. ===
componentIthCoarseMap.resize(coarseSpaceMap.size());
for (unsigned int i = 0; i < componentIthCoarseMap.size(); i++) {
bool found = false;
@@ -82,16 +103,30 @@ namespace AMDiS {
}
void ParallelCoarseSpaceMatVec::create(Matrix<DOFMatrix*>& seqMat)
void ParallelCoarseSpaceMatVec::createMatVec(Matrix<DOFMatrix*>& seqMat)
{
FUNCNAME("ParallelCoarseSpaceMatVec::create()");
// === Prepare coarse space information and generate the correct number ===
// === of empty PETSc matrix and vector objects. ===
prepare();
// === Update subdomain data (mostly required for multi-level methods) ===
updateSubdomainData();
// === If required, recompute non zero structure of the matrix. ===
bool localMatrix = (coarseSpaceMap.size() && subdomainLevel == 0);
if (checkMeshChange(seqMat, localMatrix)) {
if (checkMeshChange()) {
// Mesh has been changed, recompute interior DOF mapping.
vector<const FiniteElemSpace*> feSpaces = getComponentFeSpaces(seqMat);
interiorMap->setComputeMatIndex(!localMatrix);
interiorMap->update(feSpaces);
int nMat = uniqueCoarseMap.size() + 1;
nnz.resize(nMat);
for (int i = 0; i < nMat; i++) {
@@ -250,8 +285,7 @@ namespace AMDiS {
}
bool ParallelCoarseSpaceMatVec::checkMeshChange(Matrix<DOFMatrix*> &seqMat,
bool localMatrix)
bool ParallelCoarseSpaceMatVec::checkMeshChange()
{
FUNCNAME("ParallelCoarseSpaceMatVec::checkMeshChange()");
@@ -261,13 +295,7 @@ namespace AMDiS {
mpiCommGlobal.Allreduce(&sendValue, &recvAllValues, 1, MPI_INT, MPI_SUM);
if (recvAllValues != 0 || alwaysCreateNnzStructure) {
vector<const FiniteElemSpace*> feSpaces = getComponentFeSpaces(seqMat);
interiorMap->setComputeMatIndex(!localMatrix);
interiorMap->update(feSpaces);
lastMeshNnz = meshDistributor->getLastMeshChangeIndex();
return true;
}
......
@@ -21,10 +21,7 @@ namespace AMDiS {
using namespace std;
PetscSolver::PetscSolver()
: meshDistributor(NULL),
subdomainLevel(0),
interiorMap(NULL),
mpiRank(-1),
: ParallelCoarseSpaceMatVec(),
kspPrefix(""),
removeRhsNullspace(false),
hasConstantNullspace(false)
@@ -41,25 +38,6 @@ namespace AMDiS {
}
void PetscSolver::setCoarseSpaceDofMapping(ParallelDofMapping *coarseDofs,
int component)
{
FUNCNAME("PetscSolver::setCoarseSpaceDofMapping()");
TEST_EXIT_DBG(coarseDofs)("Should not happen!\n");
if (component == -1) {
coarseSpaceMap.clear();
int nComponents = coarseDofs->getNumberOfComponents();
for (int i = 0; i < nComponents; i++)
coarseSpaceMap[i] = coarseDofs;
} else {
coarseSpaceMap[component] = coarseDofs;
}
}
void PetscSolver::solve(Vec &rhs, Vec &sol)
{
FUNCNAME("PetscSolver::solve()");
......
@@ -42,38 +42,18 @@ namespace AMDiS {
using namespace std;
class PetscSolver
/**
* Create an abstract interface to an arbitrary PETSc solver. This class is
* based on \ref ParallelCoarseSpaceMatVec to support solvers that make use
* of a coarse grid problem.
*/
class PetscSolver : public ParallelCoarseSpaceMatVec
{
public:
PetscSolver();
virtual ~PetscSolver() {}
void setMeshDistributor(MeshDistributor *m,
MPI::Intracomm mpiComm0,
MPI::Intracomm mpiComm1)
{
meshDistributor = m;
mpiCommGlobal = mpiComm0;
mpiCommLocal = mpiComm1;
mpiRank = mpiCommGlobal.Get_rank();
}
void setLevel(int l)
{
subdomainLevel = l;
}
/// Set parallel DOF mapping for the interior DOFs.
void setDofMapping(ParallelDofMapping *interiorDofs)
{
interiorMap = interiorDofs;
}
void setCoarseSpaceDofMapping(ParallelDofMapping *coarseDofs,
int component = -1);
/** \brief
* Create a PETSc matrix. The given DOF matrices are used to create the nnz
* structure of the PETSc matrix and the values are transferred to it.
@@ -146,86 +126,6 @@ namespace AMDiS {
constNullspaceComponent.push_back(component);
}
inline bool isCoarseSpace(int component,
const FiniteElemSpace *feSpace,
DegreeOfFreedom dof)
{
FUNCNAME("PetscSolver::isCoarseSpace()");
if (coarseSpaceMap.empty())
return false;
TEST_EXIT_DBG(coarseSpaceMap.count(component))
("Component %d has no coarse space defined!\n", component);
return (*(coarseSpaceMap[component]))[feSpace].isSet(dof);
}
inline Vec& getRhsCoarseSpace()
{
FUNCNAME("PetscSolver::getRhsCoarseSpace()");
TEST_EXIT_DBG(coarseSpaceMap.size())
("Subdomain solver does not contain a coarse space!\n");
return petscData.getCoarseVecRhs();
}
inline Vec& getRhsInterior()
{
return petscData.getInteriorVecRhs();
}
inline Vec& getSolCoarseSpace()
{
FUNCNAME("PetscSolver::getSolCoarseSpace()");
TEST_EXIT_DBG(coarseSpaceMap.size())
("Subdomain solver does not contain a coarse space!\n");
return petscData.getCoarseVecSol();
}
inline Vec& getSolInterior()
{
return petscData.getInteriorVecSol();
}
inline Mat& getMatIntInt()
{
return petscData.getInteriorMat();
}
inline Mat& getMatCoarseCoarse()
{
FUNCNAME("PetscSolver::getMatCoarseCoarse()");
TEST_EXIT_DBG(coarseSpaceMap.size())
("Subdomain solver does not contain a coarse space!\n");
return petscData.getCoarseMat(0);
}
inline Mat& getMatIntCoarse()
{
FUNCNAME("PetscSolver::getMatIntCoarse()");
TEST_EXIT_DBG(coarseSpaceMap.size())
("Subdomain solver does not contain a coarse space!\n");
return petscData.getIntCoarseMat();
}
inline Mat& getMatCoarseInt()
{
FUNCNAME("PetscSolver::getMatCoarseInt()");
TEST_EXIT_DBG(coarseSpaceMap.size())
("Subdomain solver does not contain a coarse space!\n");
return petscData.getCoarseIntMat();
}
protected:
/** \brief
* Copies between two PETSc vectors by using different index sets for the
@@ -242,26 +142,6 @@ namespace AMDiS {
vector<int>& originIndex, vector<int>& destIndex);
protected:
MeshDistributor *meshDistributor;
int subdomainLevel;
ParallelDofMapping *interiorMap;
/// Parallel DOF mapping of the (optional) coarse space. Allows defining
/// different coarse spaces for different components.
map<int, ParallelDofMapping*> coarseSpaceMap;
int mpiRank;
MPI::Intracomm mpiCommGlobal;
MPI::Intracomm mpiCommLocal;
/// PETSc matrices and vectors (possibly block structured if there is
/// a coarse space defined).
ParallelCoarseSpaceMatVec petscData;
/// PETSc solver object
KSP kspInterior;
......
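The PetscSolverFeti changes that follow are largely a mechanical consequence of the renamed accessors now inherited from ParallelCoarseSpaceMatVec. For reference, a short sketch of the correspondence, written in the K_BB / K_BPi / f_B notation of the FETI comments below; subdomain is a PetscSolver* as in that code, and the variable names themselves are only illustrative.

// Renamed accessors used in the FETI code below (sketch, not part of the commit):
Mat &K_BB   = subdomain->getMatInterior();        // was getMatIntInt()
Mat &K_PiPi = subdomain->getMatCoarse();          // was getMatCoarseCoarse()
Mat &K_BPi  = subdomain->getMatInteriorCoarse();  // was getMatIntCoarse()
Mat &K_PiB  = subdomain->getMatCoarseInterior();  // was getMatCoarseInt()
Vec &f_B    = subdomain->getVecRhsInterior();     // was getRhsInterior()
Vec &f_Pi   = subdomain->getVecRhsCoarse();       // was getRhsCoarseSpace()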
@@ -38,10 +38,11 @@ namespace AMDiS {
MatShellGetContext(mat, &ctx);
SchurPrimalData* data = static_cast<SchurPrimalData*>(ctx);
MatMult(data->subSolver->getMatIntCoarse(), x, data->tmp_vec_b);
MatMult(data->subSolver->getMatInteriorCoarse(), x, data->tmp_vec_b);
data->subSolver->solveGlobal(data->tmp_vec_b, data->tmp_vec_b);
MatMult(data->subSolver->getMatCoarseInt(), data->tmp_vec_b, data->tmp_vec_primal);
MatMult(data->subSolver->getMatCoarseCoarse(), x, y);
MatMult(data->subSolver->getMatCoarseInterior(), data->tmp_vec_b,
data->tmp_vec_primal);
MatMult(data->subSolver->getMatCoarse(), x, y);
VecAXPBY(y, -1.0, 1.0, data->tmp_vec_primal);
return 0;
@@ -71,13 +72,15 @@ namespace AMDiS {
MatMult(*(data->mat_lagrange), data->tmp_vec_b, data->tmp_vec_lagrange);
MatMult(data->subSolver->getMatCoarseInt(), data->tmp_vec_b, data->tmp_vec_primal);
MatMult(data->subSolver->getMatCoarseInterior(),
data->tmp_vec_b, data->tmp_vec_primal);
wtime01 = MPI::Wtime();
KSPSolve(*(data->ksp_schur_primal), data->tmp_vec_primal, data->tmp_vec_primal);
FetiTimings::fetiSolve02 += (MPI::Wtime() - wtime01);
MatMult(data->subSolver->getMatIntCoarse(), data->tmp_vec_primal, data->tmp_vec_b);
MatMult(data->subSolver->getMatInteriorCoarse(),
data->tmp_vec_primal, data->tmp_vec_b);
wtime01 = MPI::Wtime();
data->subSolver->solveGlobal(data->tmp_vec_b, data->tmp_vec_b);
@@ -567,6 +570,7 @@ namespace AMDiS {
// === Create for each dual node that is owned by the rank, the set ===
// === of ranks that contain this node (denoted by W(x_j)). ===
int mpiRank = meshDistributor->getMpiRank();
for (DofComm::Iterator it(meshDistributor->getDofComm().getSendDofs(),
meshLevel, feSpace);
!it.end(); it.nextRank()) {
@@ -705,6 +709,7 @@ namespace AMDiS {
FUNCNAME("PetscSolverFeti::createMatLagrange()");
double wtime = MPI::Wtime();
int mpiRank = meshDistributor->getMpiRank();
// === Create distributed matrix for Lagrange constraints. ===
@@ -827,13 +832,13 @@ namespace AMDiS {
for (int i = 0; i < nRowsRankB; i++) {
PetscInt row = localDofMap.getStartDofs() + i;
MatGetRow(subdomain->getMatIntCoarse(), row, &nCols, &cols, &values);
MatGetRow(subdomain->getMatInteriorCoarse(), row, &nCols, &cols, &values);
for (int j = 0; j < nCols; j++)
if (values[j] != 0.0)
mat_b_primal_cols[cols[j]].push_back(make_pair(i, values[j]));
MatRestoreRow(subdomain->getMatIntCoarse(), row, &nCols, &cols, &values);
MatRestoreRow(subdomain->getMatInteriorCoarse(), row, &nCols, &cols, &values);
}
TEST_EXIT(static_cast<int>(mat_b_primal_cols.size()) ==
@@ -870,10 +875,12 @@ namespace AMDiS {
MatAssemblyBegin(matBPi, MAT_FINAL_ASSEMBLY);
MatAssemblyEnd(matBPi, MAT_FINAL_ASSEMBLY);
MatDuplicate(subdomain->getMatCoarseCoarse(), MAT_COPY_VALUES, &mat_schur_primal);
MatDuplicate(subdomain->getMatCoarse(), MAT_COPY_VALUES,
&mat_schur_primal);
Mat matPrimal;
MatMatMult(subdomain->getMatCoarseInt(), matBPi, MAT_INITIAL_MATRIX, PETSC_DEFAULT, &matPrimal);
MatMatMult(subdomain->getMatCoarseInterior(), matBPi, MAT_INITIAL_MATRIX,
PETSC_DEFAULT, &matPrimal);
MatAXPY(mat_schur_primal, -1.0, matPrimal, DIFFERENT_NONZERO_PATTERN);
MatDestroy(&matPrimal);
@@ -1138,7 +1145,7 @@ namespace AMDiS {
PetscViewer petscView;
PetscViewerBinaryOpen(PETSC_COMM_SELF, "interior.mat",
FILE_MODE_WRITE, &petscView);
MatView(subdomain->getMatIntInt(), petscView);
MatView(subdomain->getMatInterior(), petscView);
PetscViewerDestroy(&petscView);
}
@@ -1151,7 +1158,7 @@ namespace AMDiS {
PetscViewer petscView;
PetscViewerBinaryOpen(PETSC_COMM_WORLD, "coarse.mat",
FILE_MODE_WRITE, &petscView);
MatView(subdomain->getMatCoarseCoarse(), petscView);
MatView(subdomain->getMatCoarse(), petscView);
PetscViewerDestroy(&petscView);
}
@@ -1631,14 +1638,14 @@ namespace AMDiS {
// d = L inv(K_BB) f_B - L inv(K_BB) K_BPi inv(S_PiPi) [f_Pi - K_PiB inv(K_BB) f_B]
// vec_rhs = L * inv(K_BB) * f_B
subdomain->solveGlobal(subdomain->getRhsInterior(), tmp_b0);
subdomain->solveGlobal(subdomain->getVecRhsInterior(), tmp_b0);
MatMult(mat_lagrange, tmp_b0, vec_rhs);
// tmp_primal0 = M_PiB * inv(K_BB) * f_B
MatMult(subdomain->getMatCoarseInt(), tmp_b0, tmp_primal0);
MatMult(subdomain->getMatCoarseInterior(), tmp_b0, tmp_primal0);
// tmp_primal0 = f_Pi - M_PiB * inv(K_BB) * f_B
VecAXPBY(tmp_primal0, 1.0, -1.0, subdomain->getRhsCoarseSpace());
VecAXPBY(tmp_primal0, 1.0, -1.0, subdomain->getVecRhsCoarse());
double wtime2 = MPI::Wtime();
// tmp_primal0 = inv(S_PiPi) (f_Pi - M_PiB * inv(K_BB) * f_B)
@@ -1649,7 +1656,7 @@ namespace AMDiS {
}
//
MatMult(subdomain->getMatIntCoarse(), tmp_primal0, tmp_b0);
MatMult(subdomain->getMatInteriorCoarse(), tmp_primal0, tmp_b0);
subdomain->solveGlobal(tmp_b0, tmp_b0);
MatMult(mat_lagrange, tmp_b0, tmp_lagrange0);
@@ -1683,13 +1690,13 @@ namespace AMDiS {
// === Solve for u_primals. ===
VecCopy(subdomain->getRhsCoarseSpace(), tmp_primal0);
subdomain->solveGlobal(subdomain->getRhsInterior(), tmp_b0);
MatMult(subdomain->getMatCoarseInt(), tmp_b0, tmp_primal1);
VecCopy(subdomain->getVecRhsCoarse(), tmp_primal0);
subdomain->solveGlobal(subdomain->getVecRhsInterior(), tmp_b0);
MatMult(subdomain->getMatCoarseInterior(), tmp_b0, tmp_primal1);
VecAXPBY(tmp_primal0, -1.0, 1.0, tmp_primal1);
MatMultTranspose(mat_lagrange, vec_sol, tmp_b0);
subdomain->solveGlobal(tmp_b0, tmp_b0);
MatMult(subdomain->getMatCoarseInt(), tmp_b0, tmp_primal1);
MatMult(subdomain->getMatCoarseInterior(), tmp_b0, tmp_primal1);
VecAXPBY(tmp_primal0, 1.0, 1.0, tmp_primal1);
KSPSolve(ksp_schur_primal, tmp_primal0, tmp_primal0);
@@ -1697,11 +1704,11 @@ namespace AMDiS {
// === Solve for u_b. ===
VecCopy(subdomain->getRhsInterior(), tmp_b0);
VecCopy(subdomain->getVecRhsInterior(), tmp_b0);
MatMultTranspose(mat_lagrange, vec_sol, tmp_b1);
VecAXPBY(tmp_b0, -1.0, 1.0, tmp_b1);
MatMult(subdomain->getMatIntCoarse(), tmp_primal0, tmp_b1);
MatMult(subdomain->getMatInteriorCoarse(), tmp_primal0, tmp_b1);
VecAXPBY(tmp_b0, -1.0, 1.0, tmp_b1);
subdomain->solveGlobal(tmp_b0, tmp_b0);
......
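One step worth spelling out: the MatShell multiplication at the top of the FETI diff above (the routine operating on SchurPrimalData) applies the primal Schur complement. In the inv()/K_XY notation of those comments, the calls MatMult(getMatInteriorCoarse()), solveGlobal(), MatMult(getMatCoarseInterior()) and MatMult(getMatCoarse()), combined by the final VecAXPBY, compute

  y = S_PiPi x = K_PiPi x - K_PiB inv(K_BB) K_BPi x

with getMatCoarse() playing the role of K_PiPi, getMatCoarseInterior() of K_PiB, getMatInteriorCoarse() of K_BPi, and solveGlobal() applying inv(K_BB).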
@@ -26,9 +26,7 @@ namespace AMDiS {
double wtime = MPI::Wtime();
petscData.init(interiorMap, coarseSpaceMap,
subdomainLevel, mpiCommLocal, mpiCommGlobal,
meshDistributor);
createMatVec(*seqMat);
const FiniteElemSpace *feSpace = meshDistributor->getFeSpace(0);
nComponents = seqMat->getNumRows();
@@ -85,21 +83,19 @@ namespace AMDiS {
}
MatCreateNest(mpiCommGlobal,
nBlocks, PETSC_NULL, nBlocks, PETSC_NULL,
&(nestMat[0]), &petscData.getInteriorMat());
MatCreateNest(mpiCommGlobal, nBlocks, PETSC_NULL, nBlocks, PETSC_NULL,
&(nestMat[0]), &getMatInterior());
#if (DEBUG != 0)
MSG("Fill petsc matrix 2 needed %.5f seconds\n", MPI::Wtime() - wtime);
#endif
petscData.matAssembly();
matAssembly();
// === Init PETSc solver. ===
KSPCreate(mpiCommGlobal, &kspInterior);
KSPSetOperators(kspInterior,
petscData.getInteriorMat(),
petscData.getInteriorMat(), SAME_NONZERO_PATTERN);
KSPSetOperators(kspInterior, getMatInterior(), getMatInterior(),
SAME_NONZERO_PATTERN);
KSPSetFromOptions(kspInterior);
MSG("Fill petsc matrix needed %.5f seconds\n", MPI::Wtime() - wtime);
@@ -129,9 +125,9 @@ namespace AMDiS {
}
VecCreateNest(mpiCommGlobal, nComponents, PETSC_NULL,
&(nestVec[0]), &(getRhsInterior()));
&(nestVec[0]), &(getVecRhsInterior()));
petscData.vecRhsAssembly();
vecRhsAssembly();
}
@@ -144,10 +140,10 @@ namespace AMDiS {
setBlockPreconditioner(pcInterior);
const FiniteElemSpace *feSpace = meshDistributor->getFeSpace(0);
VecDuplicate(getRhsInterior(), &petscSolVec);
VecDuplicate(getVecRhsInterior(), &petscSolVec);
// PETSc.
solve(getRhsInterior(), petscSolVec);
solve(getVecRhsInterior(), petscSolVec);
// === Transfer values from PETSc's solution vectors to the DOF vectors. ===
for (int i = 0; i < nComponents; i++) {
@@ -183,7 +179,7 @@ namespace AMDiS {
if (nestMat[i] != PETSC_NULL)
MatDestroy(&(nestMat[i]));
petscData.matDestroy();
matDestroy();
KSPDestroy(&kspInterior);
}
@@ -193,7 +189,7 @@ namespace AMDiS {
{
FUNCNAME("PetscSolverGlobalBlockMatrix::destroyVectorData()");
petscData.vecDestroy();
vecDestroy();
VecDestroy(&petscSolVec);
}
......
@@ -27,10 +27,7 @@ namespace AMDiS {
double wtime = MPI::Wtime();
petscData.init(interiorMap, coarseSpaceMap,
subdomainLevel, mpiCommLocal, mpiCommGlobal,
meshDistributor);
petscData.create(*seqMat);
createMatVec(*seqMat);
if (coarseSpaceMap.size()) {
fillPetscMatrixWithCoarseSpace(seqMat);
@@ -55,11 +52,11 @@ namespace AMDiS {
MSG("Fill petsc matrix 2 needed %.5f seconds\n", MPI::Wtime() - wtime);
#endif
petscData.matAssembly();
matAssembly();
if (printMatInfo) {
MatInfo matInfo;
MatGetInfo(petscData.getInteriorMat(), MAT_GLOBAL_SUM, &matInfo);
MatGetInfo(getMatInterior(), MAT_GLOBAL_SUM, &matInfo);
MSG("Matrix info:\n");
MSG(" memory usage: %e MB\n", matInfo.memory / (1024.0 * 1024.0));
MSG(" mallocs: %d\n", static_cast<int>(matInfo.mallocs));
@@ -72,9 +69,7 @@ namespace AMDiS {
KSPCreate(mpiCommGlobal, &kspInterior);
KSPGetPC(kspInterior, &pcInterior);
KSPSetOperators(kspInterior,
petscData.getInteriorMat(),
petscData.getInteriorMat(),
KSPSetOperators(kspInterior, getMatInterior(), getMatInterior(),
SAME_NONZERO_PATTERN);
KSPSetTolerances(kspInterior, 0.0, 1e-8, PETSC_DEFAULT, PETSC_DEFAULT);
KSPSetType(kspInterior, KSPBCGS);
@@ -113,7 +108,6 @@ namespace AMDiS {
int nRowsRankInterior = interiorMap->getRankDofs();
int nRowsOverallInterior = interiorMap->getOverallDofs();
int rStartInterior = petscData.getStartInterior();
// === Prepare traverse of sequentially created matrices. ===
@@ -191,7 +185,7 @@ namespace AMDiS {
cols[k] = colCoarseSpace->getMatIndex(j, cols[k]);
// matCoarseCoarse
MatSetValues(petscData.getCoarseMatComp(i),
MatSetValues(getMatCoarseByComponent(i),
1, &rowIndex, cols.size(),
&(cols[0]), &(values[0]), ADD_VALUES);
@@ -206,7 +200,7 @@ namespace AMDiS {
}
// matCoarseInt
MatSetValues(petscData.getCoarseIntMatComp(i),
MatSetValues(getMatCoarseInteriorByComponent(i),
1, &rowIndex, colsOther.size(),
&(colsOther[0]), &(valuesOther[0]), ADD_VALUES);
}
@@ -222,7 +216,7 @@ namespace AMDiS {
cols[k] = interiorMap->getMatIndex(j, cols[k]);
}
MatSetValues(petscData.getInteriorMat(), 1, &localRowIndex, cols.size(),
MatSetValues(getMatInterior(), 1, &localRowIndex, cols.size(),
&(cols[0]), &(values[0]), ADD_VALUES);