Commit 4b73bf2f authored by David Moxey's avatar David Moxey
Browse files

Working version of PETSc static cond (including multi-level)

parent cd63fe2d
......@@ -96,7 +96,7 @@ namespace Nektar
// Copy results into output vector
PetscScalar *avec;
VecGetArray (m_locVec, &avec);
Vmath::Vcopy (nHomDofs, avec, 1, &pOutput[0], 1);
Vmath::Vcopy (nHomDofs, avec, 1, &pOutput[pNumDir], 1);
VecRestoreArray(m_locVec, &avec);
......@@ -49,7 +49,7 @@ namespace Nektar
class ExpList;
/// A global linear system.
class GlobalLinSysPETSc : public GlobalLinSys
class GlobalLinSysPETSc : virtual public GlobalLinSys
/// Constructor for full direct matrix solve.
......@@ -62,7 +62,8 @@ namespace Nektar
const GlobalLinSysKey &pLinSysKey,
const boost::weak_ptr<ExpList> &pExp,
const boost::shared_ptr<AssemblyMap> &pLocToGloMap)
: GlobalLinSysPETSc(pLinSysKey, pExp, pLocToGloMap)
: GlobalLinSys (pLinSysKey, pExp, pLocToGloMap),
GlobalLinSysPETSc(pLinSysKey, pExp, pLocToGloMap)
ASSERTL1(m_linSysKey.GetGlobalSysSolnType() == ePETScFullMatrix,
"This routine should only be used when using a Full PETSc"
......@@ -173,8 +174,7 @@ namespace Nektar
tmp.get(), 1);
SolveLinearSystem(nGlobDofs, tmp,
tmp2 = pOutput + nDirDofs,
SolveLinearSystem(nGlobDofs, tmp, pOutput,
pLocToGloMap, nDirDofs);
......@@ -32,10 +32,12 @@
// Description: GlobalLinSysStaticCond header
#include <MultiRegions/GlobalLinSysPETSc.h>
#include <MultiRegions/GlobalLinSysStaticCond.h>
#include <MultiRegions/MultiRegionsDeclspec.h>
namespace Nektar
......@@ -50,7 +52,8 @@ namespace Nektar
/// A global linear system.
class GlobalLinSysPETScStaticCond : public GlobalLinSysPETSc
class GlobalLinSysPETScStaticCond : virtual public GlobalLinSysPETSc,
virtual public GlobalLinSysStaticCond
/// Creates an instance of this class
......@@ -60,8 +63,11 @@ namespace Nektar
const boost::shared_ptr<AssemblyMap>
return MemoryManager<GlobalLinSysPETScStaticCond>
::AllocateSharedPtr(pLinSysKey, pExpList, pLocToGloMap);
GlobalLinSysSharedPtr p = MemoryManager<
pLinSysKey, pExpList, pLocToGloMap);
return p;
/// Name of class
......@@ -88,42 +94,20 @@ namespace Nektar
MULTI_REGIONS_EXPORT virtual ~GlobalLinSysPETScStaticCond();
/// Schur complement for Direct Static Condensation.
GlobalLinSysPETScStaticCondSharedPtr m_recursiveSchurCompl;
/// Block matrices at this level
DNekScalBlkMatSharedPtr m_schurCompl;
DNekScalBlkMatSharedPtr m_BinvD;
DNekScalBlkMatSharedPtr m_C;
DNekScalBlkMatSharedPtr m_invD;
/// Solve the linear system for given input and output vectors
/// using a specified local to global map.
virtual void v_Solve(
const Array<OneD, const NekDouble> &in,
Array<OneD, NekDouble> &out,
const AssemblyMapSharedPtr &locToGloMap,
const Array<OneD, const NekDouble> &dirForcing
= NullNekDouble1DArray);
/// Initialise this object
void Initialise(
const boost::shared_ptr<AssemblyMap>& locToGloMap);
/// Set up the storage for the Schur complement or the top level
/// of the multi-level Schur complement.
void SetupTopLevel(
const boost::shared_ptr<AssemblyMap>& locToGloMap);
virtual GlobalLinSysStaticCondSharedPtr v_Recurse(
const GlobalLinSysKey &mkey,
const boost::weak_ptr<ExpList> &pExpList,
const DNekScalBlkMatSharedPtr pSchurCompl,
const DNekScalBlkMatSharedPtr pBinvD,
const DNekScalBlkMatSharedPtr pC,
const DNekScalBlkMatSharedPtr pInvD,
const boost::shared_ptr<AssemblyMap> &locToGloMap);
/// Assemble the Schur complement matrix.
void AssembleSchurComplement(
const boost::shared_ptr<AssemblyMap>& locToGloMap);
void ConstructNextLevelCondensedSystem(
const boost::shared_ptr<AssemblyMap>& locToGloMap);
virtual void v_AssembleSchurComplement(
boost::shared_ptr<AssemblyMap> locToGloMap);
Markdown is supported
0% or .
You are about to add 0 people to the discussion. Proceed with caution.
Finish editing this message first!
Please register or sign in to comment