Commit 1c5badea authored by Nils Kohl's avatar Nils Kohl 🌝
Browse files

Refactored PETSc sparse matrix and vector classes so that the DoF counting is...

Refactored PETSc sparse matrix and vector classes so that the DoF counting is (optionally) done during run time. This is relevant for operators and functions for which the sizes are not known at compile time.
parent 648fd536
Pipeline #37010 failed with stages
in 20 minutes and 1 second
/*
* Copyright (c) 2017-2022 Nils Kohl.
*
* This file is part of HyTeG
* (see https://i10git.cs.fau.de/hyteg/hyteg).
*
* This program is free software: you can redistribute it and/or modify
* it under the terms of the GNU General Public License as published by
* the Free Software Foundation, either version 3 of the License, or
* (at your option) any later version.
*
* This program is distributed in the hope that it will be useful,
* but WITHOUT ANY WARRANTY; without even the implied warranty of
* MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
* GNU General Public License for more details.
*
* You should have received a copy of the GNU General Public License
* along with this program. If not, see <http://www.gnu.org/licenses/>.
*/
#pragma once
#include <memory>
......@@ -39,21 +58,12 @@ class PETScBlockPreconditionedStokesSolver : public Solver< OperatorType >
: allocatedLevel_( level )
, petscCommunicator_( storage->getSplitCommunicatorByPrimitiveDistribution() )
, num( "numerator", storage, level, level )
, Amat( numberOfLocalDoFs< typename FunctionType::Tag >( *storage, level ),
numberOfGlobalDoFs< typename FunctionType::Tag >( *storage, level, petscCommunicator_ ),
"Amat",
petscCommunicator_ )
, AmatNonEliminatedBC( numberOfLocalDoFs< typename FunctionType::Tag >( *storage, level ),
numberOfGlobalDoFs< typename FunctionType::Tag >( *storage, level, petscCommunicator_ ),
"AmatNonEliminatedBC",
petscCommunicator_ )
, Pmat( numberOfLocalDoFs< typename FunctionType::Tag >( *storage, level ),
numberOfGlobalDoFs< typename FunctionType::Tag >( *storage, level, petscCommunicator_ ),
"Pmat",
petscCommunicator_ )
, xVec( numberOfLocalDoFs< typename FunctionType::Tag >( *storage, level ), "xVec", petscCommunicator_ )
, bVec( numberOfLocalDoFs< typename FunctionType::Tag >( *storage, level ), "bVec", petscCommunicator_ )
, nullspaceVec_( numberOfLocalDoFs< typename FunctionType::Tag >( *storage, level ), "nullspaceVec", petscCommunicator_ )
, Amat( "Amat", petscCommunicator_ )
, AmatNonEliminatedBC( "AmatNonEliminatedBC", petscCommunicator_ )
, Pmat( "Pmat", petscCommunicator_ )
, xVec( "xVec", petscCommunicator_ )
, bVec( "bVec", petscCommunicator_ )
, nullspaceVec_( "nullspaceVec", petscCommunicator_ )
, storage_( storage )
, tolerance_( tolerance )
, maxIterations_( maxIterations )
......@@ -208,8 +218,7 @@ class PETScBlockPreconditionedStokesSolver : public Solver< OperatorType >
case 1:
PCSetType( pc_u, PCJACOBI );
break;
case 3:
{
case 3: {
PCSetType( pc_u, PCHYPRE );
break;
}
......
/*
* Copyright (c) 2017-2019 Dominik Thoennes, Nils Kohl.
* Copyright (c) 2017-2022 Dominik Thoennes, Nils Kohl.
*
* This file is part of HyTeG
* (see https://i10git.cs.fau.de/hyteg/hyteg).
......@@ -44,17 +44,11 @@ class PETScCGSolver : public Solver< OperatorType >
: allocatedLevel_( level )
, petscCommunicator_( storage->getSplitCommunicatorByPrimitiveDistribution() )
, num( "numerator", storage, level, level )
, Amat( numberOfLocalDoFs< typename FunctionType::Tag >( *storage, level ),
numberOfGlobalDoFs< typename FunctionType::Tag >( *storage, level, petscCommunicator_ ),
"Amat",
petscCommunicator_ )
, AmatNonEliminatedBC( numberOfLocalDoFs< typename FunctionType::Tag >( *storage, level ),
numberOfGlobalDoFs< typename FunctionType::Tag >( *storage, level, petscCommunicator_ ),
"AmatNonEliminatedBC",
petscCommunicator_ )
, xVec( numberOfLocalDoFs< typename FunctionType::Tag >( *storage, level ), "xVec", petscCommunicator_ )
, bVec( numberOfLocalDoFs< typename FunctionType::Tag >( *storage, level ), "bVec", petscCommunicator_ )
, nullspaceVec_( numberOfLocalDoFs< typename FunctionType::Tag >( *storage, level ), "nullspaceVec", petscCommunicator_ )
, Amat( "Amat", petscCommunicator_ )
, AmatNonEliminatedBC( "AmatNonEliminatedBC", petscCommunicator_ )
, xVec( "xVec", petscCommunicator_ )
, bVec( "bVec", petscCommunicator_ )
, nullspaceVec_( "nullspaceVec", petscCommunicator_ )
, flag_( hyteg::All )
, nullSpaceSet_( false )
, reassembleMatrix_( false )
......
/*
* Copyright (c) 2017-2019 Nils Kohl, Dominik Thoennes, Marcus Mohr.
* Copyright (c) 2017-2022 Nils Kohl, Dominik Thoennes, Marcus Mohr.
*
* This file is part of HyTeG
* (see https://i10git.cs.fau.de/hyteg/hyteg).
......@@ -92,7 +92,7 @@ void exportLinearSystem( OperatorType op,
{
WALBERLA_LOG_INFO_ON_ROOT( " * Converting Operator to PETSc matrix" );
}
PETScSparseMatrix< OperatorType > petscMatrix( localDoFs, globalDoFs, nameMatrix.c_str() );
PETScSparseMatrix< OperatorType > petscMatrix( nameMatrix.c_str() );
FunctionType< idx_t > numerator( "numerator", storage, level, level );
numerator.enumerate( level );
petscMatrix.createMatrixFromOperator( op, level, numerator );
......
......@@ -115,7 +115,7 @@ void exportOperator( OperatorType& op,
{
WALBERLA_LOG_INFO_ON_ROOT( " * Converting Operator to PETSc matrix" )
}
PETScSparseMatrix< OperatorType > petscMatrix( localDoFs, globalDoFs, matrixName.c_str() );
PETScSparseMatrix< OperatorType > petscMatrix( matrixName.c_str() );
typename OperatorType::srcType::template FunctionType< idx_t > numerator( "numerator", storage, level, level );
numerator.enumerate( level );
petscMatrix.createMatrixFromOperator( op, level, numerator );
......
/*
* Copyright (c) 2017-2019 Boerge Struempfel, Daniel Drzisga, Dominik Thoennes, Nils Kohl.
* Copyright (c) 2017-2022 Boerge Struempfel, Daniel Drzisga, Dominik Thoennes, Nils Kohl.
*
* This file is part of HyTeG
* (see https://i10git.cs.fau.de/hyteg/hyteg).
......@@ -53,20 +53,11 @@ class PETScLUSolver : public Solver< OperatorType >
, allocatedLevel_( level )
, petscCommunicator_( storage->getSplitCommunicatorByPrimitiveDistribution() )
, num( "numerator", storage, level, level )
, Amat( numberOfLocalDoFs< typename FunctionType::Tag >( *storage, level ),
numberOfGlobalDoFs< typename FunctionType::Tag >( *storage, level, petscCommunicator_ ),
"Amat",
petscCommunicator_ )
, AmatUnsymmetric( numberOfLocalDoFs< typename FunctionType::Tag >( *storage, level ),
numberOfGlobalDoFs< typename FunctionType::Tag >( *storage, level, petscCommunicator_ ),
"AmatUnsymmetric",
petscCommunicator_ )
, AmatTmp( numberOfLocalDoFs< typename FunctionType::Tag >( *storage, level ),
numberOfGlobalDoFs< typename FunctionType::Tag >( *storage, level, petscCommunicator_ ),
"AmatTmp",
petscCommunicator_ )
, xVec( numberOfLocalDoFs< typename FunctionType::Tag >( *storage, level ), "xVec", petscCommunicator_ )
, bVec( numberOfLocalDoFs< typename FunctionType::Tag >( *storage, level ), "bVec", petscCommunicator_ )
, Amat( "Amat", petscCommunicator_ )
, AmatUnsymmetric( "AmatUnsymmetric", petscCommunicator_ )
, AmatTmp( "AmatTmp", petscCommunicator_ )
, xVec( "xVec", petscCommunicator_ )
, bVec( "bVec", petscCommunicator_ )
#if 0
, inKernel( numberOfLocalDoFs< typename FunctionType::Tag >( *storage, level ) )
#endif
......@@ -231,7 +222,7 @@ class PETScLUSolver : public Solver< OperatorType >
storage_->getTimingTree()->start( "RHS vector setup" );
b.assign( {1.0}, {x}, level, DirichletBoundary );
b.assign( { 1.0 }, { x }, level, DirichletBoundary );
bVec.createVectorFromFunction( b, num, level, All );
if ( assumeSymmetry_ )
......
/*
* Copyright (c) 2017-2019 Dominik Thoennes, Nils Kohl.
* Copyright (c) 2017-2022 Dominik Thoennes, Nils Kohl.
*
* This file is part of HyTeG
* (see https://i10git.cs.fau.de/hyteg/hyteg).
......@@ -44,17 +44,11 @@ class PETScMinResSolver : public Solver< OperatorType >
: allocatedLevel_( level )
, petscCommunicator_( storage->getSplitCommunicatorByPrimitiveDistribution() )
, num( "numerator", storage, level, level )
, Amat( numberOfLocalDoFs< typename FunctionType::Tag >( *storage, level ),
numberOfGlobalDoFs< typename FunctionType::Tag >( *storage, level, petscCommunicator_ ),
"Amat",
petscCommunicator_ )
, AmatNonEliminatedBC( numberOfLocalDoFs< typename FunctionType::Tag >( *storage, level ),
numberOfGlobalDoFs< typename FunctionType::Tag >( *storage, level, petscCommunicator_ ),
"AmatNonEliminatedBC",
petscCommunicator_ )
, xVec( numberOfLocalDoFs< typename FunctionType::Tag >( *storage, level ), "xVec", petscCommunicator_ )
, bVec( numberOfLocalDoFs< typename FunctionType::Tag >( *storage, level ), "bVec", petscCommunicator_ )
, nullspaceVec_( numberOfLocalDoFs< typename FunctionType::Tag >( *storage, level ), "nullspaceVec", petscCommunicator_ )
, Amat( "Amat", petscCommunicator_ )
, AmatNonEliminatedBC( "AmatNonEliminatedBC", petscCommunicator_ )
, xVec( "xVec", petscCommunicator_ )
, bVec( "bVec", petscCommunicator_ )
, nullspaceVec_( "nullspaceVec", petscCommunicator_ )
, flag_( hyteg::All )
, nullSpaceSet_( false )
, reassembleMatrix_( false )
......
/*
* Copyright (c) 2017-2019 Boerge Struempfel, Daniel Drzisga, Dominik Thoennes, Marcus Mohr, Nils Kohl.
* Copyright (c) 2017-2022 Boerge Struempfel, Daniel Drzisga, Dominik Thoennes, Marcus Mohr, Nils Kohl.
*
* This file is part of HyTeG
* (see https://i10git.cs.fau.de/hyteg/hyteg).
......@@ -53,67 +53,12 @@ class PETScSparseMatrix
PETScSparseMatrix() = delete;
PETScSparseMatrix( uint_t localRows,
uint_t localCols,
uint_t globalRows,
uint_t globalCols,
const char name[] = "Mat",
const MPI_Comm& petscCommunicator = walberla::mpi::MPIManager::instance()->comm() )
: petscCommunicator_( petscCommunicator )
PETScSparseMatrix( const std::string name = "Mat",
const MPI_Comm& petscCommunicator = walberla::mpi::MPIManager::instance()->comm() )
: name_( name )
, petscCommunicator_( petscCommunicator )
, allocated_( false )
, assembled_( false )
{
MatCreate( petscCommunicator, &mat );
MatSetType( mat, MATMPIAIJ );
MatSetSizes( mat,
static_cast< PetscInt >( localRows ),
static_cast< PetscInt >( localCols ),
static_cast< PetscInt >( globalRows ),
static_cast< PetscInt >( globalCols ) );
// Roughly overestimate number of non-zero entries for faster assembly of matrix
MatMPIAIJSetPreallocation( mat, 500, NULL, 500, NULL );
setName( name );
reset();
}
/// \brief Convenience constructor for square matrices.
///
/// Delegates to the general constructor, using \p localSize for both the local
/// row and column counts and \p globalSize for both global dimensions.
PETScSparseMatrix( uint_t localSize,
uint_t globalSize,
const char name[] = "Mat",
const MPI_Comm& petscCommunicator = walberla::mpi::MPIManager::instance()->comm() )
: PETScSparseMatrix( localSize, localSize, globalSize, globalSize, name, petscCommunicator )
{}
/// \brief Constructs a square matrix sized from a PrimitiveStorage.
///
/// The local and global DoF counts are derived from the operator's destination
/// function type on the given \p storage and refinement \p level.
PETScSparseMatrix( const std::shared_ptr< PrimitiveStorage >& storage,
const uint_t& level,
const char name[] = "Mat",
const MPI_Comm& petscCommunicator = walberla::mpi::MPIManager::instance()->comm() )
: PETScSparseMatrix( numberOfLocalDoFs< typename OperatorType::dstType::Tag >( *storage, level ),
numberOfGlobalDoFs< typename OperatorType::dstType::Tag >( *storage, level, petscCommunicator ),
name,
petscCommunicator )
{}
/// \brief Constructs a square matrix sized from an enumerator function.
///
/// Counts the local and global DoFs of \p enumerator at the given \p level at
/// run time — useful when sizes are not known at compile time.
PETScSparseMatrix( const FunctionTypeSrc< idx_t >& enumerator,
const uint_t& level,
const char name[] = "Mat",
const MPI_Comm& petscCommunicator = walberla::mpi::MPIManager::instance()->comm() )
: PETScSparseMatrix( numberOfLocalDoFs( enumerator, level ),
numberOfGlobalDoFs( enumerator, level, petscCommunicator ),
name,
petscCommunicator )
{}
/// \brief Constructs a possibly rectangular matrix from src/dst enumerators.
///
/// Rows are sized from the destination enumerator, columns from the source
/// enumerator (both locally and globally), matching the shape of an operator
/// mapping src-space functions to dst-space functions.
PETScSparseMatrix( const FunctionTypeSrc< idx_t >& enumeratorSrc,
const FunctionTypeDst< idx_t >& enumeratorDst,
const uint_t& level,
const char name[] = "Mat",
const MPI_Comm& petscCommunicator = walberla::mpi::MPIManager::instance()->comm() )
: PETScSparseMatrix( numberOfLocalDoFs( enumeratorDst, level ),
numberOfLocalDoFs( enumeratorSrc, level ),
numberOfGlobalDoFs( enumeratorDst, level, petscCommunicator ),
numberOfGlobalDoFs( enumeratorSrc, level, petscCommunicator ),
name,
petscCommunicator )
{}
virtual ~PETScSparseMatrix() { MatDestroy( &mat ); }
......@@ -123,6 +68,13 @@ class PETScSparseMatrix
const FunctionTypeSrc< idx_t >& numerator,
DoFType flag = All )
{
const uint_t localRows = numberOfLocalDoFs( numerator, level );
const uint_t localCols = numberOfLocalDoFs( numerator, level );
const uint_t globalRows = numberOfGlobalDoFs( numerator, level, petscCommunicator_ );
const uint_t globalCols = numberOfGlobalDoFs( numerator, level, petscCommunicator_ );
allocateSparseMatrix( localRows, localCols, globalRows, globalCols );
auto proxy = std::make_shared< PETScSparseMatrixProxy >( mat );
op.toMatrix( proxy, numerator, numerator, level, flag );
......@@ -137,6 +89,13 @@ class PETScSparseMatrix
const FunctionTypeDst< idx_t >& numeratorDst,
DoFType flag = All )
{
const uint_t localRows = numberOfLocalDoFs( numeratorDst, level );
const uint_t localCols = numberOfLocalDoFs( numeratorSrc, level );
const uint_t globalRows = numberOfGlobalDoFs( numeratorDst, level, petscCommunicator_ );
const uint_t globalCols = numberOfGlobalDoFs( numeratorSrc, level, petscCommunicator_ );
allocateSparseMatrix( localRows, localCols, globalRows, globalCols );
auto proxy = std::make_shared< PETScSparseMatrixProxy >( mat );
op.toMatrix( proxy, numeratorSrc, numeratorDst, level, flag );
......@@ -263,7 +222,13 @@ class PETScSparseMatrix
inline void reset() { assembled_ = false; }
/// \brief Sets all entries of the matrix to zero.
inline void zeroEntries() { MatZeroEntries( mat ); }
/// \brief Sets all entries of the matrix to zero.
///
/// A no-op if the underlying PETSc matrix has not been allocated yet —
/// an unallocated matrix has no entries to clear.
inline void zeroEntries()
{
   if ( !allocated_ )
   {
      return;
   }
   MatZeroEntries( mat );
}
inline void setName( const char name[] ) { PetscObjectSetName( (PetscObject) mat, name ); }
......@@ -322,9 +287,32 @@ class PETScSparseMatrix
};
protected:
MPI_Comm petscCommunicator_;
Mat mat;
bool assembled_;
std::string name_;
MPI_Comm petscCommunicator_;
Mat mat;
bool allocated_;
bool assembled_;
private:
/// \brief Lazily creates the underlying PETSc matrix with the given dimensions.
///
/// A no-op if the matrix was already allocated. Otherwise creates an MPIAIJ
/// matrix on the stored communicator, sets its local and global sizes,
/// preallocates a rough overestimate of non-zeros per row for faster assembly,
/// names the PETSc object after name_, resets the assembled flag and marks the
/// matrix as allocated.
inline void allocateSparseMatrix( uint_t localRows, uint_t localCols, uint_t globalRows, uint_t globalCols )
{
   if ( allocated_ )
   {
      return;
   }

   MatCreate( petscCommunicator_, &mat );
   MatSetType( mat, MATMPIAIJ );
   MatSetSizes( mat,
                static_cast< PetscInt >( localRows ),
                static_cast< PetscInt >( localCols ),
                static_cast< PetscInt >( globalRows ),
                static_cast< PetscInt >( globalCols ) );

   // Roughly overestimate the number of non-zero entries (diagonal and
   // off-diagonal portion) for faster assembly of the matrix.
   MatMPIAIJSetPreallocation( mat, 500, NULL, 500, NULL );

   setName( name_.c_str() );
   reset();

   allocated_ = true;
}
};
} // namespace hyteg
......
/*
* Copyright (c) 2017-2019 Boerge Struempfel, Daniel Drzisga, Dominik Thoennes, Nils Kohl.
* Copyright (c) 2017-2022 Boerge Struempfel, Daniel Drzisga, Dominik Thoennes, Nils Kohl.
*
* This file is part of HyTeG
* (see https://i10git.cs.fau.de/hyteg/hyteg).
......@@ -43,29 +43,24 @@ class PETScVector
public:
PETScVector() = delete;
/// \brief Constructs an empty (unallocated) PETSc vector wrapper.
///
/// The actual PETSc Vec is created lazily, on the first call to
/// createVectorFromFunction(), since the required local size may not be known
/// at construction time.
PETScVector( const std::string& name = "Vec",
const MPI_Comm& petscCommunicator = walberla::mpi::MPIManager::instance()->comm() )
: name_( name )
, petscCommunicator_( petscCommunicator )
, allocated_( false )
{}
PETScVector( const FunctionType< ValueType >& function,
const FunctionType< idx_t >& numerator,
const uint_t& level,
const DoFType& flag = All,
const std::string& name = "Vec",
const MPI_Comm& petscCommunicator = walberla::mpi::MPIManager::instance()->comm() )
: PETScVector( numberOfLocalDoFs( function, level ), name, petscCommunicator )
: PETScVector( name, petscCommunicator )
{
createVectorFromFunction( function, numerator, level, flag );
}
/// \brief Constructs and immediately allocates a PETSc vector of the given
/// local size; the global size is left to PETSc (PETSC_DECIDE).
PETScVector( uint_t localSize,
const std::string& name = "Vec",
const MPI_Comm& petscCommunicator = walberla::mpi::MPIManager::instance()->comm() )
: petscCommunicator_( petscCommunicator )
{
VecCreate( petscCommunicator, &vec );
VecSetType( vec, VECSTANDARD );
VecSetSizes( vec, (idx_t) localSize, PETSC_DECIDE );
VecSetUp( vec );
setName( name.c_str() );
}
~PETScVector() { VecDestroy( &vec ); }
MPI_Comm getCommunicator() const { return petscCommunicator_; }
......@@ -75,6 +70,9 @@ class PETScVector
uint_t level,
DoFType flag = All )
{
const auto localSize = numberOfLocalDoFs( src, level );
allocateVector( localSize );
auto proxy = std::make_shared< PETScVectorProxy >( vec );
hyteg::createVectorFromFunction( src, numerator, proxy, level, flag );
......@@ -87,6 +85,8 @@ class PETScVector
uint_t level,
DoFType flag = All )
{
WALBERLA_CHECK( allocated_, "Cannot create function from PETSc vector - the vector wasn't even allocated." );
auto proxy = std::make_shared< PETScVectorProxy >( vec );
hyteg::createFunctionFromVector( dst, numerator, proxy, level, flag );
}
......@@ -112,8 +112,24 @@ class PETScVector
inline Vec& get() { return vec; }
protected:
MPI_Comm petscCommunicator_;
Vec vec;
std::string name_;
MPI_Comm petscCommunicator_;
Vec vec;
bool allocated_;
private:
/// \brief Lazily creates the underlying PETSc vector with the given local size.
///
/// A no-op if the vector was already allocated. Otherwise creates a standard
/// PETSc vector on the stored communicator, lets PETSc determine the global
/// size (PETSC_DECIDE), names the PETSc object after name_ and marks the
/// vector as allocated.
inline void allocateVector( uint_t localSize )
{
   if ( allocated_ )
   {
      return;
   }

   VecCreate( petscCommunicator_, &vec );
   VecSetType( vec, VECSTANDARD );
   VecSetSizes( vec, (idx_t) localSize, PETSC_DECIDE );
   VecSetUp( vec );
   setName( name_.c_str() );

   allocated_ = true;
}
};
} // namespace hyteg
......
Markdown is supported
0% or .
You are about to add 0 people to the discussion. Proceed with caution.
Finish editing this message first!
Please register or to comment