Skip to content
GitLab
Menu
Projects
Groups
Snippets
Loading...
Help
Help
Support
Community forum
Keyboard shortcuts
?
Submit feedback
Contribute to GitLab
Sign in
Toggle navigation
Menu
Open sidebar
hyteg
hyteg
Commits
820a959a
Commit
820a959a
authored
Nov 29, 2021
by
Marcel Koch
Browse files
add test for ginkgo block matrix
parent
a78f2abb
Changes
3
Hide whitespace changes
Inline
Side-by-side
src/hyteg/ginkgo/GinkgoSparseMatrixProxy.hpp
View file @
820a959a
...
...
@@ -8,6 +8,7 @@
#include "ginkgo/core/base/mpi.hpp"
#include "ginkgo/core/distributed/matrix.hpp"
#include "ginkgo/core/distributed/partition.hpp"
#include "ginkgo/core/matrix/block_matrix.hpp"
#include "ginkgo/core/matrix/csr.hpp"
namespace
hyteg
{
...
...
tests/hyteg/CMakeLists.txt
View file @
820a959a
...
...
@@ -354,6 +354,9 @@ endif()
# Ginkgo-backed tests are only built when the Ginkgo backend is enabled.
if( HYTEG_BUILD_WITH_GINKGO )
   waLBerla_compile_test( FILES P1/P1GinkgoSolveTest.cpp DEPENDS hyteg core )
   waLBerla_execute_test( NAME P1GinkgoSolveTest1 COMMAND $<TARGET_FILE:P1GinkgoSolveTest> PROCESSES 1 )

   waLBerla_compile_test( FILES composites/P2P1StokesGinkgoApplyTest.cpp DEPENDS hyteg core )
   waLBerla_execute_test( NAME P2P1StokesGinkgoApplyTest1 COMMAND $<TARGET_FILE:P2P1StokesGinkgoApplyTest> PROCESSES 1 )
endif()
...
...
tests/hyteg/composites/P2P1StokesGinkgoApplyTest.cpp
0 → 100644
View file @
820a959a
/*
* Copyright (c) 2017-2019 Dominik Thoennes.
*
* This file is part of HyTeG
* (see https://i10git.cs.fau.de/hyteg/hyteg).
*
* This program is free software: you can redistribute it and/or modify
* it under the terms of the GNU General Public License as published by
* the Free Software Foundation, either version 3 of the License, or
* (at your option) any later version.
*
* This program is distributed in the hope that it will be useful,
* but WITHOUT ANY WARRANTY; without even the implied warranty of
* MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
* GNU General Public License for more details.
*
* You should have received a copy of the GNU General Public License
* along with this program. If not, see <http://www.gnu.org/licenses/>.
*/
#include <ginkgo/core/distributed/communicator.hpp>
#include <numeric>
#include "core/DataTypes.h"
#include "core/math/Random.h"
#include "core/mpi/MPIManager.h"
#include "hyteg/communication/Syncing.hpp"
#include "hyteg/composites/P2P1TaylorHoodFunction.hpp"
#include "hyteg/composites/P2P1TaylorHoodStokesOperator.hpp"
#include "hyteg/dataexport/VTKOutput.hpp"
#include "hyteg/functions/FunctionTraits.hpp"
#include "hyteg/ginkgo/GinkgoSparseMatrixProxy.hpp"
#include "hyteg/ginkgo/GinkgoUtilities.hpp"
#include "hyteg/ginkgo/GinkgoVectorProxy.hpp"
#include "hyteg/mesh/MeshInfo.hpp"
#include "hyteg/petsc/PETScManager.hpp"
#include "hyteg/petsc/PETScVector.hpp"
#include "hyteg/primitivestorage/PrimitiveStorage.hpp"
#include "hyteg/primitivestorage/SetupPrimitiveStorage.hpp"
#include "hyteg/primitivestorage/Visualization.hpp"
#include "hyteg/primitivestorage/loadbalancing/SimpleBalancer.hpp"
using
walberla
::
real_t
;
namespace
hyteg
{
void
gatherIndices
(
std
::
vector
<
PetscInt
>&
velocityIndices
,
std
::
vector
<
PetscInt
>&
pressureIndices
,
const
PrimitiveStorage
&
storage
,
uint_t
level
,
const
P1StokesFunction
<
PetscInt
>&
numerator
)
{
for
(
auto
dof
:
FunctionIterator
<
vertexdof
::
VertexDoFFunction
<
PetscInt
>
>
(
numerator
.
uvw
[
0
],
level
)
)
{
velocityIndices
.
push_back
(
dof
.
value
()
);
}
for
(
auto
dof
:
FunctionIterator
<
vertexdof
::
VertexDoFFunction
<
PetscInt
>
>
(
numerator
.
uvw
[
1
],
level
)
)
{
velocityIndices
.
push_back
(
dof
.
value
()
);
}
if
(
storage
.
hasGlobalCells
()
)
{
for
(
auto
dof
:
FunctionIterator
<
vertexdof
::
VertexDoFFunction
<
PetscInt
>
>
(
numerator
.
uvw
[
2
],
level
)
)
{
velocityIndices
.
push_back
(
dof
.
value
()
);
}
}
for
(
auto
dof
:
FunctionIterator
<
vertexdof
::
VertexDoFFunction
<
PetscInt
>
>
(
numerator
.
p
,
level
)
)
{
pressureIndices
.
push_back
(
dof
.
value
()
);
}
}
/// Collects the global DoF indices of a P2-P1 Taylor-Hood function, split into
/// velocity indices (vertex + edge DoFs of every component) and pressure
/// indices (vertex DoFs only).
///
/// \param velocityIndices  appended with vertex and edge DoF indices of
///                         uvw[0], uvw[1] and, for 3D meshes, uvw[2]
/// \param pressureIndices  appended with the vertex DoF indices of p
/// \param storage          used only to decide whether a third velocity
///                         component exists (hasGlobalCells())
/// \param level            refinement level to iterate
/// \param numerator        enumerated function holding the DoF indices
void gatherIndices( std::vector< PetscInt >&                  velocityIndices,
                    std::vector< PetscInt >&                  pressureIndices,
                    const PrimitiveStorage&                   storage,
                    uint_t                                    level,
                    const P2P1TaylorHoodFunction< PetscInt >& numerator )
{
   // Appends all DoF indices of one P2 velocity component: first the vertex
   // DoFs, then the edge DoFs (same order as the original duplicated loops).
   auto appendP2Component = [level, &velocityIndices]( const auto& component ) {
      for ( auto dof :
            FunctionIterator< vertexdof::VertexDoFFunction< PetscInt > >( component.getVertexDoFFunction(), level ) )
      {
         velocityIndices.push_back( dof.value() );
      }
      for ( auto dof : FunctionIterator< EdgeDoFFunction< PetscInt > >( component.getEdgeDoFFunction(), level ) )
      {
         velocityIndices.push_back( dof.value() );
      }
   };

   appendP2Component( numerator.uvw[0] );
   appendP2Component( numerator.uvw[1] );
   if ( storage.hasGlobalCells() )
   {
      // Third velocity component only exists on 3D meshes.
      appendP2Component( numerator.uvw[2] );
   }

   // Pressure is P1: vertex DoFs only.
   for ( auto dof : FunctionIterator< vertexdof::VertexDoFFunction< PetscInt > >( numerator.p, level ) )
   {
      pressureIndices.push_back( dof.value() );
   }
}
bool
p2p1StokesPetscApplyTest
(
const
uint_t
&
level
,
const
std
::
string
&
meshFile
,
const
DoFType
&
location
,
const
real_t
&
eps
)
{
WALBERLA_LOG_INFO_ON_ROOT
(
"level: "
<<
level
<<
", mesh file: "
<<
meshFile
);
const
bool
writeVTK
=
false
;
MeshInfo
meshInfo
=
hyteg
::
MeshInfo
::
fromGmshFile
(
meshFile
);
SetupPrimitiveStorage
setupStorage
(
meshInfo
,
walberla
::
uint_c
(
walberla
::
mpi
::
MPIManager
::
instance
()
->
numProcesses
()
)
);
setupStorage
.
setMeshBoundaryFlagsOnBoundary
(
1
,
0
,
true
);
loadbalancing
::
roundRobin
(
setupStorage
);
std
::
shared_ptr
<
hyteg
::
PrimitiveStorage
>
storage
=
std
::
make_shared
<
hyteg
::
PrimitiveStorage
>
(
setupStorage
);
writeDomainPartitioningVTK
(
storage
,
"../../output"
,
"P2P1StokesPetscApplyTestDomain"
);
P2P1TaylorHoodFunction
<
real_t
>
src
(
"src"
,
storage
,
level
,
level
);
P2P1TaylorHoodFunction
<
real_t
>
hhgDst
(
"hhgDst"
,
storage
,
level
,
level
);
P2P1TaylorHoodFunction
<
real_t
>
petscDst
(
"petscDst"
,
storage
,
level
,
level
);
P2P1TaylorHoodFunction
<
real_t
>
err
(
"error"
,
storage
,
level
,
level
);
P2P1TaylorHoodFunction
<
real_t
>
ones
(
"ones"
,
storage
,
level
,
level
);
P2P1TaylorHoodFunction
<
PetscInt
>
numerator
(
"numerator"
,
storage
,
level
,
level
);
std
::
function
<
real_t
(
const
hyteg
::
Point3D
&
)
>
zero
=
[](
const
hyteg
::
Point3D
&
)
{
return
0.0
;
};
std
::
function
<
real_t
(
const
hyteg
::
Point3D
&
)
>
one
=
[](
const
hyteg
::
Point3D
&
)
{
return
1.0
;
};
std
::
function
<
real_t
(
const
hyteg
::
Point3D
&
)
>
rand
=
[](
const
hyteg
::
Point3D
&
)
{
return
walberla
::
math
::
realRandom
<
real_t
>
();
};
std
::
function
<
real_t
(
const
hyteg
::
Point3D
&
)
>
srcFunction
=
[](
const
hyteg
::
Point3D
&
x
)
{
return
x
[
0
]
*
x
[
0
]
*
x
[
0
]
*
x
[
0
]
*
std
::
sinh
(
x
[
1
]
)
*
std
::
cos
(
x
[
2
]
);
};
src
.
interpolate
(
srcFunction
,
level
,
hyteg
::
All
);
hhgDst
.
interpolate
(
rand
,
level
,
location
);
petscDst
.
interpolate
(
rand
,
level
,
location
);
ones
.
interpolate
(
one
,
level
,
location
);
P2P1TaylorHoodStokesOperator
L
(
storage
,
level
,
level
);
numerator
.
enumerate
(
level
);
const
uint_t
globalDoFs
=
hyteg
::
numberOfGlobalDoFs
<
hyteg
::
P2P1TaylorHoodFunctionTag
>
(
*
storage
,
level
);
const
uint_t
localDoFs
=
hyteg
::
numberOfLocalDoFs
<
hyteg
::
P2P1TaylorHoodFunctionTag
>
(
*
storage
,
level
);
WALBERLA_LOG_INFO_ON_ROOT
(
"Global DoFs: "
<<
globalDoFs
);
// HyTeG apply
L
.
apply
(
src
,
hhgDst
,
level
,
location
);
// PETSc apply
using
mtx
=
gko
::
distributed
::
Matrix
<
real_t
,
int32_t
>
;
using
vec
=
gko
::
distributed
::
Vector
<
real_t
,
int32_t
>
;
auto
exec
=
gko
::
ReferenceExecutor
::
create
();
auto
comm
=
gko
::
mpi
::
communicator
::
create_world
();
auto
part
=
gko
::
share
(
gko
::
distributed
::
Partition
<
int32_t
>::
build_uniformly
(
exec
,
1
,
localDoFs
)
);
auto
srcGkoVec
=
vec
::
create
(
exec
,
comm
,
part
,
gko
::
dim
<
2
>
{
localDoFs
,
1
},
gko
::
dim
<
2
>
{
localDoFs
,
1
}
);
auto
dstGkoVec
=
vec
::
create
(
exec
,
comm
,
part
,
gko
::
dim
<
2
>
{
localDoFs
,
1
},
gko
::
dim
<
2
>
{
localDoFs
,
1
}
);
auto
gkoMtx
=
mtx
::
create
(
exec
,
comm
);
hyteg
::
petsc
::
createVectorFromFunction
(
src
,
numerator
,
std
::
make_shared
<
GinkgoVectorProxy
>
(
gko
::
lend
(
srcGkoVec
),
gko
::
dim
<
2
>
{
localDoFs
,
1
},
part
),
level
,
All
);
hyteg
::
petsc
::
createVectorFromFunction
(
petscDst
,
numerator
,
std
::
make_shared
<
GinkgoVectorProxy
>
(
gko
::
lend
(
dstGkoVec
),
gko
::
dim
<
2
>
{
localDoFs
,
1
},
part
),
level
,
All
);
auto
proxy
=
std
::
make_shared
<
GinkgoSparseMatrixProxy
<
mtx
>
>
(
gkoMtx
.
get
(),
gko
::
dim
<
2
>
{
localDoFs
,
localDoFs
},
part
);
hyteg
::
petsc
::
createMatrix
(
L
,
numerator
,
numerator
,
proxy
,
level
,
All
);
proxy
->
finalize
();
std
::
vector
<
int32_t
>
vIndices
;
std
::
vector
<
int32_t
>
pIndices
;
gatherIndices
(
vIndices
,
pIndices
,
*
storage
,
level
,
numerator
);
auto
[
p_v_min
,
p_v_max
]
=
std
::
minmax_element
(
begin
(
vIndices
),
end
(
vIndices
)
);
auto
[
p_p_min
,
p_p_max
]
=
std
::
minmax_element
(
begin
(
pIndices
),
end
(
pIndices
)
);
gko
::
span
v_span
(
*
p_v_min
,
*
p_v_max
+
1
);
gko
::
span
p_span
(
*
p_p_min
,
*
p_p_max
+
1
);
if
(
p_span
.
begin
!=
v_span
.
end
)
{
WALBERLA_LOG_INFO_ON_ROOT
(
"Indices are NOT blocked: v"
<<
v_span
<<
", p"
<<
p_span
);
}
auto
b_mtx_v
=
gko
::
share
(
gkoMtx
->
get_local_diag
()
->
create_submatrix
(
v_span
,
v_span
)
);
auto
b_mtx_vp
=
gko
::
share
(
gkoMtx
->
get_local_diag
()
->
create_submatrix
(
v_span
,
p_span
)
);
auto
b_mtx_pv
=
gko
::
share
(
gkoMtx
->
get_local_diag
()
->
create_submatrix
(
p_span
,
v_span
)
);
auto
b_mtx_p
=
gko
::
share
(
gkoMtx
->
get_local_diag
()
->
create_submatrix
(
p_span
,
p_span
)
);
auto
b_vec_s0
=
gko
::
share
(
srcGkoVec
->
get_local
()
->
create_submatrix
(
v_span
,
0
)
);
auto
b_vec_s1
=
gko
::
share
(
srcGkoVec
->
get_local
()
->
create_submatrix
(
p_span
,
0
)
);
auto
b_vec_d0
=
gko
::
share
(
dstGkoVec
->
get_local
()
->
create_submatrix
(
v_span
,
0
)
);
auto
b_vec_d1
=
gko
::
share
(
dstGkoVec
->
get_local
()
->
create_submatrix
(
p_span
,
0
)
);
auto
b_gkoMtx
=
gko
::
matrix
::
BlockMatrix
::
create
(
exec
,
{
localDoFs
,
localDoFs
},
{{
b_mtx_v
,
b_mtx_vp
},
{
b_mtx_pv
,
b_mtx_p
}});
auto
b_srcGkoVec
=
gko
::
matrix
::
BlockMatrix
::
create
(
exec
,
{
localDoFs
,
localDoFs
},
{{
b_vec_s0
},
{
b_vec_s1
}});
auto
b_dstGkoVec
=
gko
::
matrix
::
BlockMatrix
::
create
(
exec
,
{
localDoFs
,
localDoFs
},
{{
b_vec_d0
},
{
b_vec_d1
}});
b_gkoMtx
->
apply
(
b_srcGkoVec
.
get
(),
b_dstGkoVec
.
get
());
hyteg
::
petsc
::
createFunctionFromVector
(
petscDst
,
numerator
,
std
::
make_shared
<
GinkgoVectorProxy
>
(
gko
::
lend
(
dstGkoVec
),
gko
::
dim
<
2
>
{
localDoFs
,
1
},
part
),
level
,
All
);
// compare
err
.
assign
(
{
1.0
,
-
1.0
},
{
hhgDst
,
petscDst
},
level
,
location
);
auto
maxError
=
err
.
uvw
[
0
].
getMaxMagnitude
(
level
);
maxError
=
std
::
max
(
maxError
,
err
.
uvw
[
1
].
getMaxMagnitude
(
level
)
);
if
(
err
.
uvw
.
getDimension
()
==
3
)
{
maxError
=
std
::
max
(
maxError
,
err
.
uvw
[
2
].
getMaxMagnitude
(
level
)
);
}
maxError
=
std
::
max
(
maxError
,
err
.
p
.
getMaxMagnitude
(
level
)
);
WALBERLA_LOG_INFO_ON_ROOT
(
"Error max Magnitude = "
<<
maxError
<<
" eps: "
<<
eps
);
if
(
writeVTK
)
{
VTKOutput
vtkOutput
(
"../../output"
,
"P2P1StokesPetscApplyTest"
,
storage
);
vtkOutput
.
add
(
src
.
uvw
);
vtkOutput
.
add
(
hhgDst
.
uvw
);
vtkOutput
.
add
(
petscDst
.
uvw
);
vtkOutput
.
add
(
err
.
uvw
);
vtkOutput
.
write
(
level
,
0
);
}
if
(
maxError
>
eps
)
{
WALBERLA_LOG_INFO_ON_ROOT
(
"TEST FAILED!"
);
return
false
;
}
else
{
return
true
;
}
}
}
// namespace hyteg
int
main
(
int
argc
,
char
*
argv
[]
)
{
walberla
::
MPIManager
::
instance
()
->
initializeMPI
(
&
argc
,
&
argv
);
walberla
::
MPIManager
::
instance
()
->
useWorldComm
();
hyteg
::
PETScManager
petscManager
(
&
argc
,
&
argv
);
bool
succeeded
=
true
;
succeeded
&=
hyteg
::
p2p1StokesPetscApplyTest
(
3
,
"../../data/meshes/quad_4el.msh"
,
hyteg
::
All
,
8.7e-15
);
succeeded
&=
hyteg
::
p2p1StokesPetscApplyTest
(
3
,
"../../data/meshes/annulus_coarse.msh"
,
hyteg
::
All
,
2.6e-13
);
succeeded
&=
hyteg
::
p2p1StokesPetscApplyTest
(
3
,
"../../data/meshes/3D/tet_1el.msh"
,
hyteg
::
All
,
1.0e-16
);
succeeded
&=
hyteg
::
p2p1StokesPetscApplyTest
(
2
,
"../../data/meshes/3D/pyramid_2el.msh"
,
hyteg
::
All
,
7.3e-16
);
succeeded
&=
hyteg
::
p2p1StokesPetscApplyTest
(
2
,
"../../data/meshes/3D/pyramid_4el.msh"
,
hyteg
::
All
,
1.4e-15
);
succeeded
&=
hyteg
::
p2p1StokesPetscApplyTest
(
2
,
"../../data/meshes/3D/regular_octahedron_8el.msh"
,
hyteg
::
All
,
4.0e-15
);
succeeded
&=
hyteg
::
p2p1StokesPetscApplyTest
(
2
,
"../../data/meshes/3D/cube_24el.msh"
,
hyteg
::
All
,
3.5e-15
);
WALBERLA_CHECK
(
succeeded
,
"One of the tests failed"
)
return
EXIT_SUCCESS
;
}
Write
Preview
Markdown
is supported
0%
Try again
or
attach a new file
.
Attach a file
Cancel
You are about to add
0
people
to the discussion. Proceed with caution.
Finish editing this message first!
Cancel
Please
register
or
sign in
to comment