Commit 553205fe authored by Dominik Thoennes

change gcc 8 to ubuntu 20.04 image

parent 7b9a3209
@@ -412,100 +412,163 @@ gcc_7_hybrid_dbg_sp:
 gcc_8_serial:
    extends: .build_template
    image: i10git.cs.fau.de:5005/walberla/buildenvs/gcc:8
+   before_script:
+      - pip3 install lbmpy==1.0 jinja2 pytest
+      - cd python
+      - python3 -m pytest --junitxml=report.xml pystencils_walberla lbmpy_walberla
+      - pip3 list
+      - cd ..
+      - CC=gcc CXX=g++ pip3 install pycuda
    variables:
       WALBERLA_BUILD_WITH_CUDA: "ON"
       WALBERLA_BUILD_WITH_MPI: "OFF"
       WALBERLA_BUILD_WITH_OPENMP: "OFF"
       WALBERLA_BUILD_WITH_PARMETIS: "OFF"
+      WALBERLA_BUILD_WITH_CODEGEN: "ON"
+      WALBERLA_BUILD_WITH_PYTHON: "ON"
    only:
       variables:
          - $ENABLE_NIGHTLY_BUILDS
    tags:
-      - cuda
+      - cuda11
       - docker
 gcc_8_mpionly:
    extends: .build_template
    image: i10git.cs.fau.de:5005/walberla/buildenvs/gcc:8
+   before_script:
+      - pip3 install lbmpy==1.0 jinja2 pytest
+      - cd python
+      - python3 -m pytest --junitxml=report.xml pystencils_walberla lbmpy_walberla
+      - pip3 list
+      - cd ..
+      - CC=gcc CXX=g++ pip3 install pycuda
    variables:
       WALBERLA_BUILD_WITH_CUDA: "ON"
       WALBERLA_BUILD_WITH_OPENMP: "OFF"
+      WALBERLA_BUILD_WITH_CODEGEN: "ON"
+      WALBERLA_BUILD_WITH_PYTHON: "ON"
    only:
       variables:
          - $ENABLE_NIGHTLY_BUILDS
    tags:
-      - cuda
+      - cuda11
       - docker
 gcc_8_hybrid:
    extends: .build_template
    image: i10git.cs.fau.de:5005/walberla/buildenvs/gcc:8
+   before_script:
+      - pip3 install lbmpy==1.0 jinja2 pytest
+      - cd python
+      - python3 -m pytest --junitxml=report.xml pystencils_walberla lbmpy_walberla
+      - pip3 list
+      - cd ..
+      - CC=gcc CXX=g++ pip3 install pycuda
    variables:
       WALBERLA_BUILD_WITH_CUDA: "ON"
+      WALBERLA_BUILD_WITH_CODEGEN: "ON"
+      WALBERLA_BUILD_WITH_PYTHON: "ON"
    only:
       variables:
          - $ENABLE_NIGHTLY_BUILDS
    tags:
-      - cuda
+      - cuda11
       - docker
 gcc_8_serial_dbg:
    extends: .build_template
    image: i10git.cs.fau.de:5005/walberla/buildenvs/gcc:8
+   before_script:
+      - pip3 install lbmpy==1.0 jinja2 pytest
+      - cd python
+      - python3 -m pytest --junitxml=report.xml pystencils_walberla lbmpy_walberla
+      - pip3 list
+      - cd ..
+      - CC=gcc CXX=g++ pip3 install pycuda
    variables:
       WALBERLA_BUILD_WITH_CUDA: "ON"
       WALBERLA_BUILD_WITH_MPI: "OFF"
       WALBERLA_BUILD_WITH_OPENMP: "OFF"
       WALBERLA_BUILD_WITH_PARMETIS: "OFF"
       CMAKE_BUILD_TYPE: "DebugOptimized"
+      WALBERLA_BUILD_WITH_CODEGEN: "ON"
+      WALBERLA_BUILD_WITH_PYTHON: "ON"
    only:
       variables:
          - $ENABLE_NIGHTLY_BUILDS
    tags:
-      - cuda
+      - cuda11
       - docker
 gcc_8_mpionly_dbg:
    extends: .build_template
    image: i10git.cs.fau.de:5005/walberla/buildenvs/gcc:8
+   before_script:
+      - pip3 install lbmpy==1.0 jinja2 pytest
+      - cd python
+      - python3 -m pytest --junitxml=report.xml pystencils_walberla lbmpy_walberla
+      - pip3 list
+      - cd ..
+      - CC=gcc CXX=g++ pip3 install pycuda
    variables:
       WALBERLA_BUILD_WITH_CUDA: "ON"
       CMAKE_BUILD_TYPE: "DebugOptimized"
       WALBERLA_BUILD_WITH_OPENMP: "OFF"
+      WALBERLA_BUILD_WITH_CODEGEN: "ON"
+      WALBERLA_BUILD_WITH_PYTHON: "ON"
    only:
       variables:
          - $ENABLE_NIGHTLY_BUILDS
    tags:
-      - cuda
+      - cuda11
       - docker
 gcc_8_hybrid_dbg:
    extends: .build_template
    image: i10git.cs.fau.de:5005/walberla/buildenvs/gcc:8
+   before_script:
+      - pip3 install lbmpy==1.0 jinja2 pytest
+      - cd python
+      - python3 -m pytest --junitxml=report.xml pystencils_walberla lbmpy_walberla
+      - pip3 list
+      - cd ..
+      - CC=gcc CXX=g++ pip3 install pycuda
    variables:
       WALBERLA_BUILD_WITH_CUDA: "ON"
       CMAKE_BUILD_TYPE: "DebugOptimized"
+      WALBERLA_BUILD_WITH_CODEGEN: "ON"
+      WALBERLA_BUILD_WITH_PYTHON: "ON"
    only:
       variables:
          - $ENABLE_NIGHTLY_BUILDS
    tags:
-      - cuda
+      - cuda11
       - docker
 gcc_8_hybrid_dbg_sp:
    extends: .build_template
    image: i10git.cs.fau.de:5005/walberla/buildenvs/gcc:8
+   before_script:
+      - pip3 install lbmpy==1.0 jinja2 pytest
+      - cd python
+      - python3 -m pytest --junitxml=report.xml pystencils_walberla lbmpy_walberla
+      - pip3 list
+      - cd ..
+      - CC=gcc CXX=g++ pip3 install pycuda
    variables:
       WALBERLA_BUILD_WITH_CUDA: "ON"
       CMAKE_BUILD_TYPE: "DebugOptimized"
       WALBERLA_DOUBLE_ACCURACY: "OFF"
       WALBERLA_BUILD_WITH_PARMETIS: "OFF"
       WALBERLA_BUILD_WITH_METIS: "OFF"
+      WALBERLA_BUILD_WITH_CODEGEN: "ON"
+      WALBERLA_BUILD_WITH_PYTHON: "ON"
    only:
       variables:
          - $ENABLE_NIGHTLY_BUILDS
    tags:
-      - cuda
+      - cuda11
       - docker
 gcc_9_serial:
...
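Note: the same seven-line before_script is added verbatim to all seven gcc_8_* jobs in this hunk. As a minimal sketch (not part of this commit), GitLab CI lets `extends` take a list of templates, so the block could instead be defined once in a hidden job and pulled into each build job alongside the existing .build_template. The template name .codegen_deps is an illustrative assumption, and this assumes .build_template does not define its own before_script.

# Hypothetical refactoring sketch only; names other than .build_template are assumptions.
.codegen_deps:
   before_script:
      - pip3 install lbmpy==1.0 jinja2 pytest
      - cd python
      - python3 -m pytest --junitxml=report.xml pystencils_walberla lbmpy_walberla
      - pip3 list
      - cd ..
      - CC=gcc CXX=g++ pip3 install pycuda

gcc_8_serial:
   extends: [ .build_template, .codegen_deps ]
   image: i10git.cs.fau.de:5005/walberla/buildenvs/gcc:8
   # variables, only and tags as in the diff above

With this pattern, updating the pinned lbmpy version or the pycuda install line would touch a single place instead of every gcc_8_* job.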