Due to a key rollover of our WebSSO provider there might be a short downtime of this Gitlab server on Friday 2.

Commit cf2af49a authored by Markus Holzer's avatar Markus Holzer

Merge branch 'Python_Code_Style' into 'master'

Adapting flake8 formatting for Python files

See merge request !317
parents 3489ae62 9ac46dc3
Pipeline #25610 passed with stages
in 477 minutes and 6 seconds
[flake8]
max-line-length=120
exclude=apps/showcases/Mixer/GenerateModule.py, # contains only statements
apps/benchmarks/FieldCommunication/config.py # just a config file
utilities/bashhelper/createShortcuts.py # contains a lot of really long strings
ignore = W503 C901 E741
......@@ -34,7 +34,8 @@ sng_network = supermuc_network_spread()
class AlreadySimulated:
def __init__(self, db_file, properties=('processes0*processes1*processes2', 'layout', 'ghostLayers', 'cartesianCommunicator', 'stencil',
def __init__(self, db_file, properties=('processes0*processes1*processes2', 'layout', 'ghostLayers',
'cartesianCommunicator', 'stencil',
'cellsPerBlock0', 'cellsPerBlock1', 'cellsPerBlock2',
'blocksPerProcess', 'localCommunicationMode', 'singleMessage',
'fieldsPdf', 'fieldsPdfOpt', 'fieldsVector', 'fieldsScalar',
......
......@@ -3,66 +3,67 @@
from jinja2 import Environment, FileSystemLoader
import os
class Parameter:
    """A single named, typed property of a generated data structure."""

    def __init__(self, name, type, defValue=""):
        """Property of a data structure

        Parameters
        ----------
        name : str
            name of the property
        type : str
            type of the property (a C++ type name, kept as a plain string)
        defValue : str
            default value the property should be initialized with
        """
        self.name = name
        self.type = type
        self.defValue = defValue

    def __str__(self):
        return "name: {}, type: {}, defValue: {}".format(self.name, self.type, self.defValue)
class Config:
    """Collects Parameter definitions and renders them into files via jinja2 templates."""

    def __init__(self):
        # ordered list of Parameter instances, in insertion order
        self.parameters = []

    def parameterExists(self, name):
        """Return True if a parameter with the given name was already added."""
        for param in self.parameters:
            if param.name == name:
                return True
        return False

    def addParameter(self, name, type, defValue):
        """Add a new Parameter; a duplicate name is reported and ignored."""
        if self.parameterExists(name):
            print("parameters already added: " + name)
        else:
            self.parameters.append(Parameter(name, type, defValue))

    def generateFile(self, template):
        """Render *template* (a jinja2 template located next to this script) into
        a file of the same name with the '.templ' part removed."""
        context = dict()
        context["parameters"] = self.parameters

        path = ""
        filename = template.replace(".templ", "")
        dirname = os.path.dirname(__file__)
        env = Environment(loader=FileSystemLoader(dirname))
        print("generating: " + path + filename)
        # context manager guarantees the output file is closed even if
        # template rendering raises
        with open(path + filename, "wb") as fout:
            content = env.get_template(template).render(context)
            fout.write(content.encode('utf8'))

    def generate(self):
        """Print a summary table of all parameters, then generate the
        Parameters header and source files from their templates."""
        print("=" * 90)
        print("Config File:")
        print("")
        print("{0: <30}{1: <30}{2: <30}".format("Name", "Type", "Def. Value"))
        print("=" * 90)
        for param in self.parameters:
            print("{0: <30.29}{1: <30.29}{2: <30.29}".format(param.name, param.type, param.defValue))
        print("=" * 90)

        self.generateFile("Parameters.templ.h")
        self.generateFile("Parameters.templ.cpp")
# NOTE(review): excerpt of a larger script; `ps` is created earlier in the file.
ps.add_property("type", "uint_t", defValue="0", syncMode="ON_GHOST_CREATION")
ps.add_property("flags", "walberla::mesa_pd::data::particle_flags::FlagT", defValue="",
                syncMode="ON_GHOST_CREATION")
ps.add_property("nextParticle", "int", defValue="-1", syncMode="NEVER")

ps.add_include("blockforest/BlockForest.h")
# NOTE(review): excerpt of a larger script; `cfg` is created earlier in the file.
cfg.addParameter("vtk_out", "std::string", '"vtk_out"')
cfg.addParameter("sqlFile", "std::string", '"benchmark.sqlite"')
cfg.addParameter("recalculateBlockLevelsInRefresh", "bool", "false")
cfg.addParameter("alwaysRebalanceInRefresh", "bool", "true")
cfg.addParameter("reevaluateMinTargetLevelsAfterForcedRefinement", "bool", "false")
cfg.addParameter("allowRefreshChangingDepth", "bool", "false")
cfg.addParameter("allowMultipleRefreshCycles", "bool", "false")
cfg.addParameter("checkForEarlyOutInRefresh", "bool", "true")
cfg.addParameter("checkForLateOutInRefresh", "bool", "true")
cfg.addParameter("regridMin", "uint_t", 'uint_c(100)')
cfg.addParameter("regridMax", "uint_t", 'uint_c(1000)')
# NOTE(review): parameters between the two visible hunks (e.g. maxBlocksPerProcess)
# are not shown in this excerpt.
cfg.addParameter("baseWeight", "real_t", 'real_t(10.0)')
cfg.addParameter("metisipc2redist", "real_t", 'real_t(1000.0)')
cfg.addParameter("LBAlgorithm", "std::string", '"Hilbert"')
cfg.addParameter("metisAlgorithm", "std::string", '"PART_GEOM_KWAY"')
cfg.addParameter("metisWeightsToUse", "std::string", '"BOTH_WEIGHTS"')
cfg.addParameter("metisEdgeSource", "std::string", '"EDGES_FROM_EDGE_WEIGHTS"')
cfg.generate()
......@@ -2,51 +2,49 @@
import os
import time
import math
import random
import re
from influxdb import InfluxDBClient
from git import Repo
class Upload:
    """Parses mesa_pd benchmark timing output and uploads it to InfluxDB."""

    def __init__(self):
        try:
            self.write_user_pw = os.environ["INFLUXDB_MESAPD_PW"]
        except KeyError:
            import sys
            print('Password for the InfluxDB write_user was not set.\n',
                  'See https://docs.gitlab.com/ee/ci/variables/#secret-variables', file=sys.stderr)
            # plain re-raise preserves the pending KeyError and its traceback
            raise
        self.client = InfluxDBClient('i10grafana.informatik.uni-erlangen.de', 8086,
                                     'mesa_pd', self.write_user_pw, 'mesa_pd')

    def process(self, filename):
        """Read a timing table from *filename* and upload one measurement
        with one field per timer name."""
        tts = dict()
        with open(filename) as f:
            # iterate the file lazily instead of materializing readlines()
            for line in f:
                # matches rows like "[0] timerName | 12.3 % | 4.56"
                m = re.search(r'\[0\]\s*(\w*)\s*\|[\s\d\.\%]*\|\s*([\d\.]*)', line)
                if m is not None:
                    tts[m.group(1)] = float(m.group(2))

        json_body = [
            {
                'measurement': 'mesa_pd_benchmark',
                'tags': {
                    'host': os.uname()[1],
                    'image': os.environ["DOCKER_IMAGE_NAME"],
                },
                'time': int(time.time()),
                'fields': tts
            }
        ]
        print(tts)
        self.client.write_points(json_body, time_precision='s')
if __name__ == "__main__":
    up = Upload()
    up.process("mesa_pd.txt")
# -*- coding: utf-8 -*-
import os
import time
import math
import random
import re
from influxdb import InfluxDBClient
from git import Repo
class Upload:
    """Parses pe GranularGas benchmark output and uploads the PUpS figure,
    tagged with the run configuration, to InfluxDB."""

    def __init__(self):
        try:
            self.write_user_pw = os.environ["INFLUXDB_WRITE_USER"]
        except KeyError:
            import sys
            print('Password for the InfluxDB write_user was not set.\n',
                  'See https://docs.gitlab.com/ee/ci/variables/#secret-variables', file=sys.stderr)
            # plain re-raise preserves the pending KeyError and its traceback
            raise
        self.client = InfluxDBClient('i10grafana.informatik.uni-erlangen.de', 8086,
                                     'pe', self.write_user_pw, 'pe')

    def process(self, filename, model, friction, sync, parallelization):
        """Extract the "PUpS: <value>" figure from *filename* and upload it.

        model/friction/sync/parallelization are stored as tags on the point.
        """
        with open(filename) as f:
            s = f.read()
        m = re.search(r'PUpS: (\S*)', s)
        # NOTE(review): if the file contains no "PUpS:" line, m is None and
        # m.group(1) below raises AttributeError — kept as-is to preserve behavior.
        json_body = [
            {
                'measurement': 'pe_benchmark',
                'tags': {
                    'host': os.uname()[1],
                    'image': os.environ["DOCKER_IMAGE_NAME"],
                    'model': model,
                    'friction': friction,
                    'sync': sync,
                    'parallelization': parallelization
                },
                'time': int(time.time()),
                'fields': {'PUpS': float(m.group(1))}
            }
        ]
        print(float(m.group(1)))
        self.client.write_points(json_body, time_precision='s')
if __name__ == "__main__":
    up = Upload()
    up.process("GranularGas_DEM_NN.txt", "DEM", "Coulomb", "next neighbors", "8P1T")
    up.process("GranularGas_DEM_SO.txt", "DEM", "Coulomb", "shadow owners", "8P1T")
    up.process("GranularGas_HCSITS_NN_IFC.txt", "HCSITS", "InelasticFrictionlessContact", "next neighbors", "8P1T")
    up.process("GranularGas_HCSITS_NN_AICCBD.txt", "HCSITS", "ApproximateInelasticCoulombContactByDecoupling",
               "next neighbors", "8P1T")
    up.process("GranularGas_HCSITS_NN_ICCBD.txt", "HCSITS", "InelasticCoulombContactByDecoupling", "next neighbors",
               "8P1T")
    up.process("GranularGas_HCSITS_NN_IGMDC.txt", "HCSITS", "InelasticGeneralizedMaximumDissipationContact",
               "next neighbors", "8P1T")
    up.process("GranularGas_HCSITS_SO_IFC.txt", "HCSITS", "InelasticFrictionlessContact", "shadow owners", "8P1T")
......@@ -147,7 +147,7 @@ sweep_block_size = (TypedSymbol("cudaBlockSize0", np.int32),
sweep_params = {'block_size': sweep_block_size}
info_header = f"""
#include "stencil/D3Q{q_phase}.h"\nusing Stencil_phase_T = walberla::stencil::D3Q{q_phase};
#include "stencil/D3Q{q_phase}.h"\nusing Stencil_phase_T = walberla::stencil::D3Q{q_phase};
#include "stencil/D3Q{q_hydro}.h"\nusing Stencil_hydro_T = walberla::stencil::D3Q{q_hydro};
"""
......
......@@ -3,75 +3,78 @@
import numpy as np
import matplotlib.pyplot as plt
kernels = dict()
class Kernel:
    """Analytic (ECM-style) per-core cycle model for an LBM kernel.

    Either pass the cycle counts of the two kernel loops, or a pre-computed
    cycles-per-LUP figure via *cyclesRegPerLUP*.
    """

    def __init__(self, name, cyclesFirstLoop=0, cyclesSecondLoop=0, cyclesRegPerLUP=0):
        self.name = name
        if cyclesRegPerLUP <= 0:
            self.cyclesFirstLoop = cyclesFirstLoop
            self.cyclesSecondLoop = cyclesSecondLoop
            # second loop runs 9 times per lattice update
            self.cyclesRegPerLUP = cyclesFirstLoop + 9 * cyclesSecondLoop
        else:
            self.cyclesRegPerLUP = cyclesRegPerLUP

        # 8 lattice updates per cache line
        self.cyclesRegPerCacheLine = 8 * self.cyclesRegPerLUP
        # presumably 19 values (D3Q19) x load+store x 3 cycles — TODO confirm
        self.cyclesL1L2 = 3 * 19 * 2
        self.cyclesL2L3 = 3 * 19 * 2
        self.freq = 2.7e9
        self.cyclesMem = 305
        # self.cyclesMem = 191

    def mlups(self, processes):
        """Predicted MLUPS for *processes* cores, capped at the measured maximum."""
        singleCoreCycles = self.cyclesRegPerCacheLine + self.cyclesL1L2 + self.cyclesL2L3 + self.cyclesMem
        timeSingleCore = singleCoreCycles / self.freq
        mlups = 8 / timeSingleCore * 1e-6
        # todo
        mlupsMax = 78
        return min(processes * mlups, mlupsMax)

    def plot(self, divideByProcesses=False, processes=8, label=""):
        """Plot predicted MLUPS (optionally per process) over 1..processes cores."""
        x = np.arange(1, processes + 1, 1)
        if divideByProcesses:
            y = np.array([self.mlups(i) / i for i in x])
        else:
            y = np.array([self.mlups(i) for i in x])
        if label == "":
            label = "ecm_" + self.name
        plt.plot(x, y, marker='^', markersize=5, label=label)
kernels = dict()
# kernels['srt_split'] = Kernel("srt_split", 46, 12 )
# kernels['trt_pure_intelOpt'] = Kernel("trt_pure_intelOpt", 41/2, 10/2 )  # vectorized (v*pd)
kernels['srt_pure'] = Kernel("srt_pure", 40, 8)
kernels['trt_split'] = Kernel("trt_split", 41, 11)
# SRTStreamCollide.h - pgo and lto (20cycles first loop, 35 second)
kernels['srt_nonopt'] = Kernel("srt_nonopt", cyclesRegPerLUP=1045)
def plotAllKernels(divideByProcesses=False):
    """Plot every registered kernel model into the current matplotlib figure."""
    # BUG FIX: iterating the dict directly yields the *name strings*, which
    # have no .plot() — iterate the Kernel objects instead.
    for kernel in kernels.values():
        kernel.plot(divideByProcesses)
def plot(kernelName, divideByProcesses=False, label=""):
    """Plot a single registered kernel model, selected by name."""
    kernels[kernelName].plot(divideByProcesses, label=label)
if __name__ == "__main__":
    plotAllKernels()
    plt.legend()
    plt.show()
......@@ -4,85 +4,87 @@ import sqlite3
import sys
import shutil
def getColumnNames(db, tableName, dbName):
    """Return the columns of *tableName* in the attached database *dbName*
    as a list of (name, TYPE) tuples.

    Types are upper-cased so they compare equal regardless of how the table
    was declared.  NOTE: tableName/dbName are interpolated into the PRAGMA
    statement (PRAGMA accepts no bound parameters) — pass trusted names only.
    """
    cursor = db.cursor()
    cursor.execute("PRAGMA %s.table_info(%s)" % (dbName, tableName))
    columns = cursor.fetchall()
    # table_info rows are (cid, name, type, notnull, dflt_value, pk)
    res = []
    for e in columns:
        res.append((e[1], e[2].upper()))
    return res
# NOTE(review): this span is merge-request diff residue — the old and the
# flake8-reformatted version of mergeSqliteFiles are interleaved, and the
# function is truncated at the end of the visible excerpt.
# Purpose: attach fileToMerge to targetFile and add any columns of its
# "runs" table that targetFile's "runs" table is missing.
def mergeSqliteFiles ( targetFile, fileToMerge ):
db = sqlite3.connect( targetFile )
db.execute ('ATTACH "' + fileToMerge + '" AS toMerge')
targetColumns = getColumnNames( db, "runs", "main" )
toMergeColumns = getColumnNames( db, "runs", "toMerge" )
# Reformatted version of the same function (new side of the diff):
def mergeSqliteFiles(targetFile, fileToMerge):
db = sqlite3.connect(targetFile)
db.execute('ATTACH "' + fileToMerge + '" AS toMerge')
targetColumns = getColumnNames(db, "runs", "main")
toMergeColumns = getColumnNames(db, "runs", "toMerge")
columnsToCreate = [e for e in toMergeColumns if e not in targetColumns]
for column in columnsToCreate:
# Old side: a Python-2 print statement.
print "Adding Column %s to run table of %s " % ( column[0], targetFile )
db.execute ( "ALTER TABLE main.runs ADD COLUMN %s %s" % ( column[0], column[1] ) )
# BUG(review): the reformat split the Python-2 print statement into a bare
# `print` call followed by a no-op string expression — the message with its
# arguments is never printed.  Should be one call:
# print("Adding Column %s to run table of %s " % (column[0], targetFile))
print
"Adding Column %s to run table of %s " % (column[0], targetFile)
db.execute("ALTER TABLE main.runs ADD COLUMN %s %s" % (column[0], column[1]))