Commit 7e31bb7d authored by Christoph Alt

Feature/dashboards

parent e2c3dee7
1 merge request: !1 Feature/dashboards
.gitignore
@@ -3,3 +3,4 @@ __pycache__/
*.swp
*.eggs
*.egg-info/
.env
.gitlab-ci.yml
stages:
- test
- deploy
test:
stage: test
@@ -11,3 +12,12 @@ test:
- pip install --user .
- python -m pytest
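# Manually triggered job: installs the package and uploads the Grafana dashboards
# via dashboards/deploy.py (see the deploy script below).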
deploy_uniformgrid_gpu:
stage: deploy
tags:
- docker
image: python:3.8
script:
- pip install --user .
- python3 dashboards/deploy.py
when: manual
dashboards/dashboard_base.py
from dataclasses import asdict, dataclass
from typing import List
from grafanalib.core import (Dashboard, Repeat, RowPanel, Template, Templating,
Time, TimeSeries)
from grafanalib.influxdb import InfluxDBTarget
from dashboards.influx_queries import Query
def get_influx_target(target_query: str) -> InfluxDBTarget:
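    """Wrap a raw InfluxQL query string in a grafanalib InfluxDBTarget."""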
return InfluxDBTarget(
query=target_query
)
def get_dashboard_variable_query(name: str, template_query: str, dataSource: str, **kwargs) -> Template:
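    """Create a multi-value 'query' template variable backed by the given data source."""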
return Template(
name,
template_query,
dataSource=dataSource,
type="query",
includeAll=True,
multi=True,
**kwargs
)
@dataclass
class DashboardOptions:
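    """Top-level options (title, description, tags, timezone) shared by the generated dashboards."""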
title: str
description: str
tags: List[str]
timezone: str = "browser"
def build_row_repeat_dashboard(options: DashboardOptions,
row_repeat_var: Template,
panel_repeat_var: Template,
dataSource: str,
measurment_name: str,
panel_query: Query,
unit: str,
other_vars: List[Template] = [],
) -> Dashboard:
"""Build a Dashboard that takes one query and repeats that with 2 variables."""
dashboard = Dashboard(
**asdict(options),
rows=[
RowPanel(
title=f'{row_repeat_var.name}: ${row_repeat_var.name}',
collapsed=True,
panels=[
TimeSeries(
title=f'{panel_repeat_var.name}: ${panel_repeat_var.name}',
dataSource=dataSource,
targets=[
get_influx_target(str(panel_query))
],
repeat=Repeat('h', panel_repeat_var.name),
unit=unit,
),
],
repeat=Repeat('v', row_repeat_var.name),
),
],
templating=Templating([panel_repeat_var, row_repeat_var, *other_vars]),
time=Time('now-7d', 'now'),
)
return dashboard.auto_panel_ids()
dashboards/dashboard_list.py
from typing import List
from dashboards.dashboard_base import (DashboardOptions,
build_row_repeat_dashboard,
get_dashboard_variable_query)
from dashboards.influx_queries import Query, join_variable_and, show_tag_values
def _uniform_grid(arch: str, group_by: List[str]):
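    """Build the Uniform Grid dashboard for one architecture: one row per collisionSetup value, one panel per host."""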
dataSource = 'InfluxDB-1'
measurment_name = f'UniformGrid{arch}'
row_repeat = "collisionSetup"
panel_repeat = "host"
unit = 'MLUPs per Process'
row_repeat_var = get_dashboard_variable_query(row_repeat,
show_tag_values(measurment_name, row_repeat),
dataSource)
panel_repeat_var = get_dashboard_variable_query(panel_repeat,
show_tag_values(measurment_name, panel_repeat),
dataSource)
other_filter = 'cellsPerBlock_0'
cellsPerBlock_var = get_dashboard_variable_query(other_filter,
show_tag_values(measurment_name, other_filter),
dataSource
)
where = join_variable_and([row_repeat, panel_repeat, other_filter])
query = Query(
select_='mlupsPerProcess',
from_=measurment_name,
where_=where,
group_by=group_by
)
options = DashboardOptions(
title=f'Uniform Grid {arch}',
description=f"Benchmark dashboard for the Uniform Grid {arch} Benchmark from walberla",
tags=[
arch,
'benchmark',
'walberla',
'Uniform Grid'
],
timezone="browser",
)
return build_row_repeat_dashboard(options,
row_repeat_var,
panel_repeat_var,
dataSource,
measurment_name,
query,
unit,
[cellsPerBlock_var]
)
def dashboard_uniformGridGPU():
group_by = ["blocks_0",
"blocks_1",
"blocks_2",
"cellsPerBlock_0",
"cellsPerBlock_1",
"cellsPerBlock_2",
"gpuBlockSize_0",
"gpuBlockSize_1",
"gpuBlockSize_2",
"collisionSetup",
"stencil",
"streamingPattern",
]
return _uniform_grid("GPU", group_by)
def dashboard_uniformGridCPU():
group_by = ["blocks_0",
"blocks_1",
"blocks_2",
"cellsPerBlock_0",
"cellsPerBlock_1",
"cellsPerBlock_2",
"periodic_0",
"periodic_1",
"periodic_2",
"collisionSetup",
"mpi_num_processes",
"streamingPattern",
"timeStepStrategy",
"stencil",
]
return _uniform_grid("CPU", group_by)
def dashboard_granular_gas():
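    """Build the MESA_PD kernel benchmark dashboard: one row per kernel field, one panel per host."""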
dataSource = 'InfluxDB-1'
measurment_name = 'MESA_PD_KernelBenchmark'
row_repeat = "kernel"
panel_repeat = "host"
unit = 'ms'
row_repeat_var = get_dashboard_variable_query(row_repeat,
f"SHOW FIELD KEYS FROM {measurment_name}",
dataSource)
panel_repeat_var = get_dashboard_variable_query(panel_repeat,
show_tag_values(measurment_name, panel_repeat),
dataSource)
query = Query(
select_='$kernel',
from_=measurment_name,
where_=f'"{panel_repeat}" =~ /^${panel_repeat}$/',
group_by=['mpi_num_processes', 'omp_max_threads'])
options = DashboardOptions(
title='Granular Gas Kernel Benchmark',
description="Benchmark dashboard for the Granular Gas Benchmark from walberla",
tags=[
'CPU',
'benchmark',
'walberla',
'Granular Gas'
],
timezone="browser",
)
return build_row_repeat_dashboard(options,
row_repeat_var,
panel_repeat_var,
dataSource,
measurment_name,
query,
unit)
def dashboard_phasefieldallenchan():
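    """Build the Phase Field Allen Cahn benchmark dashboard: one row per cellsPerBlock_0 value, one panel per host."""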
dataSource = 'InfluxDB-1'
measurment_name = 'PhaseFieldAllenCahn'
row_repeat = "cellsPerBlock_0"
panel_repeat = "host"
unit = 'MLUPs per Process'
options = DashboardOptions(
        title='Phase Field Allen Cahn',
description="Benchmark dashboard for the Phasefield Allen Cahn Benchmark from walberla",
tags=[
'CPU',
'benchmark',
'walberla',
'PhaseField Allen Cahn'
],
timezone="browser",
)
row_repeat_var = get_dashboard_variable_query(row_repeat,
show_tag_values(measurment_name, row_repeat),
dataSource)
panel_repeat_var = get_dashboard_variable_query(panel_repeat,
show_tag_values(measurment_name, panel_repeat),
dataSource)
query = Query(
select_='mlupsPerProcess',
from_=measurment_name,
where_=join_variable_and([row_repeat, panel_repeat]),
group_by=[
"blocks_0",
"blocks_1",
"blocks_2",
"cellsPerBlock_0",
"mpi_num_processes",
"host",
"executable",
"timeStepStrategy",
"stencil_phase",
"stencil_hydro"
]
)
return build_row_repeat_dashboard(options,
row_repeat_var,
panel_repeat_var,
dataSource,
measurment_name,
query,
unit)
dashboards/deploy.py
import dashboards.dashboard_list as boards
from dashboards.upload import upload_dashboard
def main():
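    """Build every dashboard_* factory found in dashboard_list and upload the resulting dashboards."""
    # Grafana folder id the dashboards are uploaded into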
walberla_folder = 8
for board in [getattr(boards, func) for func in dir(boards) if func.startswith("dashboard_")]:
upload_dashboard(board(), folder=walberla_folder)
if __name__ == "__main__":
main()
dashboards/influx_queries.py
from dataclasses import dataclass, field
from itertools import repeat
from typing import List, Union
@dataclass
class Query:
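    """Minimal representation of an InfluxQL SELECT statement, rendered by __str__ for panel targets."""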
select_: str
from_: str
where_: str
group_by: List[str] = field(default_factory=list)
def __str__(self):
ret = f'SELECT \"{self.select_}\" '
ret += f'FROM \"{self.from_}\" '
ret += f'WHERE ({self.where_}) AND $timeFilter '
group_by = ', '.join(f'"{tag}"' for tag in self.group_by)
ret += f'GROUP BY {group_by}'
return ret
def show_tag_values(table: str, key_name: str) -> str:
"""Return influx query to get all tag values from a measurment."""
return f"SHOW TAG VALUES FROM \"{table}\" WITH key = \"{key_name}\""
def get_variable_condition(variable_name: str) -> str:
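    """Return a WHERE condition matching a tag against the Grafana template variable of the same name."""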
clean = variable_name.strip()
if not clean:
raise ValueError("Empty variable name")
return f'"{clean}" =~ /^${clean}$/'
def join_conditions(conditions: List[str], operators: Union[List[str], str]):
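    """Join condition strings with the given operator(s); a single operator (or a one-element list) is repeated between all conditions."""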
ops = operators
if isinstance(operators, str):
ops = repeat(operators, len(conditions) - 1)
elif len(operators) == 1:
ops = repeat(operators[0], len(conditions) - 1)
else:
if len(conditions) - 1 != len(operators):
raise ValueError("unfitting lengths of conditions and operators")
ret = conditions[0]
for op, cond in zip(ops, conditions[1:]):
ret += f' {op} {cond}'
return ret
def join_variable_and(variable_names: List[str]) -> str:
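    """AND-join the template-variable conditions for the given tag names."""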
return join_conditions(
[get_variable_condition(name) for name in variable_names],
"AND"
)
dashboards/upload.py
from typing import Tuple
from grafanalib.core import Dashboard
from grafanalib._gen import DashboardEncoder
import json
import requests
import os
import dotenv
def get_dashboard_json(dashboard, overwrite=False, message="Updated by grafanalib", folder=0):
    '''
    get_dashboard_json generates JSON from a grafanalib Dashboard object
    :param dashboard - Dashboard() created via grafanalib
    '''
    # grafanalib generates JSON that needs to be wrapped in a "dashboard" root element
return json.dumps(
{
"dashboard": dashboard.to_json_data(),
"overwrite": overwrite,
"message": message,
"folderId": folder,
}, sort_keys=True, indent=2, cls=DashboardEncoder)
def upload_to_grafana(json, server, api_key, verify=True):
'''
    upload_to_grafana uploads the dashboard JSON to Grafana and prints the response
:param json - dashboard json generated by grafanalib
:param server - grafana server name
:param api_key - grafana api key with read and write privileges
'''
headers = {'Authorization': f"Bearer {api_key}", 'Content-Type': 'application/json'}
r = requests.post(f"https://{server}/api/dashboards/db", data=json, headers=headers, verify=verify)
# TODO: add error handling
print(f"{r.status_code} - {r.content}")
def get_folder_list(server, api_key, verify=True):
'''
    get_folder_list fetches the list of Grafana folders and prints the response
:param server - grafana server name
:param api_key - grafana api key with read and write privileges
'''
headers = {'Authorization': f"Bearer {api_key}", 'Content-Type': 'application/json'}
r = requests.get(f"https://{server}/api/folders", headers=headers, verify=verify)
# TODO: add error handling
print(f"{r.status_code} - {r.content}")
def load_config_from_env(env_path: str = ".env") -> Tuple[str, str]:
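    """Load GRAFANA_SERVER and GRAFANA_API_KEY from the environment, reading them from a .env file if it exists."""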
if os.path.exists(env_path):
dotenv.load_dotenv(env_path)
grafana_api_key = os.getenv("GRAFANA_API_KEY")
grafana_server = os.getenv("GRAFANA_SERVER")
return grafana_server, grafana_api_key
def upload_dashboard(dashboard: Dashboard, folder: int) -> None:
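    """Serialize the dashboard and upload it into the given Grafana folder, overwriting any existing version."""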
grafana_server, grafana_api_key = load_config_from_env()
dashboard_json = get_dashboard_json(dashboard, overwrite=True, folder=folder)
upload_to_grafana(dashboard_json, grafana_server, grafana_api_key)
setup.py
#!/usr/bin/env python
from setuptools import find_packages, setup
setup(name="cb-util",
version="0.1",
description="Collection of scripts and wrapper of contious benchmarking",
author="Christoph Alt",
author_email="Christoph.alt@fau.de",
      packages=find_packages(include=["cbutil",
                                      "cbutil.postprocessing",
                                      "dashboards"]),
install_requires=[
"python-dotenv",
"influxdb",
"gitpython",
"grafanalib",
],
setup_requires=['pytest-runner'],
tests_require=['pytest']
from grafanalib.core import (Dashboard, Repeat, RowPanel, Templating, Time,
TimeSeries)
from dashboards.dashboard_base import (get_dashboard_variable_query,
get_influx_target)
from dashboards.dashboard_list import dashboard_uniformGridGPU
from dashboards.influx_queries import Query, show_tag_values
dataSource = 'InfluxDB-1'
measurment_name = 'UniformGridGPU'
q1 = Query(
select_='mlupsPerProcess',
from_=measurment_name,
where_='"collisionSetup" =~ /^$collisionSetup$/ AND "host" =~ /^$host$/ AND "cellsPerBlock_0" =~ /^$cellsPerBlock_0$/',
group_by=[
"blocks_0",
"blocks_1",
"blocks_2",
"cellsPerBlock_0",
"cellsPerBlock_1",
"cellsPerBlock_2",
"gpuBlockSize_0",
"gpuBlockSize_1",
"gpuBlockSize_2",
"collisionSetup",
"stencil",
"streamingPattern"
]
)
host_var = get_dashboard_variable_query("host",
show_tag_values(measurment_name, "host"),
dataSource)
collisionsetup_var = get_dashboard_variable_query("collisionSetup",
show_tag_values(measurment_name, "collisionSetup"),
dataSource)
other_filter = 'cellsPerBlock_0'
cellsPerBlock_var = get_dashboard_variable_query(other_filter,
show_tag_values(measurment_name, other_filter),
dataSource)
dashboard = Dashboard(
title="Uniform Grid GPU",
description="Benchmark dashboard for the Uniform Grid GPU Benchmark from walberla",
tags=[
'GPU',
'benchmark',
'walberla',
'Uniform Grid'
],
timezone="browser",
rows=[
RowPanel(
title="collisionSetup: $collisionSetup",
collapsed=True,
panels=[
TimeSeries(
title='host: $host',
dataSource=dataSource,
targets=[
get_influx_target(str(q1))
],
repeat=Repeat('h', host_var.name),
unit='MLUPs per Process',
),
],
repeat=Repeat('v', collisionsetup_var.name),
),
],
templating=Templating([host_var, collisionsetup_var, cellsPerBlock_var]),
time=Time('now-7d', 'now'),
).auto_panel_ids()
def test_build_dashboard():
assert dashboard_uniformGridGPU() == dashboard
import pytest
from dashboards.influx_queries import (Query, get_variable_condition,
join_conditions, show_tag_values)
def test_query():
q = Query(
select_='mlupsPerProcess',
from_='UniformGridGPU',
where_='"host" =~ /^$host$/ AND "collisionSetup" =~ /^$collisionSetup$/',
group_by=[
"blocks_0",
"blocks_1",
"blocks_2",
"cellsPerBlock_0",
"cellsPerBlock_1",
"cellsPerBlock_2",
"gpuBlockSize_0",
"gpuBlockSize_1",
"gpuBlockSize_2",
"collisionSetup",
"stencil",
"streamingPattern"
]
)
q1 = ('SELECT "mlupsPerProcess" '
'FROM "UniformGridGPU" '
'WHERE ("host" =~ /^$host$/ AND "collisionSetup" =~ /^$collisionSetup$/) AND $timeFilter '
'GROUP BY "blocks_0", "blocks_1", "blocks_2", '
'"cellsPerBlock_0", "cellsPerBlock_1", "cellsPerBlock_2", '
'"gpuBlockSize_0", "gpuBlockSize_1", "gpuBlockSize_2", '
'"collisionSetup", "stencil", "streamingPattern"')
assert q1 == str(q)
def test_show_tag_values():
s = show_tag_values("UniformGridGPU", "host")
assert s == 'SHOW TAG VALUES FROM \"UniformGridGPU\" WITH key = \"host\"'
def test_get_variable_condition():
assert get_variable_condition("host") == '"host" =~ /^$host$/'
assert get_variable_condition(" host ") == '"host" =~ /^$host$/'
with pytest.raises(ValueError):
get_variable_condition("")
with pytest.raises(ValueError):
get_variable_condition(" ")
with pytest.raises(ValueError):
get_variable_condition("\t ")
def test_join_conditions_two():
h = get_variable_condition("host")
c = get_variable_condition("collisionSetup")
actual = join_conditions([h, c], "AND")
    expected = '"host" =~ /^$host$/ AND "collisionSetup" =~ /^$collisionSetup$/'
    assert actual == expected
def test_join_conditions_three():
conds = [get_variable_condition(name) for name in ["host",
"collisionSetup",
"cellsPerBlock_0"]]
assert join_conditions(conds, "AND") == " AND ".join(conds)
assert join_conditions(conds, ["AND", "AND"]) == " AND ".join(conds)
assert join_conditions(conds, ["AND"]) == " AND ".join(conds)
assert join_conditions(conds, ["OR"]) == " OR ".join(conds)
    expected = ('"host" =~ /^$host$/ '
                'AND "collisionSetup" =~ /^$collisionSetup$/ '
                'OR "cellsPerBlock_0" =~ /^$cellsPerBlock_0$/')
    assert join_conditions(conds, ["AND", "OR"]) == expected