Add helpers for mpiP
parent 1b6199fd9d
commit d201363569
3 changed files with 249 additions and 1 deletion
@@ -3,4 +3,6 @@
Collection of private utilities and scripts for Hawk.

- monitoring: scripts related to power monitoring
- performance: scripts, configs, etc. related to performance tools
  - this can be relocated into a `/opt/hlrs/non-spack/revXX` directory
237 performance/mpiP/share/mpip2POP.py Executable file
@@ -0,0 +1,237 @@
#!/usr/bin/env python3
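# Summarise mpiP reports as POP-style efficiency metrics (PE, LB, CE, CScal,
# GE). Pass one report per rank count of a scaling series on the command line.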
import sys
import re
from operator import itemgetter
import logging

logging.basicConfig(stream=sys.stderr, level=logging.ERROR)


def get_Nranks(fn):
    import subprocess

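    # Count the '@ MPI Task Assignment' lines of the report: sed -n '$='
    # prints the number of the last input line, i.e. the number of grep
    # matches, and there is one such line per rank.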
    cmd = "grep '^@ MPI Task Assignment' %s | sed -n '$='" % (fn,)
    result = subprocess.run(cmd, shell=True, stdout=subprocess.PIPE)
    try:
        nranks = int(result.stdout)
    except ValueError:
        logging.info("Cannot infer nranks from MPI Task Assignments.")
        nranks = None

    return {"ranks": nranks}


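# Parse the 'Task Time Statistics' section of an mpiP report: a separator and
# a header line, then Max/Mean/Min/Stddev/Aggregate rows whose first column is
# AppTime and whose second column is the time spent inside MPI.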
def get_times(fn):
    logging.debug("Getting times from %s", fn)
    start_time_stats = re.compile(r"^@-+ Task Time Statistics")
    separator = re.compile(r"^-")
    header = re.compile(r".*AppTime")
    maxline = re.compile(r"^Max ")
    meanline = re.compile(r"^Mean\s+([\.\d]+)\s+([\.\d]+)")
    minline = re.compile(r"^Min\s+([\.\d]+)\s+([\.\d]+)")
    stdline = re.compile(r"^Stddev ")
    aggrline = re.compile(r"^Aggregate\s+([\.\d]+)\s+([\.\d]+)")

    times = dict()

    with open(fn) as stream:
        # find start of time statistics
        line = stream.readline()
        while line:
            if start_time_stats.match(line):
                break
            line = stream.readline()
        logging.debug("Done start")

        # expect separator
        line = stream.readline()
        if not separator.match(line):
            logging.error("Failed to parse expected separator.")
            return times
        logging.debug("Done separator")

        # expect header
        line = stream.readline()
        if not header.match(line):
            logging.error("Failed to parse expected header.")
            return times
        logging.debug("Done header")

        # expect Max line
        line = stream.readline()
        if not maxline.match(line):
            logging.error("Failed to parse expected Max line.")
            return times

        # expect and parse Mean line
        line = stream.readline()
        m = meanline.match(line)
        if not m:
            logging.error("Failed to parse expected Mean line.")
            return times
        avg_app = float(m.group(1))
        avg_mpi = float(m.group(2))

        # expect and parse Min line
        line = stream.readline()
        m = minline.match(line)
        if not m:
            logging.error("Failed to parse expected Min line.")
            return times
        min_mpi = float(m.group(2))

        # expect Stddev line
        line = stream.readline()
        if not stdline.match(line):
            logging.error("Failed to parse expected Stddev line.")
            return times

        # expect and parse Aggregate line
        line = stream.readline()
        m = aggrline.match(line)
        if not m:
            logging.error("Failed to parse expected Aggregate line.")
            return times
        aggr_app = float(m.group(1))
        logging.debug("Done times")

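    # Derived times: "useful" time is time spent outside MPI, so the rank with
    # the least MPI time has the most useful time; aggregate over mean AppTime
    # recovers the rank count as a fallback for get_Nranks().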
    elapsed = avg_app
    times["elapsed"] = elapsed
    times["avg useful"] = elapsed - avg_mpi
    times["max useful"] = elapsed - min_mpi
    times["ranks_alt"] = round(aggr_app/avg_app)

    return times


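# POP-style efficiencies: Parallel Efficiency (PE) factors into Load Balance
# (LB) times Communication Efficiency (CE).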
def efficiencies(fn):
    metrics = dict()

    metrics.update(get_Nranks(fn))
    metrics.update(get_times(fn))

    if metrics["ranks"] is None:
        metrics["ranks"] = metrics["ranks_alt"]

    metrics["PE"] = metrics["avg useful"]/metrics["elapsed"]
    metrics["LB"] = metrics["avg useful"]/metrics["max useful"]
    metrics["CE"] = metrics["max useful"]/metrics["elapsed"]

    return metrics


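# Computation Scalability (CScal) compares useful time against the reference
# run with the fewest ranks (the list must already be sorted by ranks); for
# strong scaling, total useful time is compared by weighting with the ratio of
# rank counts. Global Efficiency is GE = PE * CScal.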
def scalabilities(metrics, strong=None):
    if strong is None:
        # Try to determine scaling type. For now assume weak scaling.
        strong = False

    ref = metrics[0]

    nranks_ref = ref["ranks"]
    useful_ref = ref["avg useful"]

    for m in metrics:
        scal = useful_ref/m["avg useful"]
        if strong:
            scal *= nranks_ref/m["ranks"]
        m["CScal"] = scal
        m["GE"] = m["PE"]*scal

    return metrics


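# Write the metrics as CSV, one row per run, preceded by a '#' header line.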
def dump(metrics, stream=sys.stdout, sep=",", keys=None):
    if not keys:
        keys = [
            "ranks",
            "elapsed", "avg useful", "max useful",
            "GE", "PE", "LB", "CE", "CScal"
        ]

    header = "# " + sep.join(keys) + "\n"
    stream.write(header)

    for m in metrics:
        line = sep.join([str(m[key]) for key in keys]) + "\n"
        stream.write(line)


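# Render the metrics as a fixed-width text table with one column per run.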
def pretty_print(metrics, stream=sys.stdout):
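    # row labels, padded to a common width (width_desc is taken from "GE")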
    descriptions = {
        "GE": " GE             ",
        "PE": " PE             ",
        "LB": " LB             ",
        "CE": " CE             ",
        "TrE": " TrE            ",
        "SerE": " SerE           ",
        "CScal": " CScal          ",
        "elapsed": " Elapsed time   ",
        "avg useful": " Average useful ",
        "max useful": " Max useful     "
    }
    eff_keys = ["GE", "PE", "LB", "CE"]
    scal_keys = ["CScal"]
    other_keys = ["elapsed", "avg useful", "max useful"]
    separator = "|"

    width_desc = len(descriptions["GE"])
    width_col = 7
    width_separator = len(separator)

    ncols = len(metrics)
    skip = "-" * (width_desc + (width_separator+width_col)*ncols + width_separator)

    table = ""

    table += skip + "\n"

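    # header row: the rank count of each run, one column per run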
    table += " " * width_desc + separator
    for col in metrics:
        table += f'{col["ranks"]:^{width_col}d}' + separator
    table += "\n"
    table += skip + "\n"

    for key in eff_keys:
        line = descriptions[key] + separator
        for col in metrics:
            line += f'{col[key]:^{width_col}.2f}' + separator
        table += line + "\n"
    for key in scal_keys:
        line = descriptions[key] + separator
        for col in metrics:
            line += f'{col[key]:^{width_col}.2f}' + separator
        table += line + "\n"
    table += skip + "\n"

    for key in other_keys:
        line = descriptions[key] + separator
        for col in metrics:
            line += f'{col[key]:^{width_col}.2g}' + separator
        table += line + "\n"
    table += skip + "\n"

    stream.write(table)


def main():
    filenames = sys.argv[1:]

    metrics = list()

    # compute parallel efficiencies for each input
    for fn in filenames:
        print("Processing file %s" % fn, file=sys.stderr)
        metrics.append(efficiencies(fn))

    # sort metrics by ranks
    metrics.sort(key=itemgetter('ranks'))

    # compute scalabilities
    metrics = scalabilities(metrics)

    dump(metrics)
    pretty_print(metrics)


if __name__ == "__main__":
    main()

9 performance/mpiP/share/trace_mpiP.sh Executable file
@@ -0,0 +1,9 @@
#!/bin/bash

# default mpiP config
DEFAULT_MPIP="-c -d"

# set mpiP config to default if not provided by user
MPIP=${MPIP:-$DEFAULT_MPIP}
export MPIP
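# run the given command with the mpiP profiling library preloaded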
env LD_PRELOAD=libmpiP.so "$@"
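For context, a minimal usage sketch combining the two helpers (the application
name, rank counts, and report file names are illustrative; libmpiP.so must be
resolvable by the dynamic linker, e.g. via LD_LIBRARY_PATH):

    # collect one mpiP report per rank count of the scaling series
    mpirun -np 4 ./trace_mpiP.sh ./my_app
    mpirun -np 8 ./trace_mpiP.sh ./my_app

    # summarise all reports: CSV dump plus a pretty-printed table
    ./mpip2POP.py *.mpiP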