#!/usr/bin/env python3
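"""Produce detailed power usage data for a list of job IDs.

Queries the HLRS monitoring database for per-node power samples of one or
more PBS jobs (Hawk or Hawk-AI) and writes one CSV file per job, containing
per-timestamp summary statistics, per-node values, and the total energy
consumed.
"""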
import argparse
import numpy as np
from collections import OrderedDict
import os.path


def parse_arguments(args):
    parser = argparse.ArgumentParser(
        description='Produce detailed power usage data for a list of jobids.')
    parser.add_argument('-v', '--verbose', action='store_true',
                        help='Show database queries, etc.')
    parser.add_argument('-t', '--interval', action='store', type=float, default=5.0,
                        help="Interval between power values in seconds")
    parser.add_argument('--hawk-ai', action='store_true',
                        help="Job ran on Hawk-AI")
    parser.add_argument('jobid', type=parse_jobid,
                        nargs='+',
                        help='Job ID such as "2260215" or "2260215.hawk-pbs5"')
    return parser.parse_args(args)


def parse_jobid(s):
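    """Strip an optional '.hawk-pbs5' suffix and validate the remaining job ID.

    Example: '2260215.hawk-pbs5' -> '2260215'; array job IDs such as
    '2444420[201]' are accepted as well.
    """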
    import re
    hawkpbs = r'\.hawk-pbs5'  # escape the dot so only a literal '.hawk-pbs5' suffix matches
    jobid = re.sub(hawkpbs, '', s)
    not_allowed = r'[^0-9\[\]]'  # Jobid can be more complex than just digits, eg 2444420[201]
    if re.search(not_allowed, jobid):
        raise argparse.ArgumentTypeError(f'invalid job ID "{s}"')
    return jobid


class Power:
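    """Power samples of one job, organized per node and per timestamp ('epoch').

    Instances are built via from_list(), from_db() or from_pandas();
    to_file() writes the samples as CSV together with summary statistics.
    """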
    def __init__(self, nodes):
        self.nodes = nodes
        self.epochs = None
        self.first_ts = None
        self.last_ts = None
        self.warnings = ""
        self.energy = ""  # default so to_file() works even without energy data

    @classmethod
    def from_list(cls, data):
        """
        Returns a Power instance from a list of tuples (timestamp, node, value).

        Assumptions:
        - data is sorted by timestamp ascending
        - for each timestamp, there is the same set of nodes and in the same order
        """
        idx_ts, idx_node, idx_value = 0, 1, 2
        nodes = list(OrderedDict.fromkeys([line[idx_node] for line in data]))  # preserves order of nodes
        power = Power(nodes)

        values = {}
        for row in data:
            ts = row[idx_ts]
            if ts not in values:
                values[ts] = []
            value = row[idx_value]
            values[ts].append(value)

        epochs = values.keys()
        for epoch in epochs:
            power.insert_epoch(epoch, values[epoch])

        # check implicit assumptions: 1) ts/epochs are sorted
        e = list(epochs)
        if e != sorted(e):
            power.warnings += "# Warning: Unexpected unsorted timestamps.\n"

        # check implicit assumptions: 2) each line has #nodes values
        nnodes = len(nodes)
        for epoch in epochs:
            actual = len(values[epoch])
            if actual != nnodes:
                power.warnings += "# Warning: Unexpected number of nodes ({actual}/{expected})\n".format(actual=actual, expected=nnodes)
                break

        return power

    @classmethod
    def from_db(cls, db, jobid, interval, hawk_ai):
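        """Build a Power instance by querying the monitoring DB for one job."""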
        df = db.db_to_pf(jobid, interval, hawk_ai)
        power = cls.from_pandas(df, {'time': 'time', 'name': 'node', 'avg': 'power'})
        return power

    @classmethod
    def from_pandas(cls, dataframe, columns):
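        """Build a Power instance from a dataframe.

        'columns' maps the dataframe's existing column names to the expected
        names 'time', 'node' and 'power'.
        """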
        dataframe.rename(columns=columns, inplace=True)
        by_node = dataframe.groupby('node')
        nodes = list(by_node.groups.keys())

        power = cls(nodes)
        power.epochs = dataframe.groupby('time')
        power.dataframe = dataframe
        power.by_node = power.dataframe.groupby('node')
        # record first/last timestamp so filename() does not format 'None'
        power.first_ts = dataframe['time'].min()
        power.last_ts = dataframe['time'].max()
        power.energy = power.summarize_energy()

        return power

    def to_file(self, jobid, header=""):
        """Dumps power data to file. Returns the filename if successful and None if unsuccessful."""
        fname = self.filename(jobid)
        if os.path.exists(fname):
            print("Error: cowardly refusing to overwrite file ", fname)
            return None

        header += self.warnings
        header += self.energy
        try:
            with open(fname, "w+") as f:
                f.write(header + self.header())
                f.write(self.body())
        except IOError:
            print("Error: could not write to file ", fname)
            fname = None

        return fname

    def insert_epoch(self, ts, values):
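        """Record the values of all nodes for one timestamp; tracks first/last timestamp."""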
        if self.epochs is None:
            self.epochs = OrderedDict()  # __init__ leaves epochs unset
        self.epochs[ts] = values
        if not self.first_ts:
            self.first_ts = ts
        self.last_ts = ts

    def header(self):
        hd = "# all timestamps have unit milliseconds since unix epoch\n"
        hd += "# all power values have unit Watt\n"
        hd += "timestamp,RESERVED,head_node_power,avg_node_power,median_node_power,min_node_power,max_node_power,std_dev_node_power"
        # add node names here instead
        hd += "," + ",".join(self.nodes)
        hd += "\n"
        return hd
2023-11-20 14:53:52 +00:00
|
|
|
def body(self):
|
|
|
|
_body = ""
|
2024-02-20 13:21:12 +00:00
|
|
|
for epoch in self.epochs:
|
2023-11-22 13:46:25 +00:00
|
|
|
_body += self.pretty_print(self.summarize_epoch(epoch))
|
2023-11-20 14:53:52 +00:00
|
|
|
return _body
|
|
|
|
|
2023-11-22 13:45:14 +00:00
|
|
|
def summarize_time(self, ts):
|
2024-02-12 15:29:25 +00:00
|
|
|
return ts, ""
|
2023-11-20 14:53:52 +00:00
|
|
|
|
2023-10-26 14:19:16 +00:00
|
|
|
@staticmethod
|
2024-02-20 13:21:12 +00:00
|
|
|
def summarize_values(df):
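        """Return (head, avg, median, min, max, stddev) of the power values,
        matching the statistics columns written by header()."""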
        values = df['power'].values
        head = values[0]
        vmin, vmax = values.min(), values.max()
        avg, stddev = values.mean(), values.std()
        median = np.median(values)
        return head, avg, median, vmin, vmax, stddev

    def summarize_epoch(self, epoch):
        ts, values = epoch
        return self.summarize_time(ts) \
            + self.summarize_values(values) \
            + self.all_values(values)

    def all_values(self, values):
        return tuple(values['power'].tolist())

    def energy_total(self):
        energy = None
        if hasattr(self, "by_node"):
            energy = self.by_node.apply(self.energy_node).sum()
        return energy

    @staticmethod
    def energy_node(group):
        """Left-sided Riemann sum is enough, as time is lower bound of bucket"""
        delta_t = group["time"].diff().shift(-1)/1000.  # in seconds
        power = group['power']
        return (delta_t * power).iloc[:-1].sum()

    def summarize_energy(self):
        return "# Total energy consumed by job: {energy:.0f} J\n".format(energy=self.energy_total())

    @staticmethod
    def to_csv(args):
        return ",".join(str(a) for a in args)

    @staticmethod
    def pretty_print(args):
        return Power.to_csv(args) + '\n'

    def filename(self, jobid):
        fname = "detailed_power_{jobid}.hawk-pbs5.{first}-{last}.csv".format(
            jobid=jobid, first=self.first_ts, last=self.last_ts
        )
        return fname


class MonitoringDB:
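    """Connection to the HLRS TimescaleDB monitoring database plus query helpers."""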
    QUERY_STRING_HAWK = """
    -- For description of get_job_data(), see https://kb.hlrs.de/monitoring/index.php/TimescaleDB_-_Query_Guidelines#Function:_get_job_data_and_get_ai_job_data
    select * from get_job_data(
        '{jobid}.hawk-pbs5',
        'cmc_power_racktraynodepoweravg', -- power data source
        '{interval} seconds',
        array['avg'], -- aggregation: average across samples in bucket
        array['time','node'] -- sort by time first, then node (ascending)
        )
        as t(time bigint, name varchar, avg double precision);
    """

    QUERY_STRING_HAWK_AI = """
    -- For description of get_ai_job_data(), see https://kb.hlrs.de/monitoring/index.php/TimescaleDB_-_Query_Guidelines#Function:_get_job_data_and_get_ai_job_data
    select * from get_ai_job_data(
        '{jobid}.hawk-pbs5',
        'telegraf_ipmi_power_meter', -- power data source
        '{interval} seconds',
        array['avg'], -- aggregation: average across samples in bucket
        array['time','node'] -- sort by time first, then node (ascending)
        )
        as t(time bigint, name varchar, avg double precision);
    """

    def __init__(self, verbose):
        self.connection = self.init_db(verbose)

    @staticmethod
    def init_db(verbose):
        import sqlalchemy as db
        engine = db.create_engine('postgresql://hpc@hawk-monitor4:5432/coe_mon', echo=verbose)
        connection = engine.connect()
        return connection

    def close_db(self):
        return self.connection.close()

    @classmethod
    def build_query(cls, jobid, interval, hawk_ai):
        import sqlalchemy as db
        if hawk_ai:
            query_string = cls.QUERY_STRING_HAWK_AI
        else:
            query_string = cls.QUERY_STRING_HAWK
        return db.text(query_string.format(jobid=jobid, interval=interval))

    def db_to_list(self, jobid, interval, hawk_ai):
        query = self.build_query(jobid, interval, hawk_ai)
        return self.connection.execute(query).fetchall()

    def db_to_pf(self, jobid, interval, hawk_ai):
        import pandas as pd
        query = self.build_query(jobid, interval, hawk_ai)
        return pd.read_sql(query, con=self.connection)


class App:
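    """Command-line driver: fetches power data for each job ID and writes one CSV file per job."""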
    def __init__(self, config):
        self.config = config
        self.db = MonitoringDB(self.config.verbose)

    @staticmethod
    def warnings(config):
        warn = ""
        if not config.hawk_ai and config.interval < 5:
            warn += '# Warning: interval<5 is very small and may lead to data gaps.'
        if config.hawk_ai and config.interval < 60:
            warn += '# Warning: interval<60 is very small for Hawk-AI nodes and may lead to data gaps.'
        return warn

    def run_all(self):
        warnings = self.warnings(self.config)
        if warnings:
            print(warnings)

        header = f"# {self.config.datetime}: {self.config.cmd}\n"
        if warnings:
            header += f"{warnings}\n"
        header += "#\n"

        for jobid in self.config.jobid:
            try:
                power = Power.from_db(self.db, jobid, self.config.interval, self.config.hawk_ai)
            except RuntimeError:
                print('No data found for job ID "{}"'.format(jobid))
                continue

            fn = power.to_file(jobid, header)
            if fn:
                print('Created file {fn}'.format(fn=fn))
            if power.warnings:
                print(power.warnings)
            if power.energy:
                print(power.energy)


if __name__ == "__main__":
    import sys
    from datetime import datetime

    config = parse_arguments(sys.argv[1:])
    config.cmd = " ".join(sys.argv)
    config.datetime = f"{datetime.now()}"

    main = App(config)
    main.run_all()