This commit is contained in:
oscar 2022-11-23 20:05:48 -05:00
parent 4ff2de5d1e
commit d0c7329f9f
4 changed files with 307 additions and 30 deletions

1
.pyenv
View File

@ -1 +0,0 @@
PYTHONPATH="./scripts/libs"

View File

@ -64,6 +64,11 @@ def process_dir(rootdir: str, dat : dict[any]):
elif sendfile == "False" and tls == "True" and ktls == "True":
mode = "ktls"
if "icelake" in server:
server = "icelake"
elif "milan" in server:
server = "milan"
memloadgen_bps = []
logs = []
enum_files(rootdir, logs)
@ -92,10 +97,10 @@ def process_dir(rootdir: str, dat : dict[any]):
print("Warning: failed to parse " + log + ". err: " + str(e))
return
if (affinity,mode) not in dat[(memloadgen, fs)]:
dat[(memloadgen, fs)][(affinity,mode)] = []
if (affinity,mode) not in dat[server][(memloadgen, fs)]:
dat[server][(memloadgen, fs)][(affinity,mode)] = []
dat[(memloadgen, fs)][(affinity,mode)].append((parser.aggregate_egress_bps, np.sum(memloadgen_bps)))
dat[server][(memloadgen, fs)][(affinity,mode)].append((parser.aggregate_egress_bps, np.sum(memloadgen_bps)))
return
for subdir in os.listdir(rootdir):
@ -117,35 +122,41 @@ def main():
dat = dict()
for c in ["nvdimm", "tmpfs", "ssd"]:
for b in ["0", "50", "False"]:
dat[(b, c)] = dict()
for a in ["icelake", "milan"]:
dat[a] = dict()
for c in ["nvdimm", "tmpfs", "ssd"]:
for b in ["0", "50", "False"]:
dat[a][(b, c)] = dict()
process_dir(datdir, dat)
for c in ["tmpfs", "ssd"]:
for b in ["0", "50", "False"]:
print("memloadgen: " + b + ",storage: " + c)
data = dat[(b, c)]
print("affinity,write,sendfile,tls,ktls,ktls + sendfile")
if data == None:
print("N/A,N/A,N/A,N/A,N/A,N/A")
for affinity in ["socket00", "socket01", "socket10", "socket11"]:
line = f"{affinity},"
for mode in ["write", "sendfile", "tls", "ktls", "ktls + sendfile"]:
if (affinity, mode) not in data:
line += "N/A,"
else:
vals = data[(affinity, mode)]
real_vals = []
real_mlg = []
for i in range(0, len(vals)):
real_vals.append(vals[i][0] / 1024.0 / 1024.0 / 1024.0 / 8.0)
real_mlg.append(vals[i][1] / 1024.0 / 1024.0 / 1024.0)
line += "{:.2f} ({:.2f}) [{:.2f} ({:.2f})]".format(np.average(real_vals), np.std(real_vals),
np.average(real_mlg), np.std(real_mlg)) + ","
print(line)
print("")
for a in ["icelake", "milan"]:
data2 = dat[a]
print(a)
for c in ["tmpfs", "ssd"]:
for b in ["0", "50", "False"]:
print("memloadgen: " + b + ",storage: " + c)
data = data2[(b, c)]
print("affinity,write,sendfile,tls,ktls,ktls + sendfile")
if data == None:
print("N/A,N/A,N/A,N/A,N/A,N/A")
for affinity in ["socket00", "socket01", "socket10", "socket11"]:
line = f"{affinity},"
for mode in ["write", "sendfile", "tls", "ktls", "ktls + sendfile"]:
if (affinity, mode) not in data:
line += "N/A,"
else:
vals = data[(affinity, mode)]
real_vals = []
real_mlg = []
for i in range(0, len(vals)):
real_vals.append(vals[i][0] / 1024.0 / 1024.0 / 1024.0 / 8.0)
real_mlg.append(vals[i][1] / 1024.0 / 1024.0 / 1024.0)
line += "{:.2f} ({:.2f}) [{:.2f} ({:.2f})]".format(np.average(real_vals), np.std(real_vals),
np.average(real_mlg), np.std(real_mlg)) + ","
print(line)
print("")
print("")
if __name__ == "__main__":

117
scripts/iperf_graph.py Normal file
View File

@ -0,0 +1,117 @@
#!/usr/bin/env python3.6
import pandas as pd
import matplotlib.pyplot as plt
from matplotlib import ticker
import numpy as np
import os
import sys
import libpar as par
import getopt
import iperfconf as ic
#
# memloadgen vs max throughput
#
# Marker shapes cycled through, one per plotted curve (indexed in generate_graph).
marker_map = ["o", "P", "s", "v", "*", "+", "^", "1", "2", "d", "X", "o", "P", "s", "v", "*", "+", "^", "1", "2", "d", "X"]
# xkcd named colors, one per curve. NOTE(review): this list (18 entries) is shorter
# than marker_map (22 entries) — indexing past 18 curves would fail; confirm intent.
color_map = ["xkcd:black", "xkcd:red", "xkcd:blue", "xkcd:green", "xkcd:cyan", "xkcd:purple", "xkcd:orange", "xkcd:salmon", "xkcd:lightgreen", "xkcd:indigo", "xkcd:brown", "xkcd:bubblegum", "xkcd:lavender", "xkcd:maroon", "xkcd:fern", "xkcd:sky", "xkcd:orchid", "xkcd:sienna"]
# Labels for the four parser indices — appears unused in this file; TODO confirm.
parser_idx_labels = ["srv_hw", "srv_sw", "clt_hw", "clt_sw"]
def generate_graph(server_name : str, affinity : str, fn : str, dat : dict, ylim : tuple = (0, 13)):
    """Plot throughput (GB/s) vs. memloadgen percent for one (server, affinity).

    One curve is drawn per configuration group that differs only in its
    memloadgen setting (grouped via Conf.equal_except_memload); error bars are
    the std-dev across runs. The figure is saved to `fn`.

    server_name: server label to filter `dat` by (e.g. "icelake")
    affinity: affinity label to filter `dat` by (e.g. "socket00")
    fn: output image path
    dat: parsed results keyed by ic.Conf, values ic.ConfData
    ylim: y-axis range in GB/s (default keeps the original fixed 0-13 range)

    Raises Exception if the same configuration appears twice in `dat`.
    """
    marker_idx = 0
    color_idx = 0
    ax = plt.gca()
    ax.set_yscale("linear")
    ax.set_title(server_name)
    ax.set_xlabel("Memload Percent (%)")
    ax.set_ylabel("Throughput (GB/s)")
    ax.set_ylim(*ylim)
    ax.xaxis.get_major_formatter().set_scientific(False)
    ax.yaxis.set_minor_formatter(ticker.ScalarFormatter())
    # BUGFIX: was `dict(ic.Conf, ic.ConfData)` — that calls dict() with two
    # positional arguments, which is invalid; a subscripted annotation is meant.
    graph_data : dict[ic.Conf, ic.ConfData] = dict()
    print(f"Generating graph => {server_name}_{affinity} ...")
    # filter dat down to the requested server/affinity
    for conf in dat:
        if (conf.server == server_name) and (conf.affinity == affinity):
            if conf not in graph_data:
                graph_data[conf] = dat[conf]
            else:
                raise Exception("duplicate conf found!")
    labels = []
    xs = []
    ys = []
    errs = []
    skip_list = []
    # one curve per conf
    each_conf : ic.Conf
    for each_conf in graph_data:
        if each_conf in skip_list:
            continue
        label = each_conf.to_string_graph()
        bps = []
        pct = []
        err = []
        other_conf : ic.Conf
        for other_conf in graph_data:
            # every conf differing only by memloadgen contributes a point to this curve
            if other_conf.equal_except_memload(each_conf):
                bps.append(np.average(graph_data[other_conf].get_bps()) / 8 / 1024 / 1024 / 1024)
                err.append(np.std(graph_data[other_conf].get_bps()) / 8 / 1024 / 1024 / 1024)
                pct.append(int(other_conf.memloadgen))
                skip_list.append(other_conf)
        # order each curve's points by memload percent
        arg = np.argsort(pct)
        labels.append(label)
        xs.append(np.array(pct)[arg])
        ys.append(np.array(bps)[arg])
        errs.append(np.array(err)[arg])
    # draw curves in label order so the legend is stable across runs
    arg = np.argsort(labels)
    lsorted = np.array(labels)[arg]
    xsorted = np.array(xs)[arg]
    ysorted = np.array(ys)[arg]
    esorted = np.array(errs)[arg]
    for i in range(len(lsorted)):
        # BUGFIX: wrap the indices so more curves than map entries cannot raise
        # IndexError (color_map has fewer entries than marker_map).
        marker_type = marker_map[marker_idx % len(marker_map)]
        color_type = color_map[color_idx % len(color_map)]
        marker_idx += 1
        color_idx += 1
        ax.errorbar(x = xsorted[i], y = ysorted[i], yerr = esorted[i], xerr = None, label=lsorted[i], marker=marker_type, color=color_type, markersize=8)
    plt.gcf().set_size_inches(23.4, 16.5)
    ax.legend()
    plt.savefig(fn, dpi=150)
    plt.close()
def main():
    """Parse -d <datadir>, process its logs, and emit one graph per (server, affinity).

    Raises Exception when no -d option is given.
    """
    datdir = None
    options = getopt.getopt(sys.argv[1:], 'd:')[0]
    for opt, arg in options:
        # BUGFIX: was `opt in ('-d')` — ('-d') is just the string "-d", so the
        # test was an accidental substring check; compare for equality instead.
        if opt == '-d':
            datdir = arg
    if datdir is None:
        raise Exception("Must specify -d parameter")
    dat = dict()
    ic.process_directory(datdir, dat)
    # collect each distinct (server, affinity) pair present in the parsed data
    tuples = set()
    conf : ic.Conf
    for conf in dat:
        tuples.add((conf.server, conf.affinity))
    for tup in tuples:
        generate_graph(tup[0], tup[1], f"{datdir}/{tup[0]}_{tup[1]}.png", dat)

if __name__ == "__main__":
    main()

View File

@ -1,4 +1,6 @@
import numpy as np
import os
import libpar as par
class Conf:
def __init__(self, server, clients, server_dat, clients_dat, clients_affinity, affinity, sendfile, tls, ktls, memloadgen, filepath, odirect):
@ -24,6 +26,42 @@ class Conf:
ret += f"_filepath.{self.filepath.replace('/','-')}_odirect.{self.odirect}"
return ret
def to_string_graph(self):
ret = f"affinity.{self.affinity}_mode.{self.tls}_memloadgen.{self.memloadgen}_fs.{self.filepath}"
return ret
def __hash__(self):
return hash((self.server, self.filepath, self.odirect, self.memloadgen, self.tls, self.sendfile, self.ktls, self.affinity))
def __eq__(self, other):
return (self.server, self.filepath, self.odirect, self.memloadgen, self.tls, self.sendfile, self.ktls, self.affinity) == \
(other.server, other.filepath, other.odirect, other.memloadgen, other.tls, other.sendfile, other.ktls, other.affinity)
def equal_except_memload(self, other):
return (self.server, self.filepath, self.odirect, self.tls, self.sendfile, self.ktls, self.affinity) == \
(other.server, other.filepath, other.odirect, other.tls, other.sendfile, other.ktls, other.affinity)
def __ne__(self, other):
# Not strictly necessary, but to avoid having both x==y and x!=y
# True at the same time
return not (self == other)
class ConfData:
    """Accumulates per-run measurements for a single Conf."""

    def __init__(self, conf : Conf):
        self.conf = conf     # configuration these samples belong to
        self.bps = []        # one throughput sample per run
        self.memload = []    # one memloadgen sample per run

    def add_run(self, qps : float, memload : int):
        """Record one run's throughput and memload measurement."""
        self.bps.append(qps)
        self.memload.append(memload)

    def get_bps(self) -> list[float]:
        """All recorded throughput samples, in insertion order."""
        return self.bps

    def get_memload(self) -> list[int]:
        """All recorded memload samples, in insertion order."""
        return self.memload
class ArgTypes:
def __init__(self):
self.all = [[]]
@ -111,3 +149,115 @@ def list_to_comma_delimited(list : list[any]):
ret += ","
ret += str(list[i])
return ret
def _enum_files(rootdir : str, ret : list[str]):
if os.path.isfile(rootdir):
ret.append(rootdir)
return
for subdir in os.listdir(rootdir):
each_dir = os.path.join(rootdir, subdir)
_enum_files(each_dir, ret)
def process_directory(dir: str, dat : dict[Conf, ConfData]):
    """Recursively walk `dir`; parse each leaf benchmark directory into `dat`.

    A leaf directory is recognized by having both "memloadgen" and "sendfile"
    in its path; its basename encodes the run configuration as
    underscore-separated "key.value" segments. Results from all log files in
    that directory are aggregated into dat[conf] as one run.
    """
    if (("memloadgen" in dir) and ("sendfile" in dir)):
        # Expected basename layout:
        #   <server>_affinity.<cpus>_sendfile.<b>_tls.<b>_ktls.<b>_memloadgen.<v>_<fspath>...
        segments = os.path.basename(dir).split("_")
        server = segments[0]
        affinity = segments[1].split(".")[1]
        sendfile = segments[2].split(".")[1]
        tls = segments[3].split(".")[1]
        ktls = segments[4].split(".")[1]
        memloadgen = segments[5].split(".")[1]
        fs = segments[6].split(".")[1]
        # cleanup data
        # Map the raw CPU list to a socket label. NOTE(review): on "2-int"
        # servers the socket numbering is apparently inverted — confirm.
        if int(affinity.split(",")[0]) <= 8:
            if "2-int" in server:
                affinity = "socket01"
            else:
                affinity = "socket00"
        else:
            if "2-int" in server:
                affinity = "socket11"
            else:
                affinity = "socket10"
        # Normalize the filesystem path fragment to a storage-type label.
        if "tmpfs" in fs:
            fs = "tmpfs"
        elif "nvdimm" in fs:
            fs = "nvdimm"
        elif "mnt" in fs:
            fs = "ssd"
        # NOTE(review): "False" is treated as 0% memload, and a raw "0" is
        # remapped to 100% — presumably an artifact of how the runs were
        # launched; confirm against the run scripts.
        if memloadgen == "False":
            memloadgen = "0"
        elif memloadgen == "0":
            memloadgen = "100"
        # Collapse the three boolean flags into a single transfer-mode label.
        if sendfile == "True" and tls == "True" and ktls == "True":
            mode = "ktls+sendfile"
        elif sendfile == "True" and tls == "False" and ktls == "False":
            mode = "sendfile"
        elif sendfile == "False" and tls == "False" and ktls == "False":
            mode = "write"
        elif sendfile == "False" and tls == "True" and ktls == "False":
            mode = "tls"
        elif sendfile == "False" and tls == "True" and ktls == "True":
            mode = "ktls"
        # NOTE(review): if none of the branches above match, `mode` stays
        # unbound and the Conf(...) call below raises NameError.
        if "icelake" in server:
            server = "icelake"
        elif "milan" in server:
            server = "milan"
        # `mode` is stored in the sendfile/tls/ktls slots alike; only the
        # fields listed in Conf.__hash__/__eq__ matter for grouping.
        conf : Conf = Conf(server, None, None, None, None, affinity, mode, mode, mode, memloadgen, fs, None)
        # conf.server = server
        # conf.affinity = affinity
        # conf.sendfile = mode
        # conf.tls = mode
        # conf.ktls = mode
        # conf.memloadgen = memloadgen
        # conf.filepath = fs
        # conf.odirect = ""
        print("Processing category " + conf.to_string_graph() + " ... ")
        memloadgen_bps = []
        logs = []
        _enum_files(dir, logs)
        logs_bytes = []
        for log in logs:
            if log.endswith(".txt"):
                # iperf JSON output files
                with open(log, "r") as f:
                    buf = f.read()
                    if len(buf) > 0:
                        logs_bytes.append(buf)
                    else:
                        print("Warning: log file empty for " + log)
            elif "memloadgen" in log:
                # memloadgen output; parse bps over the 20-30s window
                with open(log, "r") as f:
                    buf = f.read()
                    if len(buf) > 0:
                        parser = par.memloadgen_parser(buf, 20, 30)
                        memloadgen_bps.append(parser.bps)
                    else:
                        print("Warning: memloadgen file empty for " + log)
        try:
            parser = par.iperf_json_parser(logs_bytes)
        except Exception as e:
            # NOTE(review): `log` here is the last file visited by the loop
            # above (unbound if `logs` was empty), not necessarily the file
            # that failed to parse.
            print("Warning: failed to parse " + log + ". err: " + str(e))
            return
        if not (conf in dat):
            dat[conf] = ConfData(conf)
        dat[conf].add_run(parser.aggregate_egress_bps, memloadgen_bps)
        return
    # Not a leaf: recurse into subdirectories.
    for subdir in os.listdir(dir):
        each_dir = os.path.join(dir, subdir)
        if not os.path.isfile(each_dir):
            process_directory(each_dir, dat)