Skip to content
This repository has been archived by the owner on Nov 19, 2022. It is now read-only.

Commit

Permalink
Add more benchmarking scripts
Browse files Browse the repository at this point in the history
  • Loading branch information
shravanrn committed Dec 11, 2019
1 parent 07e12da commit d571c60
Show file tree
Hide file tree
Showing 11 changed files with 703 additions and 116 deletions.
34 changes: 21 additions & 13 deletions newAnalyzeCPUMem.py
Original file line number Diff line number Diff line change
Expand Up @@ -11,6 +11,7 @@ def __init__(self, site):
self.site = site
self.cgmem = []
self.mem = []
self.webcmem = []
self.cpu0 = []
self.cpu1 = []
self.cpu2 = []
Expand All @@ -30,6 +31,8 @@ def add_cgmem(self, cgmem):
self.sites[self.current_site].cgmem.append(int(cgmem))
def add_mem(self, mem):
    # Append a memory sample (coerced to int) to the site currently being parsed.
    self.sites[self.current_site].mem.append(int(mem))
def add_webcmem(self, webcmem):
    # Append a "WebCMem" sample (coerced to int) to the current site.
    # NOTE(review): presumably web-content-process memory — confirm with the
    # producer of the "WebCMem: " log lines.
    self.sites[self.current_site].webcmem.append(int(webcmem))
def add_cpu0(self, cpu0):
    # Append a CPU-core-0 sample (coerced to float) to the current site.
    self.sites[self.current_site].cpu0.append(float(cpu0))
def add_cpu1(self, cpu1):
Expand All @@ -47,6 +50,9 @@ def handle_line(line, s):
elif 'CGMem: ' in line:
cgmem = line.split('CGMem: ')[1].strip()
s.add_cgmem(cgmem)
elif 'WebCMem: ' in line:
webcmem = line.split('WebCMem: ')[1].strip()
s.add_webcmem(webcmem)
elif 'Mem: ' in line:
mem = line.split('Mem: ')[1].strip()
s.add_mem(mem)
Expand Down Expand Up @@ -85,19 +91,21 @@ def print_final_results(s):
print('{')
print(' "site": "' + site + '",')

print(' "cgmems": ' + str(siteState.cgmem) + ',')
print(' "mems": ' + str(siteState.mem) + ',')
print(' "cpu0s": ' + str(siteState.cpu0) + ',')
print(' "cpu1s": ' + str(siteState.cpu1) + ',')
print(' "cpu2s": ' + str(siteState.cpu2) + ',')
print(' "cpu3s": ' + str(siteState.cpu3) + ',')

print(' "cgmem": ' + str(median(siteState.cgmem)) + ',')
print(' "mem": ' + str(median(siteState.mem)) + ',')
print(' "cpu0": ' + str(median(siteState.cpu0)) + ',')
print(' "cpu1": ' + str(median(siteState.cpu1)) + ',')
print(' "cpu2": ' + str(median(siteState.cpu2)) + ',')
print(' "cpu3": ' + str(median(siteState.cpu3)) )
print(' "cgmems": ' + str(siteState.cgmem) + ',')
print(' "mems": ' + str(siteState.mem) + ',')
print(' "webcmems": ' + str(siteState.webcmem) + ',')
print(' "cpu0s": ' + str(siteState.cpu0) + ',')
print(' "cpu1s": ' + str(siteState.cpu1) + ',')
print(' "cpu2s": ' + str(siteState.cpu2) + ',')
print(' "cpu3s": ' + str(siteState.cpu3) + ',')

print(' "cgmem": ' + str(median(siteState.cgmem)) + ',')
print(' "mem": ' + str(median(siteState.mem)) + ',')
print(' "webcmem": ' + str(median(siteState.webcmem)) + ',')
print(' "cpu0": ' + str(median(siteState.cpu0)) + ',')
print(' "cpu1": ' + str(median(siteState.cpu1)) + ',')
print(' "cpu2": ' + str(median(siteState.cpu2)) + ',')
print(' "cpu3": ' + str(median(siteState.cpu3)) )

print('}')
print("} }")
Expand Down
86 changes: 67 additions & 19 deletions newAnalyzeCPUMemPart2.py
Original file line number Diff line number Diff line change
Expand Up @@ -25,6 +25,66 @@ def read(folder, filename):
input1 = f.read()
return input1

def computeSummary(outfile, sites, parsed1, parsed2, parsed3):
    """Write build-wide average overheads of the nacl and ps builds to outfile.

    parsed1, parsed2 and parsed3 are per-site metric dicts for the stock,
    nacl and ps builds respectively; overheads are averaged over all sites.
    """
    with open(outfile, "w") as f:
        # Memory metrics: cgroup ("Peak Mem") and plain resident memory.
        for label, key in (("Peak Mem", "cgmem"), ("Mem", "mem")):
            nacl_avg = average([overhead(parsed2[site][key], parsed1[site][key]) for site in sites])
            ps_avg = average([overhead(parsed3[site][key], parsed1[site][key]) for site in sites])
            f.write("{} nacl Overhead: {}\n".format(label, nacl_avg))
            f.write("{} ps Overhead: {}\n".format(label, ps_avg))

        # CPU: the ps build also reports extra load on cpu3, measured as
        # additional overhead relative to the stock build's cpu1.
        cpu_nacl = average([overhead(parsed2[site]["cpu1"], parsed1[site]["cpu1"]) for site in sites])
        cpu_ps = average([overhead(parsed3[site]["cpu1"], parsed1[site]["cpu1"]) for site in sites])
        cpu_ps_extra = average([additional_overhead(parsed3[site]["cpu3"], parsed1[site]["cpu1"]) for site in sites])
        f.write("CPU nacl Overhead: {}\n".format(cpu_nacl))
        f.write("CPU ps Overhead: {} + {}\n".format(cpu_ps, cpu_ps_extra))
def computeIndividual(outfile, sites, parsed1, parsed2, parsed3):
    """Write per-site overheads of the nacl and ps builds to outfile.

    Emits one blank-line-separated section per site, mirroring the metrics
    of computeSummary but without averaging across sites.
    """
    with open(outfile, "w") as f:
        for site in sites:
            f.write("Site: " + site + "\n")
            # Memory metrics: cgroup ("Peak Mem") and plain resident memory.
            for label, key in (("Peak Mem", "cgmem"), ("Mem", "mem")):
                f.write("{} nacl Overhead: {}\n".format(
                    label, overhead(parsed2[site][key], parsed1[site][key])))
                f.write("{} ps Overhead: {}\n".format(
                    label, overhead(parsed3[site][key], parsed1[site][key])))
            # CPU: ps reports extra load on cpu3 relative to stock cpu1.
            f.write("CPU nacl Overhead: {}\n".format(
                overhead(parsed2[site]["cpu1"], parsed1[site]["cpu1"])))
            f.write("CPU ps Overhead: {} + {}\n".format(
                overhead(parsed3[site]["cpu1"], parsed1[site]["cpu1"]),
                additional_overhead(parsed3[site]["cpu3"], parsed1[site]["cpu1"])))
            f.write("\n")

def main():
if len(sys.argv) < 2:
print("Expected " + sys.argv[0] + " inputFolderName")
Expand All @@ -34,33 +94,21 @@ def main():
input1 = read(inputFolderName, "static_stock_cpu_mem_analysis.json")
input2 = read(inputFolderName, "new_nacl_cpp_cpu_mem_analysis.json")
input3 = read(inputFolderName, "new_ps_cpp_cpu_mem_analysis.json")
input4 = read(inputFolderName, "new_ps_cpp_mutex_cpu_mem_analysis.json")
# input4 = read(inputFolderName, "new_ps_cpp_mutex_cpu_mem_analysis.json")

parsed1 = json.loads(input1)["data"]
parsed2 = json.loads(input2)["data"]
parsed3 = json.loads(input3)["data"]
parsed4 = json.loads(input4)["data"]
# parsed4 = json.loads(input4)["data"]

sites = [parsed for parsed in parsed1]
sites.remove("about:blank")

cgmems_overhead_nacl = [ overhead(parsed2[site]["cgmem"], parsed1[site]["cgmem"]) for site in sites ]
cgmems_overhead_ps = [ overhead(parsed3[site]["cgmem"], parsed1[site]["cgmem"]) for site in sites ]
cgmems_overhead_ps_mutex = [ overhead(parsed4[site]["cgmem"], parsed1[site]["cgmem"]) for site in sites ]

print("Mem nacl Overhead: " + str(average(cgmems_overhead_nacl)))
print("Mem ps Overhead: " + str(average(cgmems_overhead_ps)))
print("Mem ps_mutex Overhead: " + str(average(cgmems_overhead_ps_mutex)))

primarycpu_overhead_nacl = [ overhead(parsed2[site]["cpu1"], parsed1[site]["cpu1"]) for site in sites ]
primarycpu_overhead_ps_mutex = [ overhead(parsed4[site]["cpu3"], parsed1[site]["cpu1"]) for site in sites ]

primarycpu_overhead_ps = [ overhead(parsed3[site]["cpu1"], parsed1[site]["cpu1"]) for site in sites ]
secondarycpu_overhead_ps = [ additional_overhead(parsed3[site]["cpu3"], parsed1[site]["cpu1"]) for site in sites ]
summaryFile = os.path.join(inputFolderName, "analysis.txt")
computeSummary(summaryFile, sites, parsed1, parsed2, parsed3)

print("CPU nacl Overhead: " + str(average(primarycpu_overhead_nacl)))
print("CPU ps Overhead: " + str(average(primarycpu_overhead_ps)) + " + " + str(average(secondarycpu_overhead_ps)))
print("CPU ps_mutex Overhead: " + str(average(primarycpu_overhead_ps_mutex)))
individualFile = os.path.join(inputFolderName, "analysis_individual.txt")
computeIndividual(individualFile, sites, parsed1, parsed2, parsed3)


main()
main()
107 changes: 107 additions & 0 deletions newAnalyzeMacroPerf.py
Original file line number Diff line number Diff line change
@@ -0,0 +1,107 @@
#!/usr/bin/env python3

import os
import sys
import simplejson as json
from statistics import mean, median, stdev

def read(folder, filename):
    """Return the entire text content of the file folder/filename."""
    path = os.path.join(folder, filename)
    with open(path) as handle:
        return handle.read()

def readJson(folder, filename):
    """Read folder/filename and return its contents parsed as JSON."""
    return json.loads(read(folder, filename))

def readJsonTestValue(folder, filename):
    """Return the reported value of the first subtest of the first suite
    in a JSON benchmark-results file (folder/filename).

    Raises KeyError/IndexError if the file does not have the expected
    "suites"/"subtests" structure.
    """
    # Renamed local from `input`, which shadowed the builtin of the same name.
    parsed = readJson(folder, filename)
    return parsed["suites"][0]["subtests"][0]["value"]

def writeJson(folder, filename, obj):
    """Serialize obj as 4-space-indented JSON to folder/filename,
    followed by a trailing newline. Overwrites any existing file.
    """
    # Renamed local from `str`, which shadowed the builtin type of the same name.
    text = json.dumps(obj, indent=4)
    full_file = os.path.join(folder, filename)
    with open(full_file, "w") as text_file:
        text_file.write("%s\n" % text)

def writeMetrics(fileobj, label, arr):
    """Append mean, median and sample standard deviation of arr to fileobj,
    one "Stat label: value" line per statistic.
    """
    for stat_name, stat_fn in (("Mean", mean), ("Median", median), ("Std_Dev", stdev)):
        fileobj.write("{} {}: {}\n".format(stat_name, label, stat_fn(arr)))

def main():
    """Collect page-render latency and memory results for each build/site,
    dump them as JSON, and write overhead metrics vs. the stock build."""
    if len(sys.argv) < 2:
        print("Expected " + sys.argv[0] + " inputFolderName")
        exit(1)
    inputFolderName = sys.argv[1]

    # Build name -> result-file name template; "{}" is filled with
    # "<site index>.json" or "<site index>_mem.txt" below.
    builds = {
        "stock" : "static_stock_external_page_render{}",
        "sfi" : "new_nacl_cpp_external_page_render{}",
        "psspin" : "new_ps_cpp_external_page_render{}"
    }

    # Non-baseline builds, whose overhead is computed relative to "stock".
    other_builds = [build for build in builds.keys()]
    other_builds.remove("stock")

    sites = [
        "google.com",
        "yelp.com",
        "eurosport.com",
        "legacy.com",
        "reddit.com",
        "seatguru.com",
        "twitch.tv",
        "amazon.com",
        "economist.com",
        "espn.com",
        "wowprogress.com"
    ]

    # NOTE(review): range(1, len(sites)) pairs file index i with sites[i],
    # so sites[0] ("google.com") is never read — presumably result files are
    # numbered from 1 and google.com has no file; confirm against the
    # benchmark runner that produced these files.
    latency_output = {}
    for build, build_output in builds.items():
        latency_output[build] = []
        for i in range(1, len(sites)):
            siteobj = {
                "name" : sites[i],
                "value" : readJsonTestValue(inputFolderName, build_output.format(str(i) + ".json"))
            }
            latency_output[build].append(siteobj)

    writeJson(inputFolderName, "page_latency.json", latency_output)

    # Latency overhead (percent vs. stock) per non-baseline build.
    with open(os.path.join(inputFolderName, "page_latency_metrics.txt"), "w") as text_file:
        for build in other_builds:
            overheads = []
            for i in range(1, len(sites)):
                stock_val = readJsonTestValue(inputFolderName, builds["stock"].format(str(i) + ".json"))
                build_val = readJsonTestValue(inputFolderName, builds[build].format(str(i) + ".json"))
                overhead = 100.0 * ((build_val/stock_val) - 1.0)
                overheads.append(overhead)
            writeMetrics(text_file, "{} latency".format(build), overheads)

    # Same structure for memory, read from the "<i>_mem.txt" companion files.
    memory_output = {}
    for build, build_output in builds.items():
        memory_output[build] = []
        for i in range(1, len(sites)):
            siteobj = {
                "name" : sites[i],
                "value" : int(read(inputFolderName, build_output.format(str(i) + "_mem.txt")))
            }
            memory_output[build].append(siteobj)

    writeJson(inputFolderName, "page_memory_overhead.json", memory_output)

    # Memory overhead (percent vs. stock) per non-baseline build.
    with open(os.path.join(inputFolderName, "page_memory_overhead_metrics.txt"), "w") as text_file:
        for build in other_builds:
            overheads = []
            for i in range(1, len(sites)):
                stock_val = int(read(inputFolderName, builds["stock"].format(str(i) + "_mem.txt")))
                build_val = int(read(inputFolderName, builds[build].format(str(i) + "_mem.txt")))
                overhead = 100.0 * ((build_val/stock_val) - 1.0)
                overheads.append(overhead)
            writeMetrics(text_file, "{} memory overhead".format(build), overheads)

main()
51 changes: 51 additions & 0 deletions newAnalyzeMicroImagesPart2.py
Original file line number Diff line number Diff line change
@@ -0,0 +1,51 @@
#!/usr/bin/env python3

import math
import sys
import csv
import os
from urllib.parse import urlparse
import simplejson as json

def getMedian(els, group):
    """Return the "Median" of the first element whose "Group" contains group,
    as a float with thousands separators stripped. Exits the process if no
    element matches (treated as an impossible state).
    """
    matching = next((el for el in els if group in el["Group"]), None)
    if matching is None:
        sys.exit("Unreachable")
    return float(matching["Median"].replace(",", ""))

def computeSummary(summaryFile, ext, parsed1, parsed2, parsed3):
    """Write a CSV table of render-time ratios (sandboxed / stock) for each
    image quality x resolution combination of the given extension.

    parsed1/parsed2/parsed3 hold the stock, SFI (nacl) and process-sandbox
    (ps) results. Labels contain literal "\\n" for the downstream plotter.
    """
    with open(summaryFile, "w") as f:
        writer = csv.writer(f)
        writer.writerow(["Image", "SFI sandbox", "Process sandbox"])
        for qual in ("best", "default", "none"):
            for res, label in (("1920", "\\n1280p"), ("480", "{0}\\n320p"), ("240", "\\n135p")):
                group_suffix = "{}_{}.{}".format(qual, res, ext)
                stock_val = getMedian(parsed1, group_suffix)
                nacl_val = getMedian(parsed2, group_suffix)
                ps_val = getMedian(parsed3, group_suffix)
                writer.writerow([label.replace("{0}", qual), nacl_val / stock_val, ps_val / stock_val])

def read(folder, filename):
    """Return the whole text content of folder/filename as a string."""
    with open(os.path.join(folder, filename)) as source:
        contents = source.read()
    return contents

def main():
    """Entry point: parse the three terminal-analysis JSON files in the
    given folder and emit jpeg/png performance-ratio tables next to them."""
    if len(sys.argv) < 2:
        print("Expected " + sys.argv[0] + " inputFolderName")
        exit(1)
    inputFolderName = sys.argv[1]

    # Stock, SFI (nacl) and process-sandbox (ps) results, in that order.
    parsed = [
        json.loads(read(inputFolderName, name))["data"]
        for name in (
            "static_stock_terminal_analysis.json",
            "new_nacl_cpp_terminal_analysis.json",
            "new_ps_cpp_terminal_analysis.json",
        )
    ]

    for ext in ("jpeg", "png"):
        computeSummary(os.path.join(inputFolderName, ext + "_perf.dat"), ext, *parsed)

main()
36 changes: 3 additions & 33 deletions newAnalyzePerf.py
Original file line number Diff line number Diff line change
Expand Up @@ -5,24 +5,12 @@
import csv
import os
from urllib.parse import urlparse

def is_line_required(line):
    """Return True for log lines that carry benchmark data (FFBuild markers,
    "name"/"value" JSON fragments, or Final JPEG/PNG timings), excluding
    lines tagged with "INFO -"."""
    interesting = (
        line.startswith("FFBuild")
        or '"name"' in line
        or '"value"' in line
        or "Final JPEG_Time" in line
        or "Final PNG_Time" in line
    )
    return interesting and "INFO -" not in line
from statistics import mean, median, stdev

class PerfState:
    """Accumulates benchmark timing samples, keyed by group name."""

    def __init__(self):
        # Maps group name -> list of recorded timing values.
        self.timings = dict()


def addTestValue(s, group, val):
if group not in s.timings:
s.timings[group] = []
Expand All @@ -42,23 +30,6 @@ def handle_line(line, s, skipFirstHost):
addTestValue(s, group, val)
return s


def average(values):
    """Return the arithmetic mean of values as a float, or 0 if empty.

    Parameter renamed from `list`, which shadowed the builtin type.
    """
    n = len(values)
    if n < 1:
        return 0
    return float(sum(values)) / n


def median(values):
    """Return the median of values (0 if empty).

    Odd-length input returns the middle element unchanged; even-length input
    returns the float mean of the two middle elements. Parameter renamed
    from `list`, which shadowed the builtin type.
    """
    n = len(values)
    if n < 1:
        return 0
    ordered = sorted(values)
    if n % 2 == 1:
        return ordered[n // 2]
    return sum(ordered[n // 2 - 1:n // 2 + 1]) / 2.0

def print_final_results(s):
print('{ "data" : [')
first = True
Expand All @@ -75,11 +46,10 @@ def print_final_results(s):
timeList = times[5:] #filter first 5
print(' "Filtered Times": ' + str(timeList) + ",")

avg = average(timeList)
avg = mean(timeList)
print(' "Average": "' + str("{0:,.2f}".format(avg)) + '",')

variance = list(map(lambda x: (x - avg)**2, timeList))
stdDev = math.sqrt(average(variance))
stdDev = stdev(timeList)
print(' "StdDev": "' + str("{0:,.2f}".format(stdDev)) + '",')

m = median(timeList)
Expand Down
Loading

0 comments on commit d571c60

Please sign in to comment.