forked from hclhkbu/GaussianK-SGD
-
Notifications
You must be signed in to change notification settings - Fork 0
/
utils.py
139 lines (115 loc) · 3.43 KB
/
utils.py
1
2
3
4
5
6
7
8
9
10
11
12
13
14
15
16
17
18
19
20
21
22
23
24
25
26
27
28
29
30
31
32
33
34
35
36
37
38
39
40
41
42
43
44
45
46
47
48
49
50
51
52
53
54
55
56
57
58
59
60
61
62
63
64
65
66
67
68
69
70
71
72
73
74
75
76
77
78
79
80
81
82
83
84
85
86
87
88
89
90
91
92
93
94
95
96
97
98
99
100
101
102
103
104
105
106
107
108
109
110
111
112
113
114
115
116
117
118
119
120
121
122
123
124
125
126
127
128
129
130
131
132
133
134
135
136
137
138
import hashlib
import time
import os
import numpy as np
import scipy.stats as stats
def gen_random_id():
    """Return a pseudo-random 64-char hex id derived from the current time.

    Returns:
        str: hex digest of SHA-256 over ``str(time.time())``.
    """
    id_ = hashlib.sha256()
    # BUGFIX: hashlib.update() requires bytes on Python 3; passing the raw
    # str raised TypeError.  Encoding keeps Python 2 behavior identical.
    id_.update(str(time.time()).encode('utf-8'))
    return id_.hexdigest()
def create_path(relative_path):
    """Best-effort creation of a directory located relative to this file.

    Args:
        relative_path (str): path joined onto this module's directory.

    Creation failures (races, permissions) are deliberately ignored so
    callers can treat this as idempotent setup.
    """
    dirname = os.path.dirname(__file__)
    filename = os.path.join(dirname, relative_path)
    if not os.path.isdir(filename):
        try:
            os.makedirs(filename)
        # BUGFIX: narrowed the bare `except:` (which also swallowed
        # KeyboardInterrupt/SystemExit) to OSError -- the only error
        # makedirs raises -- keeping the original best-effort intent.
        except OSError:
            pass
def update_fontsize(ax, fontsize=12.):
    """Apply one font size to an axes' title, axis labels and tick labels."""
    targets = [ax.title, ax.xaxis.label, ax.yaxis.label]
    targets.extend(ax.get_xticklabels())
    targets.extend(ax.get_yticklabels())
    for text_obj in targets:
        text_obj.set_fontsize(fontsize)
def autolabel(rects, ax, label, rotation=90):
    """
    Attach a text label above each bar displaying its height
    """
    for bar in rects:
        top = bar.get_y() + bar.get_height()
        x_center = bar.get_x() + bar.get_width() / 2.
        # place the text slightly (3%) above the top edge of the bar
        ax.text(x_center, 1.03 * top, label,
                ha='center', va='bottom', rotation=rotation)
def topk(tensor, k):
    """Return the k largest-magnitude elements of a 1-D array.

    Args:
        tensor: 1-D numpy array.
        k (int): number of elements to keep.

    Returns:
        (indexes, values): indexes ordered by ascending |value|, and the
        corresponding entries of ``tensor``.
    """
    # BUGFIX: with k == 0 the original slice [-0:] selected the WHOLE
    # array instead of nothing; guard non-positive k explicitly.
    if k <= 0:
        empty = np.empty(0, dtype=np.intp)
        return empty, tensor[empty]
    indexes = np.abs(tensor).argsort()[-k:]
    return indexes, tensor[indexes]
def get_approximate_sigma_scale(density):
    """Map a sparsification density (fraction of gradients kept) to a
    sigma multiplier for thresholding.  Denser -> smaller multiplier."""
    # Guard-clause chain: each test already implies the upper bound
    # excluded by the previous return.
    if density > 0.7:
        return 0.5
    if density > 0.05:
        return 1.5
    if density > 0.01:
        return 2.0
    return 3.0
def force_insert_item(d, key, val):
    """Append val to the list stored at d[key], creating the list first
    if the key is not present yet."""
    d.setdefault(key, []).append(val)
# Per-parameter cost coefficient (seconds) for the top-k selection model
# below; measured on a P102-100 GPU.
s=2.18896957e-10 #P102-100
# (a, b) = (latency, per-byte) coefficients of the t = a + b*size allgather
# model, keyed by the number of workers P.  "small" table: messages below
# 1 MiB over Gigabit Ethernet.
GbE_multi_p_ab_small = {
    2: (1.6e-3, 1.0e-8),
    4: (2.7e-3, 1.3e-8),
    8: (4.0e-3, 1.5e-8),
    16: (1.7e-3, 1.7e-8) # ImageNet
}
# "large" table: Gigabit Ethernet messages of 1 MiB or more.
GbE_multi_p_ab_large = {
    2: (4.4e-3, 5.8e-9),
    4: (5.6e-3, 7.4e-9),
    8: (7.68e-3, 8.2e-9),
    16: (2.1e-3, 1.7e-8) # good for imagenet
}
# Coefficients measured over 10-Gigabit Ethernet.
tenGbE_multi_p_ab = {
    2: (1.5e-5, 5.7e-11),
    4: (3.6e-5, 1.1e-10),
    8: (8.5e-5, 1.4e-10),
    16: (1.4e-4, 2.0e-10)
}
def topk_perf_model(x, s=s):
    """
    x is the number of parameters
    Return: s * x * log2(x)
    """
    # log2 is undefined at 0, so the empty case short-circuits to 0.
    return 0.0 if x == 0.0 else s * x * np.log2(x)
def allgather_perf_model(x, P, density=0.001, eth='GbE'):
    """
    Model the time of a sparse allgather of x parameters over P workers.

    Args:
        x: number of parameters.
        P: number of workers (must be a key of the coefficient tables).
        density: fraction of the gradient actually transmitted.
        eth: network type, 'GbE' (default) or '10GbE'.

    Return: t = (a + b * size) * 2 with (a, b) taken from the measured
    tables by worker count, message size and network type.
    """
    if x == 0:
        return 0.0
    # message size in bytes: 4-byte values from each of the P workers,
    # scaled down by the sparsification density
    size = x * P * 4 * density
    if eth == '10GbE':
        # BUGFIX: the `eth` parameter was previously ignored and the
        # tenGbE_multi_p_ab table defined above was never selected.
        multi_p_ab = tenGbE_multi_p_ab
    elif size >= 1024*1024:
        multi_p_ab = GbE_multi_p_ab_large
    else:
        multi_p_ab = GbE_multi_p_ab_small
    a, b = multi_p_ab[P]
    return (a + b * size) * 2
def predict_density_with_size_and_computation(m, comp_time, P):
    # Predict a gradient density for m parameters given the computation time
    # and P workers.  NOTE(review): the three helpers below are defined but
    # never called -- the function currently ALWAYS returns the fixed
    # density 0.001.  The modeling code appears disabled; confirm intent
    # before relying on the helpers.
    alpha = 4*0.436e-3   # latency term of the allreduce model (seconds)
    beta = 4*9e-6*1e-3   # per-element transfer term (seconds)
    def _denseallreduce_model(P, m):
        # Ring-allreduce cost of a dense message of m elements (unused).
        return 2*(P-1)*alpha + 2* (P-1)/P * m * beta
    def _sparseallreduce_model(P, m, rho=0.001):
        # Sparse allreduce cost at density rho (unused).
        # NOTE(review): np.log2(P) is not multiplied by alpha here, unlike
        # the helper below -- looks inconsistent; verify before use.
        return np.log2(P) + 2 * (P - 1) * rho * m * beta
    def _proper_rho_with_sparse_allreduce(P, m, comp_time):
        # Density that would hide communication behind comp_time, clamped
        # to [0.001, 1.0] with 0.05 as the >1 fallback (unused).
        rho = 0.001
        t = comp_time - np.log2(P) * alpha
        if t <= 0:
            return rho
        rho = t/ (2*(P-1)*beta*m)
        if rho > 1.0:
            rho = 0.05
        rho = max(rho, 0.001)
        return rho
    return 0.001
def predict_allreduce_time_with_size(alpha, beta, size, P):
    """Linear allreduce time model t = alpha + beta * size; an empty
    message costs nothing.  (P is accepted but unused by this model.)"""
    return 0.0 if size == 0 else alpha + beta * size
def gen_threshold_from_normal_distribution(p_value, mu, sigma):
    """Two-sided thresholds on N(mu, sigma**2): returns (lower, upper)
    placed symmetrically around mu at the (1 - p_value)/2 quantile."""
    z = stats.norm.ppf((1 - p_value) / 2)
    lower = mu + z * sigma
    upper = mu - z * sigma
    return lower, upper