# generated from opentensor/bittensor-subnet-template
---
# Reference performance figures per supported GPU model. These baselines are
# what a miner's reported hardware is measured against.
gpu_performance:
  # Peak FP16 throughput per model, in TFLOPS (from the key name).
  GPU_TFLOPS_FP16:
    NVIDIA H200: 610
    NVIDIA H100 80GB HBM3: 570
    NVIDIA H100: 330
    NVIDIA A100-SXM4-80GB: 238.8
    NVIDIA A100 80GB PCIe: 197
    NVIDIA A100-SXM4-40GB: 257
    NVIDIA L40s: 171
    NVIDIA L40: 116
    NVIDIA RTX 6000 Ada Generation: 112
    NVIDIA RTX A6000: 48.5
    NVIDIA RTX A5000: 60.0
    NVIDIA RTX A4500: 44.0
    NVIDIA RTX 4000 Ada Generation: 70
    NVIDIA A40: 40.0
    NVIDIA RTX 4090: 157
    NVIDIA GeForce RTX 3090: 66.4
    NVIDIA L4: 51
  # Peak FP32 throughput per model, in TFLOPS (from the key name).
  GPU_TFLOPS_FP32:
    NVIDIA H200: 49.6
    NVIDIA H100 80GB HBM3: 49.0
    NVIDIA H100: 37.2
    NVIDIA A100-SXM4-80GB: 18.2
    NVIDIA A100 80GB PCIe: 16.9
    NVIDIA A100-SXM4-40GB: 18.2
    NVIDIA L40s: 35.5
    NVIDIA L40: 27.0
    NVIDIA RTX 6000 Ada Generation: 26.0
    NVIDIA RTX A6000: 21.28
    NVIDIA GeForce RTX 3090: 21.7
    NVIDIA RTX A5000: 15.8
    NVIDIA RTX A4500: 14.3
    NVIDIA RTX 4000 Ada Generation: 14.6
    NVIDIA A40: 22.8
    NVIDIA RTX 4090: 48.5
    NVIDIA L4: 9
  # Per-model VRAM figure ("AVRAM" — presumably available VRAM).
  # NOTE(review): values do not match full card VRAM (e.g. H200 has 141 GB
  # but is listed as 68.72) — confirm the unit/derivation in the validator
  # code before relying on these numbers.
  GPU_AVRAM:
    NVIDIA H200: 68.72
    NVIDIA H100 80GB HBM3: 34.36
    NVIDIA H100: 34.36
    NVIDIA A100-SXM4-80GB: 34.36
    NVIDIA A100 80GB PCIe: 34.36
    NVIDIA A100-SXM4-40GB: 17.18
    NVIDIA L40s: 17.18
    NVIDIA L40: 17.18
    NVIDIA RTX 6000 Ada Generation: 17.18
    NVIDIA RTX A6000: 17.18
    NVIDIA RTX A5000: 8.59
    NVIDIA RTX A4500: 8.59
    NVIDIA RTX 4000 Ada Generation: 8.59
    NVIDIA A40: 17.18
    NVIDIA RTX 4090: 8.59
    NVIDIA GeForce RTX 3090: 8.59
    NVIDIA L4: 8.59
# Pairs of GPU models whose benchmark results are allowed to match within
# tolerance (presumably: models close enough in performance to be mistaken
# for one another — confirm semantics in the validator's identification
# logic). Every pair is listed symmetrically: A -> B and B -> A.
gpu_tolerance_pairs:
  NVIDIA L40: NVIDIA RTX 6000 Ada Generation
  NVIDIA RTX 6000 Ada Generation: NVIDIA L40
  NVIDIA RTX 4000 Ada Generation: NVIDIA RTX A5000
  NVIDIA RTX A5000: NVIDIA RTX 4000 Ada Generation
  NVIDIA A100 80GB PCIe: NVIDIA A100-SXM4-80GB
  NVIDIA A100-SXM4-80GB: NVIDIA A100 80GB PCIe
  NVIDIA H100 80GB HBM3: NVIDIA H100
  NVIDIA H100: NVIDIA H100 80GB HBM3
  NVIDIA A40: NVIDIA RTX A6000
  NVIDIA RTX A6000: NVIDIA A40
# Relative score per GPU model, highest hardware first.
# NOTE(review): presumably used to weight miner rewards/allocation — confirm
# against the validator scoring code.
gpu_scores:
  NVIDIA H200: 4.0
  NVIDIA H100 80GB HBM3: 3.30
  NVIDIA H100: 2.80
  NVIDIA A100-SXM4-80GB: 1.90
  NVIDIA A100 80GB PCIe: 1.65
  NVIDIA L40s: 1.10
  NVIDIA RTX 6000 Ada Generation: 0.90
  NVIDIA L40: 1.0
  NVIDIA RTX A6000: 0.78
  NVIDIA RTX 4090: 0.68
  NVIDIA A40: 0.39
  NVIDIA GeForce RTX 3090: 0.43
  NVIDIA L4: 0.43
  NVIDIA RTX A5000: 0.36
  NVIDIA RTX A4500: 0.34
# Settings for the merkle-tree proof-of-GPU (PoG) benchmark.
merkle_proof:
  # Script executed on the miner side for the merkle-tree benchmark.
  miner_script_path: "neurons/Validator/miner_script_m_merkletree.py"
  # Allowed deviation when checking benchmark timing.
  # NOTE(review): unit not stated here — presumably seconds; confirm.
  time_tolerance: 5
  # Edge length of the submatrices used by the benchmark.
  submatrix_size: 512
  # Hash algorithm for merkle-tree construction/verification.
  hash_algorithm: "sha256"
  pog_retry_limit: 22      # max PoG attempts per miner before giving up
  pog_retry_interval: 60   # seconds between retries
  max_workers: 64          # max concurrent workers
  max_random_delay: 900    # upper bound on random delay, in seconds