From cd07bfaf737306df21576be3b3ec6eae6fe490f2 Mon Sep 17 00:00:00 2001
From: Fabrice Normandin
Date: Tue, 26 Nov 2024 13:59:14 -0500
Subject: [PATCH] Adjust regression tests (again)

Signed-off-by: Fabrice Normandin
---
 .../llm_finetuning.yaml      | 3096 ++++++++---------
 .../cuda/llm_finetuning.yaml |  404 +--
 2 files changed, 1750 insertions(+), 1750 deletions(-)

diff --git a/.regression_files/project/algorithms/llm_finetuning_test/test_backward_pass_is_reproducible/llm_finetuning.yaml b/.regression_files/project/algorithms/llm_finetuning_test/test_backward_pass_is_reproducible/llm_finetuning.yaml
index 5f80c367..e1932620 100644
--- a/.regression_files/project/algorithms/llm_finetuning_test/test_backward_pass_is_reproducible/llm_finetuning.yaml
+++ b/.regression_files/project/algorithms/llm_finetuning_test/test_backward_pass_is_reproducible/llm_finetuning.yaml
@@ -10,3277 +10,3277 @@ batch.attention_mask:
 batch.input_ids:
   device: cuda:0
   max: 50118
-  mean: '5.265e+03'
+  mean: '5.447e+03'
   min: 2
   shape:
   - 8
   - 256
-  sum: 10781837
+  sum: 11154886
 batch.labels:
   device: cuda:0
   max: 50118
-  mean: '5.265e+03'
+  mean: '5.447e+03'
   min: 2
   shape:
   - 8
   - 256
-  sum: 10781837
+  sum: 11154886
 grads.network.model.decoder.embed_positions.weight:
   device: cuda:0
-  max: '2.625e-02'
-  mean: '1.182e-07'
-  min: '-2.448e-02'
+  max: '2.549e-02'
+  mean: '2.795e-07'
+  min: '-2.530e-02'
   shape:
   - 2050
   - 1024
-  sum: '2.482e-01'
+  sum: '5.867e-01'
 grads.network.model.decoder.embed_tokens.weight:
   device: cuda:0
-  max: '7.352e-01'
-  mean: '-1.859e-07'
-  min: '-9.014e-01'
+  max: '7.65e-01'
+  mean: '-2.928e-07'
+  min: '-9.832e-01'
   shape:
   - 50272
   - 512
-  sum: '-4.786e+00'
+  sum: '-7.537e+00'
 grads.network.model.decoder.layers.0.fc1.bias:
   device: cuda:0
-  max: '2.674e-03'
-  mean: '2.379e-07'
-  min: '-6.869e-03'
+  max: '2.624e-03'
+  mean: '-2.445e-06'
+  min: '-8.882e-03'
   shape:
   - 4096
-  sum: '9.743e-04'
+  sum: '-1.001e-02'
 grads.network.model.decoder.layers.0.fc1.weight:
   device: cuda:0
-  max: '9.024e-02'
-  mean: '-4.828e-10'
-  min: '-1.327e-01'
+  max: '8.724e-02'
+  mean: '4.963e-09'
+  min: '-1.222e-01'
   shape:
   - 4096
   - 1024
-  sum: '-2.025e-03'
+  sum: '2.082e-02'
 grads.network.model.decoder.layers.0.fc2.bias:
   device: cuda:0
-  max: '8.25e-03'
-  mean: '1.455e-11'
-  min: '-8.836e-03'
+  max: '1.031e-02'
+  mean: '7.276e-12'
+  min: '-1.265e-02'
   shape:
   - 1024
-  sum: '1.490e-08'
+  sum: '7.451e-09'
 grads.network.model.decoder.layers.0.fc2.weight:
   device: cuda:0
-  max: '1.270e-02'
-  mean: '5.684e-14'
-  min: '-1.145e-02'
+  max: '1.836e-02'
+  mean: '0.e+00'
+  min: '-1.480e-02'
   shape:
   - 1024
   - 4096
-  sum: '2.384e-07'
+  sum: '0.e+00'
 grads.network.model.decoder.layers.0.final_layer_norm.bias:
   device: cuda:0
-  max: '8.875e-03'
-  mean: '-1.687e-06'
-  min: '-9.341e-03'
+  max: '1.124e-02'
+  mean: '2.244e-06'
+  min: '-1.343e-02'
   shape:
   - 1024
-  sum: '-1.728e-03'
+  sum: '2.298e-03'
 grads.network.model.decoder.layers.0.final_layer_norm.weight:
   device: cuda:0
-  max: '1.644e-02'
-  mean: '-9.44e-06'
-  min: '-9.016e-03'
+  max: '9.238e-03'
+  mean: '-1.765e-05'
+  min: '-5.406e-02'
   shape:
   - 1024
-  sum: '-9.666e-03'
+  sum: '-1.807e-02'
 grads.network.model.decoder.layers.0.self_attn.k_proj.bias:
   device: cuda:0
-  max: '6.366e-11'
-  mean: '2.163e-13'
-  min: '-8.458e-11'
+  max: '1.455e-10'
+  mean: '1.036e-12'
+  min: '-1.673e-10'
   shape:
   - 1024
-  sum: '2.215e-10'
+  sum: '1.061e-09'
 grads.network.model.decoder.layers.0.self_attn.k_proj.weight:
   device: cuda:0
-  max: '1.611e-04'
-  mean: '4.242e-09'
-  min: '-1.314e-04'
+  max: '1.895e-04'
+  mean: '6.07e-11'
+  min:
'-1.679e-04' shape: - 1024 - 1024 - sum: '4.448e-03' + sum: '6.365e-05' grads.network.model.decoder.layers.0.self_attn.out_proj.bias: device: cuda:0 - max: '1.969e-01' - mean: '1.164e-10' - min: '-2.229e-01' + max: '2.459e-01' + mean: '-8.149e-10' + min: '-2.594e-01' shape: - 1024 - sum: '1.192e-07' + sum: '-8.345e-07' grads.network.model.decoder.layers.0.self_attn.out_proj.weight: device: cuda:0 - max: '8.329e-03' - mean: '-6.750e-14' - min: '-7.267e-03' + max: '7.433e-03' + mean: '1.705e-13' + min: '-7.011e-03' shape: - 1024 - 1024 - sum: '-7.078e-08' + sum: '1.788e-07' grads.network.model.decoder.layers.0.self_attn.q_proj.bias: device: cuda:0 - max: '3.655e-04' - mean: '1.504e-07' - min: '-4.036e-04' + max: '4.872e-04' + mean: '3.458e-07' + min: '-5.13e-04' shape: - 1024 - sum: '1.54e-04' + sum: '3.541e-04' grads.network.model.decoder.layers.0.self_attn.q_proj.weight: device: cuda:0 - max: '3.66e-04' - mean: '4.723e-09' - min: '-3.944e-04' + max: '3.873e-04' + mean: '3.472e-09' + min: '-4.093e-04' shape: - 1024 - 1024 - sum: '4.953e-03' + sum: '3.641e-03' grads.network.model.decoder.layers.0.self_attn.v_proj.bias: device: cuda:0 - max: '1.332e-01' - mean: '6.213e-04' - min: '-1.3e-01' + max: '1.222e-01' + mean: '5.112e-04' + min: '-1.374e-01' shape: - 1024 - sum: '6.362e-01' + sum: '5.235e-01' grads.network.model.decoder.layers.0.self_attn.v_proj.weight: device: cuda:0 - max: '1.111e-01' - mean: '3.644e-07' - min: '-7.994e-02' + max: '7.942e-02' + mean: '3.069e-07' + min: '-7.008e-02' shape: - 1024 - 1024 - sum: '3.821e-01' + sum: '3.218e-01' grads.network.model.decoder.layers.0.self_attn_layer_norm.bias: device: cuda:0 - max: '8.891e-03' - mean: '-1.263e-05' - min: '-1.024e-02' + max: '1.182e-02' + mean: '-1.809e-05' + min: '-1.26e-02' shape: - 1024 - sum: '-1.293e-02' + sum: '-1.852e-02' grads.network.model.decoder.layers.0.self_attn_layer_norm.weight: device: cuda:0 - max: '1.566e-02' - mean: '3.934e-06' - min: '-9.343e-03' + max: '9.642e-03' + mean: '-9.916e-07' + min: '-4.965e-02' shape: - 1024 - sum: '4.028e-03' + sum: '-1.015e-03' grads.network.model.decoder.layers.1.fc1.bias: device: cuda:0 - max: '3.689e-03' - mean: '1.177e-06' - min: '-4.497e-03' + max: '5.562e-03' + mean: '-1.470e-06' + min: '-7.369e-03' shape: - 4096 - sum: '4.822e-03' + sum: '-6.023e-03' grads.network.model.decoder.layers.1.fc1.weight: device: cuda:0 - max: '6.621e-02' - mean: '-2.389e-09' - min: '-8.067e-02' + max: '6.877e-02' + mean: '2.984e-09' + min: '-9.409e-02' shape: - 4096 - 1024 - sum: '-1.002e-02' + sum: '1.251e-02' grads.network.model.decoder.layers.1.fc2.bias: device: cuda:0 - max: '9.095e-03' - mean: '1.455e-11' - min: '-9.3e-03' + max: '1.038e-02' + mean: '1.819e-11' + min: '-1.155e-02' shape: - 1024 - sum: '1.490e-08' + sum: '1.863e-08' grads.network.model.decoder.layers.1.fc2.weight: device: cuda:0 - max: '1.008e-02' - mean: '2.274e-13' - min: '-8.904e-03' + max: '1.431e-02' + mean: '2.558e-13' + min: '-1.138e-02' shape: - 1024 - 4096 - sum: '9.537e-07' + sum: '1.073e-06' grads.network.model.decoder.layers.1.final_layer_norm.bias: device: cuda:0 - max: '1.036e-02' - mean: '-5.957e-05' - min: '-1.051e-02' + max: '1.17e-02' + mean: '-9.708e-05' + min: '-1.293e-02' shape: - 1024 - sum: '-6.100e-02' + sum: '-9.941e-02' grads.network.model.decoder.layers.1.final_layer_norm.weight: device: cuda:0 - max: '1.518e-02' - mean: '7.308e-06' - min: '-8.499e-03' + max: '1.304e-02' + mean: '1.814e-05' + min: '-3.518e-02' shape: - 1024 - sum: '7.484e-03' + sum: '1.858e-02' 
grads.network.model.decoder.layers.1.self_attn.k_proj.bias: device: cuda:0 - max: '4.657e-10' - mean: '-2.025e-12' - min: '-4.657e-10' + max: '6.403e-10' + mean: '6.279e-13' + min: '-1.397e-09' shape: - 1024 - sum: '-2.074e-09' + sum: '6.430e-10' grads.network.model.decoder.layers.1.self_attn.k_proj.weight: device: cuda:0 - max: '2.842e-02' - mean: '-1.398e-13' - min: '-2.796e-02' + max: '3.312e-02' + mean: '3.22e-15' + min: '-3.174e-02' shape: - 1024 - 1024 - sum: '-1.466e-07' + sum: '3.376e-09' grads.network.model.decoder.layers.1.self_attn.out_proj.bias: device: cuda:0 - max: '8.429e-03' - mean: '-1.819e-11' - min: '-8.021e-03' + max: '9.799e-03' + mean: '2.183e-11' + min: '-1.048e-02' shape: - 1024 - sum: '-1.863e-08' + sum: '2.235e-08' grads.network.model.decoder.layers.1.self_attn.out_proj.weight: device: cuda:0 - max: '9.25e-03' + max: '1.020e-02' mean: '-1.705e-13' - min: '-7.668e-03' + min: '-1.033e-02' shape: - 1024 - 1024 sum: '-1.788e-07' grads.network.model.decoder.layers.1.self_attn.q_proj.bias: device: cuda:0 - max: '1.053e-03' - mean: '2.244e-06' - min: '-1.048e-03' + max: '1.236e-03' + mean: '-3.821e-06' + min: '-2.06e-03' shape: - 1024 - sum: '2.298e-03' + sum: '-3.913e-03' grads.network.model.decoder.layers.1.self_attn.q_proj.weight: device: cuda:0 - max: '1.471e-02' - mean: '1.574e-08' - min: '-2.064e-02' + max: '1.833e-02' + mean: '-2.680e-08' + min: '-1.194e-02' shape: - 1024 - 1024 - sum: '1.651e-02' + sum: '-2.811e-02' grads.network.model.decoder.layers.1.self_attn.v_proj.bias: device: cuda:0 - max: '6.922e-03' - mean: '7.232e-05' - min: '-5.205e-03' + max: '1.296e-02' + mean: '1.047e-04' + min: '-9.251e-03' shape: - 1024 - sum: '7.405e-02' + sum: '1.072e-01' grads.network.model.decoder.layers.1.self_attn.v_proj.weight: device: cuda:0 - max: '1.085e-01' - mean: '5.073e-07' - min: '-7.548e-02' + max: '2.234e-01' + mean: '7.347e-07' + min: '-1.650e-01' shape: - 1024 - 1024 - sum: '5.319e-01' + sum: '7.704e-01' grads.network.model.decoder.layers.1.self_attn_layer_norm.bias: device: cuda:0 - max: '8.596e-03' - mean: '-3.698e-05' - min: '-8.267e-03' + max: '1.000e-02' + mean: '-4.235e-05' + min: '-1.078e-02' shape: - 1024 - sum: '-3.787e-02' + sum: '-4.337e-02' grads.network.model.decoder.layers.1.self_attn_layer_norm.weight: device: cuda:0 - max: '1.314e-02' - mean: '3.398e-06' - min: '-8.47e-03' + max: '1.163e-02' + mean: '5.549e-06' + min: '-3.955e-02' shape: - 1024 - sum: '3.48e-03' + sum: '5.682e-03' grads.network.model.decoder.layers.10.fc1.bias: device: cuda:0 - max: '7.667e-03' - mean: '-8.035e-06' - min: '-4.570e-03' + max: '1.167e-02' + mean: '-1.093e-05' + min: '-4.407e-03' shape: - 4096 - sum: '-3.291e-02' + sum: '-4.475e-02' grads.network.model.decoder.layers.10.fc1.weight: device: cuda:0 - max: '1.337e-01' - mean: '-9.547e-09' - min: '-1.268e-01' + max: '1.255e-01' + mean: '-1.298e-08' + min: '-2.335e-01' shape: - 4096 - 1024 - sum: '-4.004e-02' + sum: '-5.445e-02' grads.network.model.decoder.layers.10.fc2.bias: device: cuda:0 - max: '1.046e-02' - mean: '1.455e-11' - min: '-8.283e-03' + max: '9.324e-03' + mean: '3.638e-12' + min: '-9.376e-03' shape: - 1024 - sum: '1.490e-08' + sum: '3.725e-09' grads.network.model.decoder.layers.10.fc2.weight: device: cuda:0 - max: '2.365e-02' - mean: '7.39e-13' - min: '-2.015e-02' + max: '1.888e-02' + mean: '1.137e-13' + min: '-1.95e-02' shape: - 1024 - 4096 - sum: '3.099e-06' + sum: '4.768e-07' grads.network.model.decoder.layers.10.final_layer_norm.bias: device: cuda:0 - max: '1.175e-02' - mean: '3.312e-05' - min: 
'-9.410e-03' + max: '1.063e-02' + mean: '1.763e-04' + min: '-1.049e-02' shape: - 1024 - sum: '3.392e-02' + sum: '1.805e-01' grads.network.model.decoder.layers.10.final_layer_norm.weight: device: cuda:0 - max: '1.716e-02' - mean: '1.21e-05' - min: '-2.542e-02' + max: '1.245e-02' + mean: '1.566e-05' + min: '-1.95e-02' shape: - 1024 - sum: '1.239e-02' + sum: '1.604e-02' grads.network.model.decoder.layers.10.self_attn.k_proj.bias: device: cuda:0 - max: '1.339e-09' - mean: '1.047e-12' - min: '-1.048e-09' + max: '1.863e-09' + mean: '-8.787e-12' + min: '-1.164e-09' shape: - 1024 - sum: '1.072e-09' + sum: '-8.998e-09' grads.network.model.decoder.layers.10.self_attn.k_proj.weight: device: cuda:0 - max: '1.012e-01' - mean: '-4.586e-13' - min: '-1.059e-01' + max: '1.065e-01' + mean: '1.164e-13' + min: '-1.330e-01' shape: - 1024 - 1024 - sum: '-4.809e-07' + sum: '1.220e-07' grads.network.model.decoder.layers.10.self_attn.out_proj.bias: device: cuda:0 - max: '9.373e-03' - mean: '-3.638e-12' - min: '-7.985e-03' + max: '8.365e-03' + mean: '1.819e-11' + min: '-8.918e-03' shape: - 1024 - sum: '-3.725e-09' + sum: '1.863e-08' grads.network.model.decoder.layers.10.self_attn.out_proj.weight: device: cuda:0 - max: '6.620e-03' - mean: '-1.421e-14' - min: '-7.378e-03' + max: '7.876e-03' + mean: '3.126e-13' + min: '-7.644e-03' shape: - 1024 - 1024 - sum: '-1.490e-08' + sum: '3.278e-07' grads.network.model.decoder.layers.10.self_attn.q_proj.bias: device: cuda:0 - max: '4.476e-03' - mean: '-1.281e-05' - min: '-4.059e-03' + max: '3.907e-03' + mean: '-1.607e-05' + min: '-4.692e-03' shape: - 1024 - sum: '-1.311e-02' + sum: '-1.645e-02' grads.network.model.decoder.layers.10.self_attn.q_proj.weight: device: cuda:0 - max: '3.848e-02' - mean: '1.029e-07' - min: '-3.876e-02' + max: '3.358e-02' + mean: '1.291e-07' + min: '-4.45e-02' shape: - 1024 - 1024 - sum: '1.079e-01' + sum: '1.354e-01' grads.network.model.decoder.layers.10.self_attn.v_proj.bias: device: cuda:0 - max: '1.095e-02' - mean: '-4.351e-05' - min: '-1.044e-02' + max: '9.312e-03' + mean: '-8.616e-05' + min: '-9.148e-03' shape: - 1024 - sum: '-4.456e-02' + sum: '-8.822e-02' grads.network.model.decoder.layers.10.self_attn.v_proj.weight: device: cuda:0 - max: '3.115e-01' - mean: '3.496e-07' - min: '-3.515e-01' + max: '2.466e-01' + mean: '6.922e-07' + min: '-2.438e-01' shape: - 1024 - 1024 - sum: '3.666e-01' + sum: '7.259e-01' grads.network.model.decoder.layers.10.self_attn_layer_norm.bias: device: cuda:0 - max: '9.663e-03' - mean: '-1.711e-05' - min: '-8.243e-03' + max: '8.563e-03' + mean: '-2.205e-05' + min: '-9.231e-03' shape: - 1024 - sum: '-1.752e-02' + sum: '-2.258e-02' grads.network.model.decoder.layers.10.self_attn_layer_norm.weight: device: cuda:0 - max: '1.521e-02' - mean: '9.650e-06' - min: '-3.063e-02' + max: '1.004e-02' + mean: '8.82e-06' + min: '-2.064e-02' shape: - 1024 - sum: '9.882e-03' + sum: '9.032e-03' grads.network.model.decoder.layers.11.fc1.bias: device: cuda:0 - max: '8.889e-03' - mean: '-1.153e-05' - min: '-5.87e-03' + max: '4.537e-03' + mean: '-1.97e-05' + min: '-1.077e-02' shape: - 4096 - sum: '-4.722e-02' + sum: '-8.069e-02' grads.network.model.decoder.layers.11.fc1.weight: device: cuda:0 - max: '1.453e-01' - mean: '-4.738e-08' - min: '-1.045e-01' + max: '1.921e-01' + mean: '-8.097e-08' + min: '-1.258e-01' shape: - 4096 - 1024 - sum: '-1.987e-01' + sum: '-3.396e-01' grads.network.model.decoder.layers.11.fc2.bias: device: cuda:0 - max: '1.02e-02' - mean: '2.183e-11' - min: '-1.248e-02' + max: '9.747e-03' + mean: '0.e+00' + min: 
'-1.146e-02' shape: - 1024 - sum: '2.235e-08' + sum: '0.e+00' grads.network.model.decoder.layers.11.fc2.weight: device: cuda:0 - max: '2.754e-02' - mean: '2.842e-14' - min: '-3.209e-02' + max: '2.297e-02' + mean: '-2.274e-13' + min: '-2.611e-02' shape: - 1024 - 4096 - sum: '1.192e-07' + sum: '-9.537e-07' grads.network.model.decoder.layers.11.final_layer_norm.bias: device: cuda:0 - max: '1.19e-02' - mean: '-1.716e-04' - min: '-1.404e-02' + max: '1.074e-02' + mean: '-1.697e-04' + min: '-1.309e-02' shape: - 1024 - sum: '-1.757e-01' + sum: '-1.738e-01' grads.network.model.decoder.layers.11.final_layer_norm.weight: device: cuda:0 - max: '5.003e-02' - mean: '-2.055e-05' - min: '-1.019e-02' + max: '4.611e-02' + mean: '-1.405e-05' + min: '-1.679e-02' shape: - 1024 - sum: '-2.105e-02' + sum: '-1.439e-02' grads.network.model.decoder.layers.11.self_attn.k_proj.bias: device: cuda:0 - max: '3.856e-10' - mean: '-5.496e-13' - min: '-4.620e-10' + max: '4.075e-10' + mean: '3.897e-12' + min: '-5.239e-10' shape: - 1024 - sum: '-5.627e-10' + sum: '3.990e-09' grads.network.model.decoder.layers.11.self_attn.k_proj.weight: device: cuda:0 - max: '3.321e-02' - mean: '4.019e-14' - min: '-4.012e-02' + max: '3.695e-02' + mean: '-2.855e-13' + min: '-3.176e-02' shape: - 1024 - 1024 - sum: '4.214e-08' + sum: '-2.994e-07' grads.network.model.decoder.layers.11.self_attn.out_proj.bias: device: cuda:0 - max: '1.007e-02' - mean: '2.910e-11' - min: '-1.045e-02' + max: '1.050e-02' + mean: '1.819e-12' + min: '-1.04e-02' shape: - 1024 - sum: '2.980e-08' + sum: '1.863e-09' grads.network.model.decoder.layers.11.self_attn.out_proj.weight: device: cuda:0 - max: '4.290e-03' - mean: '-1.776e-14' - min: '-3.304e-03' + max: '4.005e-03' + mean: '-4.619e-14' + min: '-3.44e-03' shape: - 1024 - 1024 - sum: '-1.863e-08' + sum: '-4.843e-08' grads.network.model.decoder.layers.11.self_attn.q_proj.bias: device: cuda:0 - max: '2.271e-03' - mean: '-1.107e-05' - min: '-1.759e-03' + max: '1.21e-03' + mean: '-1.349e-05' + min: '-2.133e-03' shape: - 1024 - sum: '-1.134e-02' + sum: '-1.382e-02' grads.network.model.decoder.layers.11.self_attn.q_proj.weight: device: cuda:0 - max: '1.855e-02' - mean: '1.038e-07' - min: '-1.807e-02' + max: '2.495e-02' + mean: '1.265e-07' + min: '-2.483e-02' shape: - 1024 - 1024 - sum: '1.088e-01' + sum: '1.326e-01' grads.network.model.decoder.layers.11.self_attn.v_proj.bias: device: cuda:0 - max: '7.478e-03' - mean: '-6.482e-05' - min: '-1.279e-02' + max: '9.094e-03' + mean: '-1.657e-05' + min: '-1.120e-02' shape: - 1024 - sum: '-6.637e-02' + sum: '-1.697e-02' grads.network.model.decoder.layers.11.self_attn.v_proj.weight: device: cuda:0 - max: '3.206e-01' - mean: '6.076e-07' - min: '-2.238e-01' + max: '2.806e-01' + mean: '1.554e-07' + min: '-2.307e-01' shape: - 1024 - 1024 - sum: '6.371e-01' + sum: '1.629e-01' grads.network.model.decoder.layers.11.self_attn_layer_norm.bias: device: cuda:0 - max: '1.059e-02' - mean: '9.679e-05' - min: '-1.073e-02' + max: '1.090e-02' + mean: '4.103e-05' + min: '-1.074e-02' shape: - 1024 - sum: '9.911e-02' + sum: '4.202e-02' grads.network.model.decoder.layers.11.self_attn_layer_norm.weight: device: cuda:0 - max: '1.392e-02' - mean: '1.069e-05' - min: '-3.023e-02' + max: '9.913e-03' + mean: '8.734e-06' + min: '-2.563e-02' shape: - 1024 - sum: '1.094e-02' + sum: '8.943e-03' grads.network.model.decoder.layers.12.fc1.bias: device: cuda:0 - max: '4.561e-03' - mean: '-1.190e-05' - min: '-4.822e-03' + max: '4.174e-03' + mean: '-9.494e-06' + min: '-5.266e-03' shape: - 4096 - sum: '-4.876e-02' + sum: 
'-3.889e-02' grads.network.model.decoder.layers.12.fc1.weight: device: cuda:0 - max: '1.229e-01' - mean: '-5.228e-08' - min: '-1.465e-01' + max: '1.308e-01' + mean: '-4.169e-08' + min: '-1.225e-01' shape: - 4096 - 1024 - sum: '-2.193e-01' + sum: '-1.749e-01' grads.network.model.decoder.layers.12.fc2.bias: device: cuda:0 - max: '1.037e-02' - mean: '-1.455e-11' - min: '-9.052e-03' + max: '9.381e-03' + mean: '0.e+00' + min: '-9.925e-03' shape: - 1024 - sum: '-1.490e-08' + sum: '0.e+00' grads.network.model.decoder.layers.12.fc2.weight: device: cuda:0 - max: '1.393e-02' - mean: '6.821e-13' - min: '-1.541e-02' + max: '1.477e-02' + mean: '-1.137e-13' + min: '-1.799e-02' shape: - 1024 - 4096 - sum: '2.861e-06' + sum: '-4.768e-07' grads.network.model.decoder.layers.12.final_layer_norm.bias: device: cuda:0 - max: '1.185e-02' - mean: '-1.402e-04' - min: '-1.030e-02' + max: '1.085e-02' + mean: '-6.289e-05' + min: '-1.164e-02' shape: - 1024 - sum: '-1.436e-01' + sum: '-6.440e-02' grads.network.model.decoder.layers.12.final_layer_norm.weight: device: cuda:0 - max: '2.753e-02' - mean: '8.06e-06' - min: '-2.950e-02' + max: '2.347e-02' + mean: '1.717e-05' + min: '-3.135e-02' shape: - 1024 - sum: '8.253e-03' + sum: '1.758e-02' grads.network.model.decoder.layers.12.self_attn.k_proj.bias: device: cuda:0 - max: '1.048e-09' - mean: '-1.202e-12' - min: '-5.821e-10' + max: '6.694e-10' + mean: '8.309e-13' + min: '-4.948e-10' shape: - 1024 - sum: '-1.231e-09' + sum: '8.508e-10' grads.network.model.decoder.layers.12.self_attn.k_proj.weight: device: cuda:0 - max: '7.339e-02' - mean: '4.055e-13' - min: '-1.12e-01' + max: '7.397e-02' + mean: '-2.175e-13' + min: '-9.768e-02' shape: - 1024 - 1024 - sum: '4.252e-07' + sum: '-2.281e-07' grads.network.model.decoder.layers.12.self_attn.out_proj.bias: device: cuda:0 - max: '1.012e-02' - mean: '-1.455e-11' - min: '-9.195e-03' + max: '9.249e-03' + mean: '-7.276e-12' + min: '-9.731e-03' shape: - 1024 - sum: '-1.490e-08' + sum: '-7.451e-09' grads.network.model.decoder.layers.12.self_attn.out_proj.weight: device: cuda:0 - max: '2.358e-03' - mean: '2.132e-14' - min: '-2.490e-03' + max: '4.412e-03' + mean: '1.421e-13' + min: '-4.588e-03' shape: - 1024 - 1024 - sum: '2.235e-08' + sum: '1.490e-07' grads.network.model.decoder.layers.12.self_attn.q_proj.bias: device: cuda:0 - max: '4.276e-03' - mean: '3.084e-05' - min: '-2.643e-03' + max: '3.407e-03' + mean: '2.445e-05' + min: '-1.779e-03' shape: - 1024 - sum: '3.158e-02' + sum: '2.504e-02' grads.network.model.decoder.layers.12.self_attn.q_proj.weight: device: cuda:0 - max: '3.563e-02' - mean: '-4.485e-07' - min: '-3.289e-02' + max: '4.225e-02' + mean: '-3.557e-07' + min: '-4.189e-02' shape: - 1024 - 1024 - sum: '-4.703e-01' + sum: '-3.729e-01' grads.network.model.decoder.layers.12.self_attn.v_proj.bias: device: cuda:0 - max: '8.738e-03' - mean: '1.154e-04' - min: '-8.845e-03' + max: '8.426e-03' + mean: '2.616e-05' + min: '-1.041e-02' shape: - 1024 - sum: '1.181e-01' + sum: '2.679e-02' grads.network.model.decoder.layers.12.self_attn.v_proj.weight: device: cuda:0 - max: '2.204e-01' - mean: '-1.678e-06' - min: '-2.329e-01' + max: '2.573e-01' + mean: '-3.806e-07' + min: '-2.223e-01' shape: - 1024 - 1024 - sum: '-1.76e+00' + sum: '-3.990e-01' grads.network.model.decoder.layers.12.self_attn_layer_norm.bias: device: cuda:0 - max: '1.051e-02' - mean: '3.206e-05' - min: '-9.447e-03' + max: '9.540e-03' + mean: '1.539e-05' + min: '-1.009e-02' shape: - 1024 - sum: '3.283e-02' + sum: '1.576e-02' 
grads.network.model.decoder.layers.12.self_attn_layer_norm.weight: device: cuda:0 - max: '1.615e-02' - mean: '1.067e-06' - min: '-2.743e-02' + max: '1.112e-02' + mean: '6.956e-06' + min: '-3.292e-02' shape: - 1024 - sum: '1.093e-03' + sum: '7.123e-03' grads.network.model.decoder.layers.13.fc1.bias: device: cuda:0 - max: '4.401e-03' - mean: '-9.962e-06' - min: '-3.711e-03' + max: '4.255e-03' + mean: '-6.284e-06' + min: '-3.659e-03' shape: - 4096 - sum: '-4.080e-02' + sum: '-2.574e-02' grads.network.model.decoder.layers.13.fc1.weight: device: cuda:0 - max: '9.876e-02' - mean: '-3.052e-08' - min: '-8.944e-02' + max: '9.864e-02' + mean: '-1.925e-08' + min: '-8.668e-02' shape: - 4096 - 1024 - sum: '-1.280e-01' + sum: '-8.074e-02' grads.network.model.decoder.layers.13.fc2.bias: device: cuda:0 - max: '9.355e-03' - mean: '1.455e-11' - min: '-9.44e-03' + max: '8.901e-03' + mean: '-9.095e-12' + min: '-9.272e-03' shape: - 1024 - sum: '1.490e-08' + sum: '-9.313e-09' grads.network.model.decoder.layers.13.fc2.weight: device: cuda:0 - max: '8.875e-03' - mean: '4.547e-13' - min: '-1.118e-02' + max: '9.958e-03' + mean: '-1.137e-13' + min: '-1.159e-02' shape: - 1024 - 4096 - sum: '1.907e-06' + sum: '-4.768e-07' grads.network.model.decoder.layers.13.final_layer_norm.bias: device: cuda:0 - max: '1.149e-02' - mean: '7.673e-05' - min: '-1.144e-02' + max: '1.098e-02' + mean: '1.136e-04' + min: '-1.088e-02' shape: - 1024 - sum: '7.857e-02' + sum: '1.163e-01' grads.network.model.decoder.layers.13.final_layer_norm.weight: device: cuda:0 - max: '4.016e-02' - mean: '2.041e-05' - min: '-2.390e-02' + max: '3.056e-02' + mean: '2.505e-06' + min: '-2.49e-02' shape: - 1024 - sum: '2.09e-02' + sum: '2.565e-03' grads.network.model.decoder.layers.13.self_attn.k_proj.bias: device: cuda:0 - max: '3.492e-10' - mean: '1.113e-12' - min: '-3.129e-10' + max: '3.056e-10' + mean: '-3.326e-12' + min: '-4.657e-10' shape: - 1024 - sum: '1.140e-09' + sum: '-3.406e-09' grads.network.model.decoder.layers.13.self_attn.k_proj.weight: device: cuda:0 - max: '2.291e-02' - mean: '1.439e-13' - min: '-3.283e-02' + max: '3.654e-02' + mean: '2.432e-13' + min: '-4.357e-02' shape: - 1024 - 1024 - sum: '1.509e-07' + sum: '2.551e-07' grads.network.model.decoder.layers.13.self_attn.out_proj.bias: device: cuda:0 - max: '8.137e-03' - mean: '1.455e-11' - min: '-7.886e-03' + max: '7.424e-03' + mean: '-3.638e-12' + min: '-9.317e-03' shape: - 1024 - sum: '1.490e-08' + sum: '-3.725e-09' grads.network.model.decoder.layers.13.self_attn.out_proj.weight: device: cuda:0 - max: '2.711e-03' - mean: '-1.172e-13' - min: '-2.667e-03' + max: '3.228e-03' + mean: '7.105e-14' + min: '-2.774e-03' shape: - 1024 - 1024 - sum: '-1.229e-07' + sum: '7.451e-08' grads.network.model.decoder.layers.13.self_attn.q_proj.bias: device: cuda:0 - max: '2.952e-03' - mean: '2.080e-05' - min: '-1.742e-03' + max: '2.412e-03' + mean: '1.546e-05' + min: '-1.678e-03' shape: - 1024 - sum: '2.13e-02' + sum: '1.583e-02' grads.network.model.decoder.layers.13.self_attn.q_proj.weight: device: cuda:0 - max: '2.432e-02' - mean: '-3.182e-07' - min: '-2.134e-02' + max: '1.646e-02' + mean: '-2.364e-07' + min: '-1.986e-02' shape: - 1024 - 1024 - sum: '-3.336e-01' + sum: '-2.479e-01' grads.network.model.decoder.layers.13.self_attn.v_proj.bias: device: cuda:0 - max: '7.585e-03' - mean: '-2.298e-05' - min: '-7.604e-03' + max: '9.358e-03' + mean: '-2.785e-05' + min: '-8.192e-03' shape: - 1024 - sum: '-2.354e-02' + sum: '-2.851e-02' grads.network.model.decoder.layers.13.self_attn.v_proj.weight: device: cuda:0 - 
max: '1.814e-01' - mean: '3.516e-07' - min: '-2.040e-01' + max: '2.093e-01' + mean: '4.26e-07' + min: '-2.454e-01' shape: - 1024 - 1024 - sum: '3.687e-01' + sum: '4.467e-01' grads.network.model.decoder.layers.13.self_attn_layer_norm.bias: device: cuda:0 - max: '8.601e-03' - mean: '4.474e-05' - min: '-8.111e-03' + max: '7.755e-03' + mean: '4.027e-05' + min: '-9.616e-03' shape: - 1024 - sum: '4.582e-02' + sum: '4.124e-02' grads.network.model.decoder.layers.13.self_attn_layer_norm.weight: device: cuda:0 - max: '1.692e-02' - mean: '2.716e-06' - min: '-2.945e-02' + max: '1.237e-02' + mean: '2.634e-06' + min: '-3.056e-02' shape: - 1024 - sum: '2.781e-03' + sum: '2.697e-03' grads.network.model.decoder.layers.14.fc1.bias: device: cuda:0 - max: '4.022e-03' - mean: '-3.262e-06' - min: '-4.242e-03' + max: '3.368e-03' + mean: '-4.94e-06' + min: '-4.024e-03' shape: - 4096 - sum: '-1.336e-02' + sum: '-2.023e-02' grads.network.model.decoder.layers.14.fc1.weight: device: cuda:0 - max: '1.062e-01' - mean: '-3.092e-09' - min: '-8.975e-02' + max: '1.023e-01' + mean: '-4.683e-09' + min: '-8.753e-02' shape: - 4096 - 1024 - sum: '-1.297e-02' + sum: '-1.964e-02' grads.network.model.decoder.layers.14.fc2.bias: device: cuda:0 - max: '9.839e-03' - mean: '1.455e-11' - min: '-8.348e-03' + max: '9.881e-03' + mean: '-2.183e-11' + min: '-9.016e-03' shape: - 1024 - sum: '1.490e-08' + sum: '-2.235e-08' grads.network.model.decoder.layers.14.fc2.weight: device: cuda:0 - max: '1.501e-02' - mean: '4.547e-13' - min: '-1.745e-02' + max: '1.668e-02' + mean: '-1.592e-12' + min: '-1.498e-02' shape: - 1024 - 4096 - sum: '1.907e-06' + sum: '-6.676e-06' grads.network.model.decoder.layers.14.final_layer_norm.bias: device: cuda:0 - max: '1.123e-02' - mean: '-4.262e-05' - min: '-9.990e-03' + max: '1.219e-02' + mean: '2.743e-05' + min: '-1.083e-02' shape: - 1024 - sum: '-4.365e-02' + sum: '2.809e-02' grads.network.model.decoder.layers.14.final_layer_norm.weight: device: cuda:0 - max: '1.884e-02' - mean: '1.767e-05' - min: '-3.378e-02' + max: '1.590e-02' + mean: '-4.36e-06' + min: '-3.127e-02' shape: - 1024 - sum: '1.809e-02' + sum: '-4.464e-03' grads.network.model.decoder.layers.14.self_attn.k_proj.bias: device: cuda:0 - max: '3.638e-10' - mean: '1.328e-13' - min: '-4.220e-10' + max: '3.929e-10' + mean: '-2.173e-12' + min: '-3.056e-10' shape: - 1024 - sum: '1.36e-10' + sum: '-2.226e-09' grads.network.model.decoder.layers.14.self_attn.k_proj.weight: device: cuda:0 - max: '6.98e-02' - mean: '-4.363e-14' - min: '-4.248e-02' + max: '5.135e-02' + mean: '-5.795e-14' + min: '-4.326e-02' shape: - 1024 - 1024 - sum: '-4.575e-08' + sum: '-6.077e-08' grads.network.model.decoder.layers.14.self_attn.out_proj.bias: device: cuda:0 - max: '8.645e-03' - mean: '0.e+00' - min: '-7.605e-03' + max: '9.779e-03' + mean: '9.095e-12' + min: '-8.985e-03' shape: - 1024 - sum: '0.e+00' + sum: '9.313e-09' grads.network.model.decoder.layers.14.self_attn.out_proj.weight: device: cuda:0 - max: '2.700e-03' - mean: '-1.137e-13' - min: '-2.869e-03' + max: '2.521e-03' + mean: '-2.842e-14' + min: '-2.492e-03' shape: - 1024 - 1024 - sum: '-1.192e-07' + sum: '-2.980e-08' grads.network.model.decoder.layers.14.self_attn.q_proj.bias: device: cuda:0 - max: '2.104e-03' - mean: '-8.403e-06' - min: '-5.177e-03' + max: '2.483e-03' + mean: '-2.104e-05' + min: '-4.766e-03' shape: - 1024 - sum: '-8.605e-03' + sum: '-2.155e-02' grads.network.model.decoder.layers.14.self_attn.q_proj.weight: device: cuda:0 - max: '3.976e-02' - mean: '1.967e-07' - min: '-2.941e-02' + max: '3.591e-02' + 
mean: '4.924e-07' + min: '-2.957e-02' shape: - 1024 - 1024 - sum: '2.062e-01' + sum: '5.163e-01' grads.network.model.decoder.layers.14.self_attn.v_proj.bias: device: cuda:0 - max: '8.858e-03' - mean: '7.677e-05' - min: '-9.02e-03' + max: '8.477e-03' + mean: '1.055e-04' + min: '-8.184e-03' shape: - 1024 - sum: '7.861e-02' + sum: '1.081e-01' grads.network.model.decoder.layers.14.self_attn.v_proj.weight: device: cuda:0 - max: '2.243e-01' - mean: '-1.797e-06' - min: '-2.274e-01' + max: '2.027e-01' + mean: '-2.47e-06' + min: '-2.218e-01' shape: - 1024 - 1024 - sum: '-1.884e+00' + sum: '-2.59e+00' grads.network.model.decoder.layers.14.self_attn_layer_norm.bias: device: cuda:0 - max: '8.952e-03' - mean: '2.587e-05' - min: '-8.003e-03' + max: '1.029e-02' + mean: '4.850e-05' + min: '-9.323e-03' shape: - 1024 - sum: '2.649e-02' + sum: '4.967e-02' grads.network.model.decoder.layers.14.self_attn_layer_norm.weight: device: cuda:0 - max: '1.824e-02' - mean: '5.427e-06' - min: '-3.480e-02' + max: '1.910e-02' + mean: '5.651e-06' + min: '-3.208e-02' shape: - 1024 - sum: '5.557e-03' + sum: '5.786e-03' grads.network.model.decoder.layers.15.fc1.bias: device: cuda:0 - max: '6.084e-03' - mean: '-8.483e-06' - min: '-3.799e-03' + max: '5.394e-03' + mean: '-1.012e-05' + min: '-6.176e-03' shape: - 4096 - sum: '-3.475e-02' + sum: '-4.146e-02' grads.network.model.decoder.layers.15.fc1.weight: device: cuda:0 - max: '8.858e-02' - mean: '-8.764e-09' - min: '-1.116e-01' + max: '8.324e-02' + mean: '-1.046e-08' + min: '-1.047e-01' shape: - 4096 - 1024 - sum: '-3.676e-02' + sum: '-4.386e-02' grads.network.model.decoder.layers.15.fc2.bias: device: cuda:0 - max: '1.051e-02' - mean: '1.455e-11' - min: '-1.089e-02' + max: '9.866e-03' + mean: '-7.276e-12' + min: '-1.172e-02' shape: - 1024 - sum: '1.490e-08' + sum: '-7.451e-09' grads.network.model.decoder.layers.15.fc2.weight: device: cuda:0 - max: '1.521e-02' - mean: '4.547e-13' - min: '-1.284e-02' + max: '1.37e-02' + mean: '-5.684e-13' + min: '-1.439e-02' shape: - 1024 - 4096 - sum: '1.907e-06' + sum: '-2.384e-06' grads.network.model.decoder.layers.15.final_layer_norm.bias: device: cuda:0 - max: '1.172e-02' - mean: '-6.644e-05' - min: '-1.335e-02' + max: '1.231e-02' + mean: '-1.332e-04' + min: '-1.468e-02' shape: - 1024 - sum: '-6.804e-02' + sum: '-1.364e-01' grads.network.model.decoder.layers.15.final_layer_norm.weight: device: cuda:0 - max: '2.24e-02' - mean: '-2.669e-06' - min: '-3.526e-02' + max: '3.634e-02' + mean: '1.128e-05' + min: '-3.444e-02' shape: - 1024 - sum: '-2.733e-03' + sum: '1.155e-02' grads.network.model.decoder.layers.15.self_attn.k_proj.bias: device: cuda:0 - max: '1.055e-09' - mean: '7.491e-13' - min: '-4.802e-10' + max: '1.164e-09' + mean: '3.457e-12' + min: '-4.657e-10' shape: - 1024 - sum: '7.670e-10' + sum: '3.54e-09' grads.network.model.decoder.layers.15.self_attn.k_proj.weight: device: cuda:0 - max: '1.531e-02' - mean: '-8.044e-14' - min: '-1.541e-02' + max: '3.154e-02' + mean: '4.652e-14' + min: '-2.124e-02' shape: - 1024 - 1024 - sum: '-8.434e-08' + sum: '4.878e-08' grads.network.model.decoder.layers.15.self_attn.out_proj.bias: device: cuda:0 - max: '1.033e-02' - mean: '1.091e-11' - min: '-8.666e-03' + max: '9.871e-03' + mean: '-1.455e-11' + min: '-9.811e-03' shape: - 1024 - sum: '1.118e-08' + sum: '-1.490e-08' grads.network.model.decoder.layers.15.self_attn.out_proj.weight: device: cuda:0 - max: '4.471e-03' - mean: '3.055e-13' - min: '-5.652e-03' + max: '4.353e-03' + mean: '1.421e-14' + min: '-4.717e-03' shape: - 1024 - 1024 - sum: '3.204e-07' + 
sum: '1.490e-08' grads.network.model.decoder.layers.15.self_attn.q_proj.bias: device: cuda:0 - max: '9.621e-04' - mean: '7.166e-06' - min: '-1.421e-03' + max: '1.886e-03' + mean: '2.190e-05' + min: '-2.335e-03' shape: - 1024 - sum: '7.338e-03' + sum: '2.243e-02' grads.network.model.decoder.layers.15.self_attn.q_proj.weight: device: cuda:0 - max: '1.186e-02' - mean: '-1.556e-07' - min: '-1.624e-02' + max: '2.037e-02' + mean: '-4.754e-07' + min: '-2.289e-02' shape: - 1024 - 1024 - sum: '-1.631e-01' + sum: '-4.985e-01' grads.network.model.decoder.layers.15.self_attn.v_proj.bias: device: cuda:0 - max: '7.926e-03' - mean: '-1.794e-04' - min: '-8.628e-03' + max: '7.805e-03' + mean: '-4.434e-05' + min: '-9.824e-03' shape: - 1024 - sum: '-1.837e-01' + sum: '-4.541e-02' grads.network.model.decoder.layers.15.self_attn.v_proj.weight: device: cuda:0 - max: '1.764e-01' - mean: '3.894e-06' - min: '-1.749e-01' + max: '1.984e-01' + mean: '9.627e-07' + min: '-1.703e-01' shape: - 1024 - 1024 - sum: '4.083e+00' + sum: '1.009e+00' grads.network.model.decoder.layers.15.self_attn_layer_norm.bias: device: cuda:0 - max: '1.129e-02' - mean: '1.039e-04' - min: '-9.336e-03' + max: '1.079e-02' + mean: '1.138e-04' + min: '-1.047e-02' shape: - 1024 - sum: '1.064e-01' + sum: '1.165e-01' grads.network.model.decoder.layers.15.self_attn_layer_norm.weight: device: cuda:0 - max: '1.954e-02' - mean: '2.421e-06' - min: '-3.688e-02' + max: '1.985e-02' + mean: '-3.775e-06' + min: '-3.666e-02' shape: - 1024 - sum: '2.479e-03' + sum: '-3.866e-03' grads.network.model.decoder.layers.16.fc1.bias: device: cuda:0 - max: '4.387e-03' - mean: '-1.177e-06' - min: '-4.594e-03' + max: '4.077e-03' + mean: '2.515e-06' + min: '-4.591e-03' shape: - 4096 - sum: '-4.820e-03' + sum: '1.030e-02' grads.network.model.decoder.layers.16.fc1.weight: device: cuda:0 - max: '9.725e-02' - mean: '-1.358e-09' - min: '-1.095e-01' + max: '1.095e-01' + mean: '2.903e-09' + min: '-1.061e-01' shape: - 4096 - 1024 - sum: '-5.697e-03' + sum: '1.218e-02' grads.network.model.decoder.layers.16.fc2.bias: device: cuda:0 - max: '1.269e-02' - mean: '-2.183e-11' - min: '-1.081e-02' + max: '1.072e-02' + mean: '0.e+00' + min: '-1.028e-02' shape: - 1024 - sum: '-2.235e-08' + sum: '0.e+00' grads.network.model.decoder.layers.16.fc2.weight: device: cuda:0 - max: '3.339e-02' - mean: '-9.095e-13' - min: '-2.250e-02' + max: '2.759e-02' + mean: '0.e+00' + min: '-2.188e-02' shape: - 1024 - 4096 - sum: '-3.815e-06' + sum: '0.e+00' grads.network.model.decoder.layers.16.final_layer_norm.bias: device: cuda:0 - max: '1.527e-02' - mean: '2.65e-04' - min: '-1.338e-02' + max: '1.385e-02' + mean: '3.693e-04' + min: '-1.169e-02' shape: - 1024 - sum: '2.713e-01' + sum: '3.781e-01' grads.network.model.decoder.layers.16.final_layer_norm.weight: device: cuda:0 - max: '2.378e-02' - mean: '-1.535e-05' - min: '-2.549e-02' + max: '2.044e-02' + mean: '-2.249e-06' + min: '-2.405e-02' shape: - 1024 - sum: '-1.572e-02' + sum: '-2.303e-03' grads.network.model.decoder.layers.16.self_attn.k_proj.bias: device: cuda:0 - max: '3.492e-10' - mean: '-1.085e-12' - min: '-3.783e-10' + max: '4.657e-10' + mean: '-1.148e-12' + min: '-4.657e-10' shape: - 1024 - sum: '-1.111e-09' + sum: '-1.176e-09' grads.network.model.decoder.layers.16.self_attn.k_proj.weight: device: cuda:0 - max: '2.069e-02' - mean: '-1.421e-14' - min: '-2.927e-02' + max: '2.442e-02' + mean: '7.527e-14' + min: '-2.925e-02' shape: - 1024 - 1024 - sum: '-1.490e-08' + sum: '7.893e-08' grads.network.model.decoder.layers.16.self_attn.out_proj.bias: device: 
cuda:0 - max: '1.110e-02' - mean: '2.183e-11' - min: '-1.106e-02' + max: '8.875e-03' + mean: '0.e+00' + min: '-9.845e-03' shape: - 1024 - sum: '2.235e-08' + sum: '0.e+00' grads.network.model.decoder.layers.16.self_attn.out_proj.weight: device: cuda:0 - max: '3.313e-03' - mean: '1.208e-13' - min: '-3.429e-03' + max: '2.749e-03' + mean: '-1.563e-13' + min: '-2.783e-03' shape: - 1024 - 1024 - sum: '1.267e-07' + sum: '-1.639e-07' grads.network.model.decoder.layers.16.self_attn.q_proj.bias: device: cuda:0 - max: '1.952e-03' - mean: '-1.946e-06' - min: '-1.790e-03' + max: '1.541e-03' + mean: '-7.89e-06' + min: '-2.125e-03' shape: - 1024 - sum: '-1.993e-03' + sum: '-8.079e-03' grads.network.model.decoder.layers.16.self_attn.q_proj.weight: device: cuda:0 - max: '1.804e-02' - mean: '4.067e-08' - min: '-1.849e-02' + max: '2.979e-02' + mean: '1.649e-07' + min: '-3.029e-02' shape: - 1024 - 1024 - sum: '4.264e-02' + sum: '1.729e-01' grads.network.model.decoder.layers.16.self_attn.v_proj.bias: device: cuda:0 - max: '1.061e-02' - mean: '-1.323e-04' - min: '-1.051e-02' + max: '9.657e-03' + mean: '-1.308e-04' + min: '-9.640e-03' shape: - 1024 - sum: '-1.355e-01' + sum: '-1.339e-01' grads.network.model.decoder.layers.16.self_attn.v_proj.weight: device: cuda:0 - max: '2.588e-01' - mean: '2.764e-06' - min: '-2.409e-01' + max: '2.179e-01' + mean: '2.732e-06' + min: '-2.213e-01' shape: - 1024 - 1024 - sum: '2.898e+00' + sum: '2.865e+00' grads.network.model.decoder.layers.16.self_attn_layer_norm.bias: device: cuda:0 - max: '1.140e-02' - mean: '-7.85e-05' - min: '-1.185e-02' + max: '9.162e-03' + mean: '-9.535e-05' + min: '-1.059e-02' shape: - 1024 - sum: '-8.038e-02' + sum: '-9.764e-02' grads.network.model.decoder.layers.16.self_attn_layer_norm.weight: device: cuda:0 - max: '2.204e-02' - mean: '6.894e-06' - min: '-3.184e-02' + max: '2.578e-02' + mean: '9.235e-06' + min: '-2.987e-02' shape: - 1024 - sum: '7.059e-03' + sum: '9.457e-03' grads.network.model.decoder.layers.17.fc1.bias: device: cuda:0 - max: '6.26e-03' - mean: '2.31e-06' - min: '-5.628e-03' + max: '6.044e-03' + mean: '2.890e-06' + min: '-6.564e-03' shape: - 4096 - sum: '9.461e-03' + sum: '1.184e-02' grads.network.model.decoder.layers.17.fc1.weight: device: cuda:0 - max: '1.350e-01' - mean: '4.019e-10' - min: '-1.688e-01' + max: '1.345e-01' + mean: '5.029e-10' + min: '-1.541e-01' shape: - 4096 - 1024 - sum: '1.686e-03' + sum: '2.109e-03' grads.network.model.decoder.layers.17.fc2.bias: device: cuda:0 - max: '1.649e-02' - mean: '-2.183e-11' - min: '-1.481e-02' + max: '1.305e-02' + mean: '0.e+00' + min: '-1.607e-02' shape: - 1024 - sum: '-2.235e-08' + sum: '0.e+00' grads.network.model.decoder.layers.17.fc2.weight: device: cuda:0 - max: '3.401e-02' - mean: '-9.095e-13' - min: '-2.889e-02' + max: '2.616e-02' + mean: '0.e+00' + min: '-3.049e-02' shape: - 1024 - 4096 - sum: '-3.815e-06' + sum: '0.e+00' grads.network.model.decoder.layers.17.final_layer_norm.bias: device: cuda:0 - max: '1.855e-02' - mean: '-3.642e-04' - min: '-1.788e-02' + max: '1.535e-02' + mean: '-2.257e-04' + min: '-1.923e-02' shape: - 1024 - sum: '-3.73e-01' + sum: '-2.311e-01' grads.network.model.decoder.layers.17.final_layer_norm.weight: device: cuda:0 - max: '3.625e-02' - mean: '4.667e-05' - min: '-2.155e-02' + max: '3.850e-02' + mean: '2.985e-05' + min: '-2.193e-02' shape: - 1024 - sum: '4.779e-02' + sum: '3.056e-02' grads.network.model.decoder.layers.17.self_attn.k_proj.bias: device: cuda:0 - max: '2.401e-10' - mean: '1.044e-12' - min: '-2.037e-10' + max: '3.201e-10' + mean: '1.170e-12' 
+ min: '-2.183e-10' shape: - 1024 - sum: '1.069e-09' + sum: '1.198e-09' grads.network.model.decoder.layers.17.self_attn.k_proj.weight: device: cuda:0 - max: '1.855e-02' - mean: '-1.524e-13' - min: '-1.911e-02' + max: '1.88e-02' + mean: '1.493e-13' + min: '-1.416e-02' shape: - 1024 - 1024 - sum: '-1.598e-07' + sum: '1.566e-07' grads.network.model.decoder.layers.17.self_attn.out_proj.bias: device: cuda:0 - max: '1.518e-02' + max: '1.277e-02' mean: '-1.455e-11' - min: '-1.354e-02' + min: '-1.398e-02' shape: - 1024 sum: '-1.490e-08' grads.network.model.decoder.layers.17.self_attn.out_proj.weight: device: cuda:0 - max: '4.101e-03' - mean: '1.812e-13' - min: '-4.541e-03' + max: '3.332e-03' + mean: '9.592e-14' + min: '-4.020e-03' shape: - 1024 - 1024 - sum: '1.9e-07' + sum: '1.006e-07' grads.network.model.decoder.layers.17.self_attn.q_proj.bias: device: cuda:0 - max: '1.11e-03' - mean: '6.052e-06' - min: '-2.488e-03' + max: '8.169e-04' + mean: '1.575e-07' + min: '-1.763e-03' shape: - 1024 - sum: '6.197e-03' + sum: '1.613e-04' grads.network.model.decoder.layers.17.self_attn.q_proj.weight: device: cuda:0 - max: '3.155e-02' - mean: '-1.032e-07' - min: '-1.135e-02' + max: '2.347e-02' + mean: '-2.684e-09' + min: '-1.066e-02' shape: - 1024 - 1024 - sum: '-1.082e-01' + sum: '-2.815e-03' grads.network.model.decoder.layers.17.self_attn.v_proj.bias: device: cuda:0 - max: '1.409e-02' - mean: '-2.352e-05' - min: '-1.076e-02' + max: '1.098e-02' + mean: '-1.444e-05' + min: '-1.304e-02' shape: - 1024 - sum: '-2.409e-02' + sum: '-1.479e-02' grads.network.model.decoder.layers.17.self_attn.v_proj.weight: device: cuda:0 - max: '2.998e-01' - mean: '4.009e-07' - min: '-3.809e-01' + max: '3.683e-01' + mean: '2.462e-07' + min: '-3.150e-01' shape: - 1024 - 1024 - sum: '4.204e-01' + sum: '2.581e-01' grads.network.model.decoder.layers.17.self_attn_layer_norm.bias: device: cuda:0 - max: '1.61e-02' - mean: '-1.565e-05' - min: '-1.437e-02' + max: '1.358e-02' + mean: '-5.711e-06' + min: '-1.483e-02' shape: - 1024 - sum: '-1.603e-02' + sum: '-5.848e-03' grads.network.model.decoder.layers.17.self_attn_layer_norm.weight: device: cuda:0 - max: '2.386e-02' - mean: '5.609e-06' - min: '-1.978e-02' + max: '2.098e-02' + mean: '3.371e-06' + min: '-1.99e-02' shape: - 1024 - sum: '5.744e-03' + sum: '3.452e-03' grads.network.model.decoder.layers.18.fc1.bias: device: cuda:0 - max: '9.537e-03' - mean: '2.52e-07' - min: '-6.979e-03' + max: '1.147e-02' + mean: '-5.311e-06' + min: '-7.232e-03' shape: - 4096 - sum: '1.032e-03' + sum: '-2.175e-02' grads.network.model.decoder.layers.18.fc1.weight: device: cuda:0 - max: '2.336e-01' - mean: '4.358e-10' - min: '-2.608e-01' + max: '1.619e-01' + mean: '-9.185e-09' + min: '-3.223e-01' shape: - 4096 - 1024 - sum: '1.828e-03' + sum: '-3.853e-02' grads.network.model.decoder.layers.18.fc2.bias: device: cuda:0 - max: '1.465e-02' - mean: '-1.819e-11' - min: '-1.239e-02' + max: '1.429e-02' + mean: '0.e+00' + min: '-1.499e-02' shape: - 1024 - sum: '-1.863e-08' + sum: '0.e+00' grads.network.model.decoder.layers.18.fc2.weight: device: cuda:0 - max: '2.649e-02' - mean: '0.e+00' - min: '-1.881e-02' + max: '2.821e-02' + mean: '-2.274e-13' + min: '-2.067e-02' shape: - 1024 - 4096 - sum: '0.e+00' + sum: '-9.537e-07' grads.network.model.decoder.layers.18.final_layer_norm.bias: device: cuda:0 - max: '1.606e-02' - mean: '1.368e-04' - min: '-1.438e-02' + max: '1.670e-02' + mean: '2.067e-04' + min: '-1.701e-02' shape: - 1024 - sum: '1.401e-01' + sum: '2.117e-01' 
grads.network.model.decoder.layers.18.final_layer_norm.weight: device: cuda:0 - max: '1.965e-02' - mean: '-4.229e-05' - min: '-1.566e-02' + max: '1.673e-02' + mean: '-3.888e-05' + min: '-1.522e-02' shape: - 1024 - sum: '-4.330e-02' + sum: '-3.981e-02' grads.network.model.decoder.layers.18.self_attn.k_proj.bias: device: cuda:0 - max: '6.403e-10' - mean: '-3.804e-13' - min: '-3.056e-10' + max: '8.731e-10' + mean: '2.129e-12' + min: '-4.075e-10' shape: - 1024 - sum: '-3.895e-10' + sum: '2.18e-09' grads.network.model.decoder.layers.18.self_attn.k_proj.weight: device: cuda:0 - max: '5.736e-02' - mean: '1.643e-14' - min: '-8.238e-02' + max: '4.180e-02' + mean: '1.821e-14' + min: '-5.685e-02' shape: - 1024 - 1024 - sum: '1.723e-08' + sum: '1.909e-08' grads.network.model.decoder.layers.18.self_attn.out_proj.bias: device: cuda:0 - max: '1.309e-02' - mean: '-2.183e-11' - min: '-1.086e-02' + max: '1.283e-02' + mean: '7.276e-12' + min: '-1.266e-02' shape: - 1024 - sum: '-2.235e-08' + sum: '7.451e-09' grads.network.model.decoder.layers.18.self_attn.out_proj.weight: device: cuda:0 - max: '2.482e-03' - mean: '-1.563e-13' - min: '-3.289e-03' + max: '2.322e-03' + mean: '2.842e-14' + min: '-2.526e-03' shape: - 1024 - 1024 - sum: '-1.639e-07' + sum: '2.980e-08' grads.network.model.decoder.layers.18.self_attn.q_proj.bias: device: cuda:0 - max: '8.627e-03' - mean: '-5.75e-06' - min: '-8.369e-03' + max: '5.705e-03' + mean: '-1.891e-05' + min: '-5.284e-03' shape: - 1024 - sum: '-5.888e-03' + sum: '-1.937e-02' grads.network.model.decoder.layers.18.self_attn.q_proj.weight: device: cuda:0 - max: '1.070e-01' - mean: '7.839e-08' - min: '-1.119e-01' + max: '7.843e-02' + mean: '2.579e-07' + min: '-8.680e-02' shape: - 1024 - 1024 - sum: '8.220e-02' + sum: '2.704e-01' grads.network.model.decoder.layers.18.self_attn.v_proj.bias: device: cuda:0 - max: '1.567e-02' - mean: '8.644e-05' - min: '-1.514e-02' + max: '1.423e-02' + mean: '1.193e-04' + min: '-1.538e-02' shape: - 1024 - sum: '8.851e-02' + sum: '1.222e-01' grads.network.model.decoder.layers.18.self_attn.v_proj.weight: device: cuda:0 - max: '4.127e-01' - mean: '-1.178e-06' - min: '-4.298e-01' + max: '4.271e-01' + mean: '-1.627e-06' + min: '-3.934e-01' shape: - 1024 - 1024 - sum: '-1.236e+00' + sum: '-1.706e+00' grads.network.model.decoder.layers.18.self_attn_layer_norm.bias: device: cuda:0 - max: '1.364e-02' - mean: '3.632e-05' - min: '-1.140e-02' + max: '1.349e-02' + mean: '1.753e-06' + min: '-1.332e-02' shape: - 1024 - sum: '3.719e-02' + sum: '1.795e-03' grads.network.model.decoder.layers.18.self_attn_layer_norm.weight: device: cuda:0 - max: '1.925e-02' - mean: '2.831e-06' - min: '-2.016e-02' + max: '1.638e-02' + mean: '1.578e-06' + min: '-1.96e-02' shape: - 1024 - sum: '2.899e-03' + sum: '1.616e-03' grads.network.model.decoder.layers.19.fc1.bias: device: cuda:0 - max: '9.326e-03' - mean: '1.837e-07' - min: '-1.031e-02' + max: '1.043e-02' + mean: '3.285e-06' + min: '-8.926e-03' shape: - 4096 - sum: '7.523e-04' + sum: '1.346e-02' grads.network.model.decoder.layers.19.fc1.weight: device: cuda:0 - max: '2.191e-01' - mean: '6.108e-10' - min: '-2.314e-01' + max: '2.514e-01' + mean: '1.092e-08' + min: '-2.619e-01' shape: - 4096 - 1024 - sum: '2.562e-03' + sum: '4.581e-02' grads.network.model.decoder.layers.19.fc2.bias: device: cuda:0 - max: '1.581e-02' - mean: '0.e+00' - min: '-1.359e-02' + max: '1.579e-02' + mean: '7.276e-12' + min: '-1.67e-02' shape: - 1024 - sum: '0.e+00' + sum: '7.451e-09' grads.network.model.decoder.layers.19.fc2.weight: device: cuda:0 - max: 
'2.231e-02' + max: '2.852e-02' mean: '0.e+00' - min: '-2.506e-02' + min: '-2.674e-02' shape: - 1024 - 4096 sum: '0.e+00' grads.network.model.decoder.layers.19.final_layer_norm.bias: device: cuda:0 - max: '1.757e-02' - mean: '1.004e-04' - min: '-1.579e-02' + max: '1.804e-02' + mean: '8.083e-05' + min: '-1.924e-02' shape: - 1024 - sum: '1.028e-01' + sum: '8.276e-02' grads.network.model.decoder.layers.19.final_layer_norm.weight: device: cuda:0 - max: '1.497e-02' - mean: '7.640e-06' - min: '-1.806e-02' + max: '2.331e-02' + mean: '-1.504e-05' + min: '-1.230e-02' shape: - 1024 - sum: '7.824e-03' + sum: '-1.54e-02' grads.network.model.decoder.layers.19.self_attn.k_proj.bias: device: cuda:0 - max: '3.02e-10' - mean: '-5.693e-13' - min: '-2.474e-10' + max: '4.075e-10' + mean: '-1.247e-12' + min: '-4.948e-10' shape: - 1024 - sum: '-5.83e-10' + sum: '-1.277e-09' grads.network.model.decoder.layers.19.self_attn.k_proj.weight: device: cuda:0 - max: '6.374e-02' - mean: '-2.404e-14' - min: '-4.199e-02' + max: '4.950e-02' + mean: '1.668e-13' + min: '-3.336e-02' shape: - 1024 - 1024 - sum: '-2.520e-08' + sum: '1.749e-07' grads.network.model.decoder.layers.19.self_attn.out_proj.bias: device: cuda:0 - max: '1.581e-02' - mean: '-7.276e-12' - min: '-1.360e-02' + max: '1.443e-02' + mean: '4.366e-11' + min: '-1.464e-02' shape: - 1024 - sum: '-7.451e-09' + sum: '4.470e-08' grads.network.model.decoder.layers.19.self_attn.out_proj.weight: device: cuda:0 - max: '4.519e-03' - mean: '3.553e-14' - min: '-4.269e-03' + max: '5.047e-03' + mean: '1.137e-13' + min: '-4.323e-03' shape: - 1024 - 1024 - sum: '3.725e-08' + sum: '1.192e-07' grads.network.model.decoder.layers.19.self_attn.q_proj.bias: device: cuda:0 - max: '4.052e-03' - mean: '1.142e-05' - min: '-3.511e-03' + max: '2.846e-03' + mean: '-5.669e-06' + min: '-2.716e-03' shape: - 1024 - sum: '1.17e-02' + sum: '-5.805e-03' grads.network.model.decoder.layers.19.self_attn.q_proj.weight: device: cuda:0 - max: '6.677e-02' - mean: '-1.415e-07' - min: '-7.58e-02' + max: '5.232e-02' + mean: '7.022e-08' + min: '-5.666e-02' shape: - 1024 - 1024 - sum: '-1.483e-01' + sum: '7.363e-02' grads.network.model.decoder.layers.19.self_attn.v_proj.bias: device: cuda:0 - max: '1.518e-02' - mean: '-1.563e-04' - min: '-1.711e-02' + max: '1.353e-02' + mean: '-1.046e-04' + min: '-1.307e-02' shape: - 1024 - sum: '-1.600e-01' + sum: '-1.071e-01' grads.network.model.decoder.layers.19.self_attn.v_proj.weight: device: cuda:0 - max: '4.186e-01' - mean: '1.935e-06' - min: '-4.339e-01' + max: '3.506e-01' + mean: '1.296e-06' + min: '-3.869e-01' shape: - 1024 - 1024 - sum: '2.029e+00' + sum: '1.359e+00' grads.network.model.decoder.layers.19.self_attn_layer_norm.bias: device: cuda:0 - max: '1.691e-02' - mean: '5.710e-05' - min: '-1.452e-02' + max: '1.543e-02' + mean: '1.895e-05' + min: '-1.569e-02' shape: - 1024 - sum: '5.847e-02' + sum: '1.941e-02' grads.network.model.decoder.layers.19.self_attn_layer_norm.weight: device: cuda:0 - max: '1.504e-02' - mean: '-1.596e-06' - min: '-1.835e-02' + max: '1.44e-02' + mean: '5.186e-07' + min: '-1.104e-02' shape: - 1024 - sum: '-1.634e-03' + sum: '5.310e-04' grads.network.model.decoder.layers.2.fc1.bias: device: cuda:0 - max: '5.528e-03' - mean: '-4.982e-06' - min: '-7.129e-03' + max: '5.921e-03' + mean: '8.856e-06' + min: '-9.619e-03' shape: - 4096 - sum: '-2.040e-02' + sum: '3.627e-02' grads.network.model.decoder.layers.2.fc1.weight: device: cuda:0 - max: '8.963e-02' - mean: '9.519e-09' - min: '-1.056e-01' + max: '1.109e-01' + mean: '-1.692e-08' + min: 
'-1.033e-01' shape: - 4096 - 1024 - sum: '3.993e-02' + sum: '-7.098e-02' grads.network.model.decoder.layers.2.fc2.bias: device: cuda:0 - max: '8.683e-03' - mean: '0.e+00' - min: '-7.982e-03' + max: '8.814e-03' + mean: '1.455e-11' + min: '-9.890e-03' shape: - 1024 - sum: '0.e+00' + sum: '1.490e-08' grads.network.model.decoder.layers.2.fc2.weight: device: cuda:0 - max: '6.756e-03' - mean: '-5.684e-14' - min: '-6.235e-03' + max: '8.03e-03' + mean: '1.705e-13' + min: '-7.305e-03' shape: - 1024 - 4096 - sum: '-2.384e-07' + sum: '7.153e-07' grads.network.model.decoder.layers.2.final_layer_norm.bias: device: cuda:0 - max: '9.485e-03' - mean: '-8.647e-06' - min: '-9.094e-03' + max: '1.062e-02' + mean: '2.142e-05' + min: '-9.885e-03' shape: - 1024 - sum: '-8.854e-03' + sum: '2.193e-02' grads.network.model.decoder.layers.2.final_layer_norm.weight: device: cuda:0 - max: '1.425e-02' - mean: '2.225e-05' - min: '-1.681e-02' + max: '1.06e-02' + mean: '1.349e-05' + min: '-3.724e-02' shape: - 1024 - sum: '2.278e-02' + sum: '1.382e-02' grads.network.model.decoder.layers.2.self_attn.k_proj.bias: device: cuda:0 - max: '7.276e-10' - mean: '2.105e-12' - min: '-6.403e-10' + max: '6.985e-10' + mean: '3.819e-13' + min: '-3.492e-10' shape: - 1024 - sum: '2.156e-09' + sum: '3.911e-10' grads.network.model.decoder.layers.2.self_attn.k_proj.weight: device: cuda:0 - max: '1.946e-02' - mean: '-5.407e-14' - min: '-1.651e-02' + max: '1.658e-02' + mean: '-6.373e-14' + min: '-1.493e-02' shape: - 1024 - 1024 - sum: '-5.669e-08' + sum: '-6.682e-08' grads.network.model.decoder.layers.2.self_attn.out_proj.bias: device: cuda:0 - max: '8.581e-03' - mean: '7.276e-12' - min: '-7.184e-03' + max: '9.061e-03' + mean: '1.455e-11' + min: '-9.315e-03' shape: - 1024 - sum: '7.451e-09' + sum: '1.490e-08' grads.network.model.decoder.layers.2.self_attn.out_proj.weight: device: cuda:0 - max: '6.802e-03' - mean: '-7.105e-14' - min: '-8.062e-03' + max: '9.092e-03' + mean: '-1.421e-14' + min: '-8.389e-03' shape: - 1024 - 1024 - sum: '-7.451e-08' + sum: '-1.490e-08' grads.network.model.decoder.layers.2.self_attn.q_proj.bias: device: cuda:0 - max: '7.422e-04' - mean: '8.642e-07' - min: '-7.440e-04' + max: '1.064e-03' + mean: '4.480e-06' + min: '-1.057e-03' shape: - 1024 - sum: '8.849e-04' + sum: '4.588e-03' grads.network.model.decoder.layers.2.self_attn.q_proj.weight: device: cuda:0 - max: '9.611e-03' - mean: '7.473e-09' - min: '-8.949e-03' + max: '9.205e-03' + mean: '3.874e-08' + min: '-1.268e-02' shape: - 1024 - 1024 - sum: '7.836e-03' + sum: '4.063e-02' grads.network.model.decoder.layers.2.self_attn.v_proj.bias: device: cuda:0 - max: '7.806e-03' - mean: '5.733e-05' - min: '-5.400e-03' + max: '8.063e-03' + mean: '3.71e-05' + min: '-6.821e-03' shape: - 1024 - sum: '5.871e-02' + sum: '3.799e-02' grads.network.model.decoder.layers.2.self_attn.v_proj.weight: device: cuda:0 - max: '1.255e-01' - mean: '4.958e-07' - min: '-1.039e-01' + max: '1.234e-01' + mean: '3.208e-07' + min: '-1.047e-01' shape: - 1024 - 1024 - sum: '5.199e-01' + sum: '3.364e-01' grads.network.model.decoder.layers.2.self_attn_layer_norm.bias: device: cuda:0 - max: '8.702e-03' - mean: '-3.180e-05' - min: '-7.398e-03' + max: '9.170e-03' + mean: '-3.405e-05' + min: '-9.528e-03' shape: - 1024 - sum: '-3.257e-02' + sum: '-3.486e-02' grads.network.model.decoder.layers.2.self_attn_layer_norm.weight: device: cuda:0 - max: '1.282e-02' - mean: '-7.960e-06' - min: '-9.967e-03' + max: '1.376e-02' + mean: '3.953e-06' + min: '-3.395e-02' shape: - 1024 - sum: '-8.151e-03' + sum: '4.048e-03' 
 grads.network.model.decoder.layers.20.fc1.bias:
   device: cuda:0
-  max: '7.021e-03'
-  mean: '-8.220e-07'
-  min: '-9.715e-03'
+  max: '7.671e-03'
+  mean: '-3.533e-07'
+  min: '-1.159e-02'
   shape:
   - 4096
-  sum: '-3.367e-03'
+  sum: '-1.447e-03'
 grads.network.model.decoder.layers.20.fc1.weight:
   device: cuda:0
-  max: '2.901e-01'
-  mean: '-2.468e-09'
-  min: '-2.366e-01'
+  max: '3.498e-01'
+  mean: '-1.061e-09'
+  min: '-2.271e-01'
   shape:
   - 4096
   - 1024
-  sum: '-1.035e-02'
+  sum: '-4.449e-03'
 grads.network.model.decoder.layers.20.fc2.bias:
   device: cuda:0
-  max: '1.656e-02'
+  max: '1.901e-02'
   mean: '-1.455e-11'
-  min: '-1.602e-02'
+  min: '-1.83e-02'
   shape:
   - 1024
   sum: '-1.490e-08'
 grads.network.model.decoder.layers.20.fc2.weight:
   device: cuda:0
-  max: '5.451e-02'
-  mean: '0.e+00'
-  min: '-6.944e-02'
+  max: '8.356e-02'
+  mean: '5.684e-14'
+  min: '-8.36e-02'
   shape:
   - 1024
   - 4096
-  sum: '0.e+00'
+  sum: '2.384e-07'
 grads.network.model.decoder.layers.20.final_layer_norm.bias:
   device: cuda:0
-  max: '1.946e-02'
-  mean: '1.441e-04'
-  min: '-1.843e-02'
+  max: '2.215e-02'
+  mean: '2.282e-04'
+  min: '-2.103e-02'
   shape:
   - 1024
-  sum: '1.476e-01'
+  sum: '2.337e-01'
 grads.network.model.decoder.layers.20.final_layer_norm.weight:
   device: cuda:0
-  max: '1.598e-02'
-  mean: '-4.830e-06'
-  min: '-1.877e-02'
+  max: '2.260e-02'
+  mean: '-2.262e-05'
+  min: '-1.660e-02'
   shape:
   - 1024
-  sum: '-4.946e-03'
+  sum: '-2.316e-02'
 grads.network.model.decoder.layers.20.self_attn.k_proj.bias:
   device: cuda:0
-  max: '3.201e-10'
-  mean: '-9.206e-13'
-  min: '-2.910e-10'
+  max: '3.492e-10'
+  mean: '1.942e-12'
+  min: '-3.347e-10'
   shape:
   - 1024
-  sum: '-9.427e-10'
+  sum: '1.989e-09'
 grads.network.model.decoder.layers.20.self_attn.k_proj.weight:
   device: cuda:0
-  max: '3.528e-02'
-  mean: '-4.058e-14'
-  min: '-3.229e-02'
+  max: '3.529e-02'
+  mean: '-4.73e-14'
+  min: '-3.390e-02'
   shape:
   - 1024
   - 1024
-  sum: '-4.255e-08'
+  sum: '-4.959e-08'
 grads.network.model.decoder.layers.20.self_attn.out_proj.bias:
   device: cuda:0
-  max: '1.564e-02'
-  mean: '2.910e-11'
-  min: '-1.513e-02'
+  max: '1.786e-02'
+  mean: '1.455e-11'
+  min: '-1.611e-02'
   shape:
   - 1024
-  sum: '2.980e-08'
+  sum: '1.490e-08'
 grads.network.model.decoder.layers.20.self_attn.out_proj.weight:
   device: cuda:0
-  max: '8.664e-03'
-  mean: '-1.243e-13'
-  min: '-1.044e-02'
+  max: '8.450e-03'
+  mean: '-1.243e-14'
+  min: '-9.957e-03'
   shape:
   - 1024
   - 1024
-  sum: '-1.304e-07'
+  sum: '-1.304e-08'
 grads.network.model.decoder.layers.20.self_attn.q_proj.bias:
   device: cuda:0
-  max: '1.403e-03'
-  mean: '1.494e-05'
-  min: '-1.552e-03'
+  max: '1.168e-03'
+  mean: '1.373e-05'
+  min: '-1.461e-03'
   shape:
   - 1024
-  sum: '1.53e-02'
+  sum: '1.406e-02'
 grads.network.model.decoder.layers.20.self_attn.q_proj.weight:
   device: cuda:0
-  max: '2.932e-02'
-  mean: '-1.382e-07'
-  min: '-3.542e-02'
+  max: '3.718e-02'
+  mean: '-1.270e-07'
+  min: '-3.829e-02'
   shape:
   - 1024
   - 1024
-  sum: '-1.449e-01'
+  sum: '-1.332e-01'
 grads.network.model.decoder.layers.20.self_attn.v_proj.bias:
   device: cuda:0
-  max: '1.606e-02'
-  mean: '1.629e-04'
-  min: '-1.118e-02'
+  max: '1.316e-02'
+  mean: '1.595e-04'
+  min: '-1.22e-02'
   shape:
   - 1024
-  sum: '1.668e-01'
+  sum: '1.634e-01'
 grads.network.model.decoder.layers.20.self_attn.v_proj.weight:
   device: cuda:0
-  max: '3.505e-01'
-  mean: '-1.507e-06'
-  min: '-4.711e-01'
+  max: '3.578e-01'
+  mean: '-1.476e-06'
+  min: '-3.892e-01'
   shape:
   - 1024
   - 1024
-  sum: '-1.580e+00'
+  sum: '-1.548e+00'
 grads.network.model.decoder.layers.20.self_attn_layer_norm.bias:
   device: cuda:0
-  max: '1.677e-02'
-  mean: '-2.002e-04'
-  min: '-1.659e-02'
+  max: '1.886e-02'
+  mean: '-2.963e-04'
+  min: '-1.759e-02'
   shape:
   - 1024
-  sum: '-2.05e-01'
+  sum: '-3.034e-01'
 grads.network.model.decoder.layers.20.self_attn_layer_norm.weight:
   device: cuda:0
-  max: '1.382e-02'
-  mean: '-9.212e-08'
-  min: '-1.511e-02'
+  max: '2.024e-02'
+  mean: '9.812e-07'
+  min: '-1.449e-02'
   shape:
   - 1024
-  sum: '-9.433e-05'
+  sum: '1.005e-03'
 grads.network.model.decoder.layers.21.fc1.bias:
   device: cuda:0
-  max: '1.186e-02'
-  mean: '-1.075e-05'
-  min: '-1.199e-02'
+  max: '1.159e-02'
+  mean: '-7.116e-06'
+  min: '-1.195e-02'
   shape:
   - 4096
-  sum: '-4.403e-02'
+  sum: '-2.915e-02'
 grads.network.model.decoder.layers.21.fc1.weight:
   device: cuda:0
-  max: '3.377e-01'
-  mean: '-3.392e-08'
-  min: '-3.296e-01'
+  max: '3.364e-01'
+  mean: '-2.245e-08'
+  min: '-3.275e-01'
   shape:
   - 4096
   - 1024
-  sum: '-1.423e-01'
+  sum: '-9.418e-02'
 grads.network.model.decoder.layers.21.fc2.bias:
   device: cuda:0
-  max: '1.882e-02'
-  mean: '-1.819e-11'
-  min: '-1.813e-02'
+  max: '2.210e-02'
+  mean: '1.455e-11'
+  min: '-2.116e-02'
   shape:
   - 1024
-  sum: '-1.863e-08'
+  sum: '1.490e-08'
 grads.network.model.decoder.layers.21.fc2.weight:
   device: cuda:0
-  max: '6.899e-02'
-  mean: '-1.137e-13'
-  min: '-8.597e-02'
+  max: '1.082e-01'
+  mean: '-5.684e-14'
+  min: '-9.473e-02'
   shape:
   - 1024
   - 4096
-  sum: '-4.768e-07'
+  sum: '-2.384e-07'
 grads.network.model.decoder.layers.21.final_layer_norm.bias:
   device: cuda:0
-  max: '2.098e-02'
-  mean: '6.844e-05'
-  min: '-2.03e-02'
+  max: '2.494e-02'
+  mean: '2.162e-05'
+  min: '-2.386e-02'
   shape:
   - 1024
-  sum: '7.009e-02'
+  sum: '2.214e-02'
 grads.network.model.decoder.layers.21.final_layer_norm.weight:
   device: cuda:0
-  max: '1.184e-02'
-  mean: '2.972e-05'
-  min: '-1.177e-02'
+  max: '2.376e-02'
+  mean: '7.015e-06'
+  min: '-1.133e-02'
   shape:
   - 1024
-  sum: '3.043e-02'
+  sum: '7.184e-03'
 grads.network.model.decoder.layers.21.self_attn.k_proj.bias:
   device: cuda:0
-  max: '4.075e-10'
-  mean: '1.086e-12'
+  max: '4.002e-10'
+  mean: '-1.572e-12'
   min: '-3.638e-10'
   shape:
   - 1024
-  sum: '1.112e-09'
+  sum: '-1.61e-09'
 grads.network.model.decoder.layers.21.self_attn.k_proj.weight:
   device: cuda:0
-  max: '2.804e-02'
-  mean: '9.459e-14'
-  min: '-3.453e-02'
+  max: '2.533e-02'
+  mean: '2.293e-13'
+  min: '-3.203e-02'
   shape:
   - 1024
   - 1024
-  sum: '9.919e-08'
+  sum: '2.405e-07'
 grads.network.model.decoder.layers.21.self_attn.out_proj.bias:
   device: cuda:0
-  max: '1.878e-02'
-  mean: '-3.638e-12'
-  min: '-1.614e-02'
+  max: '1.854e-02'
+  mean: '0.e+00'
+  min: '-1.843e-02'
   shape:
   - 1024
-  sum: '-3.725e-09'
+  sum: '0.e+00'
 grads.network.model.decoder.layers.21.self_attn.out_proj.weight:
   device: cuda:0
-  max: '9.506e-03'
-  mean: '-4.263e-14'
-  min: '-8.713e-03'
+  max: '1.236e-02'
+  mean: '1.137e-13'
+  min: '-1.02e-02'
   shape:
   - 1024
   - 1024
-  sum: '-4.470e-08'
+  sum: '1.192e-07'
 grads.network.model.decoder.layers.21.self_attn.q_proj.bias:
   device: cuda:0
-  max: '2.052e-03'
-  mean: '1.547e-05'
-  min: '-1.331e-03'
+  max: '1.768e-03'
+  mean: '1.468e-05'
+  min: '-1.166e-03'
   shape:
   - 1024
-  sum: '1.584e-02'
+  sum: '1.503e-02'
 grads.network.model.decoder.layers.21.self_attn.q_proj.weight:
   device: cuda:0
-  max: '1.767e-02'
-  mean: '-1.415e-07'
-  min: '-2.448e-02'
+  max: '1.766e-02'
+  mean: '-1.343e-07'
+  min: '-2.628e-02'
   shape:
   - 1024
   - 1024
-  sum: '-1.484e-01'
+  sum: '-1.408e-01'
 grads.network.model.decoder.layers.21.self_attn.v_proj.bias:
   device: cuda:0
-  max: '1.497e-02'
-  mean: '5.043e-05'
-  min: '-1.445e-02'
+  max: '1.447e-02'
+  mean: '1.302e-05'
+  min: '-1.778e-02'
   shape:
   - 1024
-  sum: '5.164e-02'
+  sum: '1.333e-02'
 grads.network.model.decoder.layers.21.self_attn.v_proj.weight:
   device: cuda:0
-  max: '4.172e-01'
-  mean: '-4.614e-07'
-  min: '-4.140e-01'
+  max: '4.942e-01'
+  mean: '-1.191e-07'
+  min: '-4.252e-01'
   shape:
   - 1024
   - 1024
-  sum: '-4.838e-01'
+  sum: '-1.249e-01'
 grads.network.model.decoder.layers.21.self_attn_layer_norm.bias:
   device: cuda:0
-  max: '2.011e-02'
-  mean: '-6.540e-05'
-  min: '-1.742e-02'
+  max: '1.995e-02'
+  mean: '1.246e-05'
+  min: '-1.996e-02'
   shape:
   - 1024
-  sum: '-6.697e-02'
+  sum: '1.276e-02'
 grads.network.model.decoder.layers.21.self_attn_layer_norm.weight:
   device: cuda:0
-  max: '1.288e-02'
-  mean: '-1.991e-06'
-  min: '-2.402e-02'
+  max: '2.301e-02'
+  mean: '1.724e-06'
+  min: '-1.395e-02'
   shape:
   - 1024
-  sum: '-2.039e-03'
+  sum: '1.766e-03'
 grads.network.model.decoder.layers.22.fc1.bias:
   device: cuda:0
-  max: '1.176e-02'
-  mean: '1.408e-05'
-  min: '-3.557e-02'
+  max: '1.418e-02'
+  mean: '1.925e-05'
+  min: '-3.796e-02'
   shape:
   - 4096
-  sum: '5.766e-02'
+  sum: '7.886e-02'
 grads.network.model.decoder.layers.22.fc1.weight:
   device: cuda:0
-  max: '4.620e-01'
-  mean: '1.121e-08'
-  min: '-3.343e-01'
+  max: '4.455e-01'
+  mean: '1.533e-08'
+  min: '-3.281e-01'
   shape:
   - 4096
   - 1024
-  sum: '4.700e-02'
+  sum: '6.429e-02'
 grads.network.model.decoder.layers.22.fc2.bias:
   device: cuda:0
-  max: '1.839e-02'
-  mean: '-7.276e-12'
-  min: '-1.655e-02'
+  max: '2.107e-02'
+  mean: '-2.183e-11'
+  min: '-1.798e-02'
   shape:
   - 1024
-  sum: '-7.451e-09'
+  sum: '-2.235e-08'
 grads.network.model.decoder.layers.22.fc2.weight:
   device: cuda:0
-  max: '3.808e-02'
-  mean: '5.116e-13'
-  min: '-4.035e-02'
+  max: '3.631e-02'
+  mean: '-1.137e-13'
+  min: '-5.145e-02'
   shape:
   - 1024
   - 4096
-  sum: '2.146e-06'
+  sum: '-4.768e-07'
 grads.network.model.decoder.layers.22.final_layer_norm.bias:
   device: cuda:0
-  max: '1.981e-02'
-  mean: '-1.515e-04'
-  min: '-1.822e-02'
+  max: '2.261e-02'
+  mean: '-3.098e-04'
+  min: '-1.996e-02'
   shape:
   - 1024
-  sum: '-1.552e-01'
+  sum: '-3.173e-01'
 grads.network.model.decoder.layers.22.final_layer_norm.weight:
   device: cuda:0
-  max: '7.739e-02'
-  mean: '5.868e-05'
-  min: '-8.369e-03'
+  max: '1.112e-01'
+  mean: '1.792e-05'
+  min: '-7.273e-03'
   shape:
   - 1024
-  sum: '6.009e-02'
+  sum: '1.835e-02'
 grads.network.model.decoder.layers.22.self_attn.k_proj.bias:
   device: cuda:0
-  max: '2.328e-10'
-  mean: '-8.422e-13'
-  min: '-3.056e-10'
+  max: '2.838e-10'
+  mean: '1.338e-12'
+  min: '-2.328e-10'
   shape:
   - 1024
-  sum: '-8.624e-10'
+  sum: '1.37e-09'
 grads.network.model.decoder.layers.22.self_attn.k_proj.weight:
   device: cuda:0
-  max: '1.37e-02'
-  mean: '-9.659e-15'
-  min: '-1.851e-02'
+  max: '1.521e-02'
+  mean: '-6.001e-14'
+  min: '-1.506e-02'
   shape:
   - 1024
   - 1024
-  sum: '-1.013e-08'
+  sum: '-6.292e-08'
 grads.network.model.decoder.layers.22.self_attn.out_proj.bias:
   device: cuda:0
-  max: '1.504e-02'
-  mean: '-1.819e-11'
-  min: '-1.527e-02'
+  max: '1.797e-02'
+  mean: '2.910e-11'
+  min: '-1.645e-02'
   shape:
   - 1024
-  sum: '-1.863e-08'
+  sum: '2.980e-08'
 grads.network.model.decoder.layers.22.self_attn.out_proj.weight:
   device: cuda:0
-  max: '3.731e-03'
-  mean: '-5.684e-14'
-  min: '-4.715e-03'
+  max: '1.489e-02'
+  mean: '-2.132e-13'
+  min: '-1.383e-02'
   shape:
   - 1024
   - 1024
-  sum: '-5.960e-08'
+  sum: '-2.235e-07'
 grads.network.model.decoder.layers.22.self_attn.q_proj.bias:
   device: cuda:0
-  max: '1.386e-03'
-  mean: '-1.428e-05'
-  min: '-1.402e-03'
+  max: '1.432e-03'
+  mean: '-1.077e-05'
+  min: '-1.380e-03'
   shape:
   - 1024
-  sum: '-1.463e-02'
+  sum: '-1.103e-02'
grads.network.model.decoder.layers.22.self_attn.q_proj.weight: device: cuda:0 - max: '1.612e-02' - mean: '8.246e-08' - min: '-1.700e-02' + max: '1.757e-02' + mean: '6.216e-08' + min: '-1.876e-02' shape: - 1024 - 1024 - sum: '8.646e-02' + sum: '6.518e-02' grads.network.model.decoder.layers.22.self_attn.v_proj.bias: device: cuda:0 - max: '1.086e-02' - mean: '6.069e-05' - min: '-1.123e-02' + max: '1.04e-02' + mean: '9.040e-05' + min: '-1.207e-02' shape: - 1024 - sum: '6.215e-02' + sum: '9.257e-02' grads.network.model.decoder.layers.22.self_attn.v_proj.weight: device: cuda:0 - max: '2.964e-01' - mean: '-3.504e-07' - min: '-3.047e-01' + max: '3.492e-01' + mean: '-5.219e-07' + min: '-2.943e-01' shape: - 1024 - 1024 - sum: '-3.674e-01' + sum: '-5.472e-01' grads.network.model.decoder.layers.22.self_attn_layer_norm.bias: device: cuda:0 - max: '1.571e-02' - mean: '-3.789e-05' - min: '-1.599e-02' + max: '1.879e-02' + mean: '-5.430e-05' + min: '-1.734e-02' shape: - 1024 - sum: '-3.88e-02' + sum: '-5.561e-02' grads.network.model.decoder.layers.22.self_attn_layer_norm.weight: device: cuda:0 - max: '7.293e-03' - mean: '-4.794e-06' - min: '-3.830e-02' + max: '1.860e-02' + mean: '-1.348e-05' + min: '-3.154e-02' shape: - 1024 - sum: '-4.909e-03' + sum: '-1.380e-02' grads.network.model.decoder.layers.23.fc1.bias: device: cuda:0 - max: '1.824e-02' - mean: '2.643e-05' - min: '-1.31e-02' + max: '1.947e-02' + mean: '2.517e-05' + min: '-1.008e-02' shape: - 4096 - sum: '1.083e-01' + sum: '1.031e-01' grads.network.model.decoder.layers.23.fc1.weight: device: cuda:0 - max: '1.479e-01' - mean: '4.495e-08' - min: '-2.167e-01' + max: '1.458e-01' + mean: '4.279e-08' + min: '-2.653e-01' shape: - 4096 - 1024 - sum: '1.885e-01' + sum: '1.795e-01' grads.network.model.decoder.layers.23.fc2.bias: device: cuda:0 - max: '9.662e-03' + max: '9.512e-03' mean: '1.819e-12' - min: '-1.207e-02' + min: '-9.348e-03' shape: - 1024 sum: '1.863e-09' grads.network.model.decoder.layers.23.fc2.weight: device: cuda:0 - max: '2.020e-02' - mean: '6.821e-13' - min: '-1.904e-02' + max: '2.092e-02' + mean: '-4.547e-13' + min: '-1.892e-02' shape: - 1024 - 4096 - sum: '2.861e-06' + sum: '-1.907e-06' grads.network.model.decoder.layers.23.final_layer_norm.bias: device: cuda:0 - max: '1.025e-02' - mean: '1.452e-04' - min: '-1.192e-02' + max: '1.005e-02' + mean: '-9.368e-05' + min: '-9.654e-03' shape: - 1024 - sum: '1.487e-01' + sum: '-9.593e-02' grads.network.model.decoder.layers.23.final_layer_norm.weight: device: cuda:0 - max: '9.743e-03' - mean: '3.538e-04' - min: '-1.162e-02' + max: '9.125e-03' + mean: '2.809e-04' + min: '-8.498e-03' shape: - 1024 - sum: '3.623e-01' + sum: '2.876e-01' grads.network.model.decoder.layers.23.self_attn.k_proj.bias: device: cuda:0 - max: '5.821e-10' - mean: '1.369e-12' - min: '-4.948e-10' + max: '1.048e-09' + mean: '-2.047e-13' + min: '-1.513e-09' shape: - 1024 - sum: '1.402e-09' + sum: '-2.096e-10' grads.network.model.decoder.layers.23.self_attn.k_proj.weight: device: cuda:0 - max: '7.675e-02' - mean: '1.814e-13' - min: '-9.45e-02' + max: '7.757e-02' + mean: '-1.006e-13' + min: '-1.167e-01' shape: - 1024 - 1024 - sum: '1.902e-07' + sum: '-1.055e-07' grads.network.model.decoder.layers.23.self_attn.out_proj.bias: device: cuda:0 - max: '8.239e-03' - mean: '1.819e-12' - min: '-9.641e-03' + max: '9.025e-03' + mean: '-5.457e-12' + min: '-8.085e-03' shape: - 1024 - sum: '1.863e-09' + sum: '-5.588e-09' grads.network.model.decoder.layers.23.self_attn.out_proj.weight: device: cuda:0 - max: '3.845e-03' - mean: '9.592e-14' - min: 
'-4.001e-03' + max: '4.444e-03' + mean: '-6.395e-14' + min: '-4.31e-03' shape: - 1024 - 1024 - sum: '1.006e-07' + sum: '-6.706e-08' grads.network.model.decoder.layers.23.self_attn.q_proj.bias: device: cuda:0 - max: '6.886e-03' - mean: '5.062e-05' - min: '-5.236e-03' + max: '6.065e-03' + mean: '3.442e-05' + min: '-5.142e-03' shape: - 1024 - sum: '5.183e-02' + sum: '3.525e-02' grads.network.model.decoder.layers.23.self_attn.q_proj.weight: device: cuda:0 - max: '6.223e-02' - mean: '-2.422e-07' - min: '-8.140e-02' + max: '7.615e-02' + mean: '-1.647e-07' + min: '-8.673e-02' shape: - 1024 - 1024 - sum: '-2.54e-01' + sum: '-1.727e-01' grads.network.model.decoder.layers.23.self_attn.v_proj.bias: device: cuda:0 - max: '1.707e-02' - mean: '-3.691e-05' - min: '-1.682e-02' + max: '1.326e-02' + mean: '-5.18e-05' + min: '-1.957e-02' shape: - 1024 - sum: '-3.78e-02' + sum: '-5.304e-02' grads.network.model.decoder.layers.23.self_attn.v_proj.weight: device: cuda:0 - max: '4.430e-01' - mean: '1.766e-07' - min: '-4.232e-01' + max: '5.156e-01' + mean: '2.478e-07' + min: '-3.333e-01' shape: - 1024 - 1024 - sum: '1.852e-01' + sum: '2.599e-01' grads.network.model.decoder.layers.23.self_attn_layer_norm.bias: device: cuda:0 - max: '8.470e-03' - mean: '1.14e-04' - min: '-9.558e-03' + max: '9.140e-03' + mean: '1.168e-04' + min: '-7.772e-03' shape: - 1024 - sum: '1.167e-01' + sum: '1.196e-01' grads.network.model.decoder.layers.23.self_attn_layer_norm.weight: device: cuda:0 - max: '5.296e-03' - mean: '-2.35e-05' - min: '-2.633e-02' + max: '5.779e-03' + mean: '4.173e-06' + min: '-1.385e-02' shape: - 1024 - sum: '-2.406e-02' + sum: '4.273e-03' grads.network.model.decoder.layers.3.fc1.bias: device: cuda:0 - max: '6.73e-03' - mean: '9.586e-07' - min: '-5.137e-03' + max: '5.954e-03' + mean: '1.316e-05' + min: '-8.344e-03' shape: - 4096 - sum: '3.927e-03' + sum: '5.389e-02' grads.network.model.decoder.layers.3.fc1.weight: device: cuda:0 - max: '1.203e-01' - mean: '-4.455e-10' - min: '-1.103e-01' + max: '1.064e-01' + mean: '-6.116e-09' + min: '-9.593e-02' shape: - 4096 - 1024 - sum: '-1.869e-03' + sum: '-2.565e-02' grads.network.model.decoder.layers.3.fc2.bias: device: cuda:0 - max: '7.579e-03' - mean: '-7.276e-12' - min: '-8.140e-03' + max: '8.140e-03' + mean: '-3.638e-12' + min: '-1.140e-02' shape: - 1024 - sum: '-7.451e-09' + sum: '-3.725e-09' grads.network.model.decoder.layers.3.fc2.weight: device: cuda:0 - max: '1.234e-02' - mean: '-2.274e-13' - min: '-1.24e-02' + max: '1.384e-02' + mean: '4.547e-13' + min: '-1.706e-02' shape: - 1024 - 4096 - sum: '-9.537e-07' + sum: '1.907e-06' grads.network.model.decoder.layers.3.final_layer_norm.bias: device: cuda:0 - max: '8.515e-03' - mean: '1.464e-04' - min: '-8.444e-03' + max: '9.449e-03' + mean: '2.546e-05' + min: '-1.205e-02' shape: - 1024 - sum: '1.499e-01' + sum: '2.607e-02' grads.network.model.decoder.layers.3.final_layer_norm.weight: device: cuda:0 - max: '2.337e-02' - mean: '-2.308e-05' - min: '-9.225e-03' + max: '2.066e-02' + mean: '-4.079e-05' + min: '-3.198e-02' shape: - 1024 - sum: '-2.364e-02' + sum: '-4.177e-02' grads.network.model.decoder.layers.3.self_attn.k_proj.bias: device: cuda:0 - max: '2.910e-10' - mean: '4.927e-13' - min: '-5.239e-10' + max: '3.056e-10' + mean: '-1.023e-12' + min: '-2.983e-10' shape: - 1024 - sum: '5.045e-10' + sum: '-1.047e-09' grads.network.model.decoder.layers.3.self_attn.k_proj.weight: device: cuda:0 - max: '2.496e-02' - mean: '8.982e-14' - min: '-2.865e-02' + max: '1.167e-02' + mean: '-1.421e-14' + min: '-1.363e-02' shape: - 1024 - 1024 
- sum: '9.418e-08' + sum: '-1.490e-08' grads.network.model.decoder.layers.3.self_attn.out_proj.bias: device: cuda:0 - max: '7.812e-03' - mean: '0.e+00' - min: '-9.081e-03' + max: '7.554e-03' + mean: '1.819e-11' + min: '-1.130e-02' shape: - 1024 - sum: '0.e+00' + sum: '1.863e-08' grads.network.model.decoder.layers.3.self_attn.out_proj.weight: device: cuda:0 - max: '1.240e-02' - mean: '-3.375e-14' - min: '-8.509e-03' + max: '1.395e-02' + mean: '7.105e-14' + min: '-9.944e-03' shape: - 1024 - 1024 - sum: '-3.539e-08' + sum: '7.451e-08' grads.network.model.decoder.layers.3.self_attn.q_proj.bias: device: cuda:0 - max: '3.278e-03' - mean: '4.885e-06' - min: '-1.355e-03' + max: '1.262e-03' + mean: '1.523e-05' + min: '-1.661e-03' shape: - 1024 - sum: '5.002e-03' + sum: '1.560e-02' grads.network.model.decoder.layers.3.self_attn.q_proj.weight: device: cuda:0 - max: '2.716e-02' - mean: '4.467e-08' - min: '-1.491e-02' + max: '1.264e-02' + mean: '1.393e-07' + min: '-1.569e-02' shape: - 1024 - 1024 - sum: '4.684e-02' + sum: '1.461e-01' grads.network.model.decoder.layers.3.self_attn.v_proj.bias: device: cuda:0 - max: '6.426e-03' - mean: '6.080e-05' - min: '-6.945e-03' + max: '6.315e-03' + mean: '3.350e-05' + min: '-1.044e-02' shape: - 1024 - sum: '6.226e-02' + sum: '3.431e-02' grads.network.model.decoder.layers.3.self_attn.v_proj.weight: device: cuda:0 - max: '1.024e-01' - mean: '5.56e-07' - min: '-1.103e-01' + max: '1.511e-01' + mean: '3.064e-07' + min: '-1.489e-01' shape: - 1024 - 1024 - sum: '5.830e-01' + sum: '3.212e-01' grads.network.model.decoder.layers.3.self_attn_layer_norm.bias: device: cuda:0 - max: '7.975e-03' - mean: '-3.111e-06' - min: '-9.224e-03' + max: '7.629e-03' + mean: '2.019e-05' + min: '-1.149e-02' shape: - 1024 - sum: '-3.186e-03' + sum: '2.068e-02' grads.network.model.decoder.layers.3.self_attn_layer_norm.weight: device: cuda:0 - max: '1.342e-02' - mean: '4.895e-07' - min: '-1.343e-02' + max: '1.384e-02' + mean: '1.535e-06' + min: '-3.271e-02' shape: - 1024 - sum: '5.013e-04' + sum: '1.572e-03' grads.network.model.decoder.layers.4.fc1.bias: device: cuda:0 - max: '4.634e-03' - mean: '-4.954e-06' - min: '-6.032e-03' + max: '8.716e-03' + mean: '-6.134e-06' + min: '-3.885e-03' shape: - 4096 - sum: '-2.029e-02' + sum: '-2.513e-02' grads.network.model.decoder.layers.4.fc1.weight: device: cuda:0 - max: '1.05e-01' - mean: '-9.529e-10' - min: '-1.201e-01' + max: '9.354e-02' + mean: '-1.18e-09' + min: '-1.037e-01' shape: - 4096 - 1024 - sum: '-3.997e-03' + sum: '-4.948e-03' grads.network.model.decoder.layers.4.fc2.bias: device: cuda:0 - max: '7.079e-03' - mean: '-7.276e-12' - min: '-7.644e-03' + max: '7.127e-03' + mean: '-1.455e-11' + min: '-8.873e-03' shape: - 1024 - sum: '-7.451e-09' + sum: '-1.490e-08' grads.network.model.decoder.layers.4.fc2.weight: device: cuda:0 - max: '8.690e-03' - mean: '3.411e-13' - min: '-1.055e-02' + max: '1.011e-02' + mean: '-2.274e-13' + min: '-1.157e-02' shape: - 1024 - 4096 - sum: '1.431e-06' + sum: '-9.537e-07' grads.network.model.decoder.layers.4.final_layer_norm.bias: device: cuda:0 - max: '8.031e-03' - mean: '-2.691e-05' - min: '-8.824e-03' + max: '7.855e-03' + mean: '-2.88e-05' + min: '-9.680e-03' shape: - 1024 - sum: '-2.756e-02' + sum: '-2.949e-02' grads.network.model.decoder.layers.4.final_layer_norm.weight: device: cuda:0 - max: '1.963e-02' - mean: '1.291e-05' - min: '-1.28e-02' + max: '1.503e-02' + mean: '1.502e-06' + min: '-1.015e-02' shape: - 1024 - sum: '1.322e-02' + sum: '1.538e-03' grads.network.model.decoder.layers.4.self_attn.k_proj.bias: 
device: cuda:0 - max: '4.366e-10' - mean: '3.982e-12' - min: '-2.256e-10' + max: '4.511e-10' + mean: '-4.124e-12' + min: '-2.838e-10' shape: - 1024 - sum: '4.077e-09' + sum: '-4.223e-09' grads.network.model.decoder.layers.4.self_attn.k_proj.weight: device: cuda:0 - max: '2.148e-02' - mean: '2.665e-14' - min: '-2.816e-02' + max: '2.309e-02' + mean: '-2.882e-13' + min: '-2.746e-02' shape: - 1024 - 1024 - sum: '2.794e-08' + sum: '-3.022e-07' grads.network.model.decoder.layers.4.self_attn.out_proj.bias: device: cuda:0 - max: '7.798e-03' - mean: '1.455e-11' - min: '-8.227e-03' + max: '7.763e-03' + mean: '-7.276e-12' + min: '-1.027e-02' shape: - 1024 - sum: '1.490e-08' + sum: '-7.451e-09' grads.network.model.decoder.layers.4.self_attn.out_proj.weight: device: cuda:0 - max: '9.723e-03' - mean: '5.684e-14' - min: '-1.093e-02' + max: '1.258e-02' + mean: '-5.684e-14' + min: '-8.443e-03' shape: - 1024 - 1024 - sum: '5.960e-08' + sum: '-5.960e-08' grads.network.model.decoder.layers.4.self_attn.q_proj.bias: device: cuda:0 - max: '1.283e-03' - mean: '6.846e-06' - min: '-9.64e-04' + max: '1.406e-03' + mean: '8.718e-06' + min: '-1.263e-03' shape: - 1024 - sum: '7.010e-03' + sum: '8.927e-03' grads.network.model.decoder.layers.4.self_attn.q_proj.weight: device: cuda:0 - max: '1.396e-02' - mean: '4.487e-08' - min: '-1.042e-02' + max: '1.614e-02' + mean: '5.714e-08' + min: '-1.253e-02' shape: - 1024 - 1024 - sum: '4.705e-02' + sum: '5.992e-02' grads.network.model.decoder.layers.4.self_attn.v_proj.bias: device: cuda:0 - max: '6.888e-03' - mean: '1.623e-05' - min: '-6.609e-03' + max: '7.103e-03' + mean: '4.113e-05' + min: '-7.943e-03' shape: - 1024 - sum: '1.662e-02' + sum: '4.212e-02' grads.network.model.decoder.layers.4.self_attn.v_proj.weight: device: cuda:0 - max: '1.618e-01' - mean: '1.064e-07' - min: '-1.498e-01' + max: '1.551e-01' + mean: '2.696e-07' + min: '-1.392e-01' shape: - 1024 - 1024 - sum: '1.115e-01' + sum: '2.827e-01' grads.network.model.decoder.layers.4.self_attn_layer_norm.bias: device: cuda:0 - max: '8.009e-03' - mean: '1.273e-09' - min: '-8.459e-03' + max: '8.028e-03' + mean: '7.166e-06' + min: '-1.046e-02' shape: - 1024 - sum: '1.304e-06' + sum: '7.338e-03' grads.network.model.decoder.layers.4.self_attn_layer_norm.weight: device: cuda:0 - max: '1.273e-02' - mean: '-2.657e-06' - min: '-1.02e-02' + max: '8.643e-03' + mean: '-1.091e-05' + min: '-2.483e-02' shape: - 1024 - sum: '-2.721e-03' + sum: '-1.117e-02' grads.network.model.decoder.layers.5.fc1.bias: device: cuda:0 - max: '3.97e-03' - mean: '2.958e-06' - min: '-5.305e-03' + max: '4.748e-03' + mean: '4.587e-06' + min: '-5.883e-03' shape: - 4096 - sum: '1.211e-02' + sum: '1.879e-02' grads.network.model.decoder.layers.5.fc1.weight: device: cuda:0 - max: '9.081e-02' - mean: '-1.418e-09' - min: '-9.728e-02' + max: '9.723e-02' + mean: '-2.199e-09' + min: '-1.125e-01' shape: - 4096 - 1024 - sum: '-5.947e-03' + sum: '-9.221e-03' grads.network.model.decoder.layers.5.fc2.bias: device: cuda:0 - max: '6.957e-03' - mean: '-2.183e-11' - min: '-8.184e-03' + max: '7.651e-03' + mean: '2.183e-11' + min: '-1.023e-02' shape: - 1024 - sum: '-2.235e-08' + sum: '2.235e-08' grads.network.model.decoder.layers.5.fc2.weight: device: cuda:0 - max: '1.459e-02' - mean: '-4.832e-13' - min: '-1.745e-02' + max: '1.427e-02' + mean: '4.547e-13' + min: '-1.743e-02' shape: - 1024 - 4096 - sum: '-2.027e-06' + sum: '1.907e-06' grads.network.model.decoder.layers.5.final_layer_norm.bias: device: cuda:0 - max: '7.481e-03' - mean: '-5.331e-05' - min: '-8.873e-03' + max: 
'8.459e-03' + mean: '-6.824e-05' + min: '-1.104e-02' shape: - 1024 - sum: '-5.459e-02' + sum: '-6.988e-02' grads.network.model.decoder.layers.5.final_layer_norm.weight: device: cuda:0 - max: '2.771e-02' - mean: '3.359e-05' - min: '-9.695e-03' + max: '2.276e-02' + mean: '1.546e-05' + min: '-1.198e-02' shape: - 1024 - sum: '3.439e-02' + sum: '1.583e-02' grads.network.model.decoder.layers.5.self_attn.k_proj.bias: device: cuda:0 - max: '5.093e-10' - mean: '3.512e-12' - min: '-6.403e-10' + max: '4.366e-10' + mean: '2.527e-12' + min: '-3.929e-10' shape: - 1024 - sum: '3.596e-09' + sum: '2.588e-09' grads.network.model.decoder.layers.5.self_attn.k_proj.weight: device: cuda:0 - max: '1.978e-02' - mean: '4.297e-14' - min: '-3.209e-02' + max: '2.063e-02' + mean: '6.717e-14' + min: '-1.871e-02' shape: - 1024 - 1024 - sum: '4.505e-08' + sum: '7.043e-08' grads.network.model.decoder.layers.5.self_attn.out_proj.bias: device: cuda:0 - max: '8.798e-03' - mean: '-1.455e-11' - min: '-9.078e-03' + max: '7.647e-03' + mean: '1.455e-11' + min: '-1.1e-02' shape: - 1024 - sum: '-1.490e-08' + sum: '1.490e-08' grads.network.model.decoder.layers.5.self_attn.out_proj.weight: device: cuda:0 - max: '8.847e-03' - mean: '4.405e-13' - min: '-8.859e-03' + max: '1.146e-02' + mean: '-1.137e-13' + min: '-7.558e-03' shape: - 1024 - 1024 - sum: '4.619e-07' + sum: '-1.192e-07' grads.network.model.decoder.layers.5.self_attn.q_proj.bias: device: cuda:0 - max: '2.318e-03' - mean: '-6.482e-07' - min: '-1.228e-03' + max: '1.232e-03' + mean: '5.46e-06' + min: '-1.171e-03' shape: - 1024 - sum: '-6.637e-04' + sum: '5.591e-03' grads.network.model.decoder.layers.5.self_attn.q_proj.weight: device: cuda:0 - max: '3.321e-02' - mean: '-1.654e-09' - min: '-1.745e-02' + max: '1.892e-02' + mean: '1.393e-08' + min: '-1.640e-02' shape: - 1024 - 1024 - sum: '-1.734e-03' + sum: '1.461e-02' grads.network.model.decoder.layers.5.self_attn.v_proj.bias: device: cuda:0 - max: '8.895e-03' - mean: '1.324e-05' - min: '-8.022e-03' + max: '7.63e-03' + mean: '2.826e-05' + min: '-6.905e-03' shape: - 1024 - sum: '1.356e-02' + sum: '2.894e-02' grads.network.model.decoder.layers.5.self_attn.v_proj.weight: device: cuda:0 - max: '1.966e-01' - mean: '3.378e-08' - min: '-1.69e-01' + max: '1.549e-01' + mean: '7.210e-08' + min: '-1.564e-01' shape: - 1024 - 1024 - sum: '3.542e-02' + sum: '7.561e-02' grads.network.model.decoder.layers.5.self_attn_layer_norm.bias: device: cuda:0 - max: '8.963e-03' - mean: '-2.705e-05' - min: '-9.332e-03' + max: '7.75e-03' + mean: '-6.064e-05' + min: '-1.140e-02' shape: - 1024 - sum: '-2.77e-02' + sum: '-6.21e-02' grads.network.model.decoder.layers.5.self_attn_layer_norm.weight: device: cuda:0 - max: '1.668e-02' - mean: '-1.905e-06' - min: '-1.146e-02' + max: '1.310e-02' + mean: '-7.533e-06' + min: '-1.207e-02' shape: - 1024 - sum: '-1.950e-03' + sum: '-7.714e-03' grads.network.model.decoder.layers.6.fc1.bias: device: cuda:0 - max: '1.257e-02' - mean: '-1.086e-05' - min: '-6.298e-03' + max: '8.689e-03' + mean: '-1.853e-05' + min: '-5.812e-03' shape: - 4096 - sum: '-4.448e-02' + sum: '-7.588e-02' grads.network.model.decoder.layers.6.fc1.weight: device: cuda:0 - max: '1.290e-01' - mean: '1.517e-11' - min: '-1.668e-01' + max: '1.247e-01' + mean: '2.587e-11' + min: '-1.671e-01' shape: - 4096 - 1024 - sum: '6.362e-05' + sum: '1.085e-04' grads.network.model.decoder.layers.6.fc2.bias: device: cuda:0 - max: '9.356e-03' - mean: '4.366e-11' - min: '-9.007e-03' + max: '8.694e-03' + mean: '-3.638e-12' + min: '-8.964e-03' shape: - 1024 - sum: '4.470e-08' + 
sum: '-3.725e-09' grads.network.model.decoder.layers.6.fc2.weight: device: cuda:0 - max: '2.506e-02' - mean: '5.969e-13' - min: '-2.432e-02' + max: '2.818e-02' + mean: '-1.99e-13' + min: '-2.423e-02' shape: - 1024 - 4096 - sum: '2.503e-06' + sum: '-8.345e-07' grads.network.model.decoder.layers.6.final_layer_norm.bias: device: cuda:0 - max: '1.005e-02' - mean: '3.235e-05' - min: '-9.823e-03' + max: '9.466e-03' + mean: '1.768e-05' + min: '-9.583e-03' shape: - 1024 - sum: '3.312e-02' + sum: '1.811e-02' grads.network.model.decoder.layers.6.final_layer_norm.weight: device: cuda:0 - max: '4.029e-02' - mean: '7.093e-06' - min: '-1.064e-02' + max: '3.202e-02' + mean: '1.739e-05' + min: '-1.373e-02' shape: - 1024 - sum: '7.264e-03' + sum: '1.780e-02' grads.network.model.decoder.layers.6.self_attn.k_proj.bias: device: cuda:0 - max: '2.212e-09' - mean: '2.743e-12' - min: '-4.657e-10' + max: '1.048e-09' + mean: '2.847e-12' + min: '-5.821e-10' shape: - 1024 - sum: '2.809e-09' + sum: '2.915e-09' grads.network.model.decoder.layers.6.self_attn.k_proj.weight: device: cuda:0 - max: '5.747e-02' - mean: '-1.987e-13' - min: '-6.243e-02' + max: '7.468e-02' + mean: '3.264e-14' + min: '-7.459e-02' shape: - 1024 - 1024 - sum: '-2.084e-07' + sum: '3.423e-08' grads.network.model.decoder.layers.6.self_attn.out_proj.bias: device: cuda:0 - max: '8.222e-03' - mean: '7.276e-12' - min: '-7.921e-03' + max: '9.673e-03' + mean: '-7.276e-12' + min: '-9.632e-03' shape: - 1024 - sum: '7.451e-09' + sum: '-7.451e-09' grads.network.model.decoder.layers.6.self_attn.out_proj.weight: device: cuda:0 - max: '7.939e-03' - mean: '8.527e-14' - min: '-1.069e-02' + max: '1.069e-02' + mean: '-2.558e-13' + min: '-1.237e-02' shape: - 1024 - 1024 - sum: '8.941e-08' + sum: '-2.682e-07' grads.network.model.decoder.layers.6.self_attn.q_proj.bias: device: cuda:0 - max: '1.656e-03' - mean: '-7.843e-06' - min: '-2.958e-03' + max: '1.893e-03' + mean: '-1.271e-05' + min: '-3.243e-03' shape: - 1024 - sum: '-8.031e-03' + sum: '-1.302e-02' grads.network.model.decoder.layers.6.self_attn.q_proj.weight: device: cuda:0 - max: '2.914e-02' - mean: '-3.261e-09' - min: '-2.954e-02' + max: '4.317e-02' + mean: '-5.287e-09' + min: '-5.174e-02' shape: - 1024 - 1024 - sum: '-3.42e-03' + sum: '-5.543e-03' grads.network.model.decoder.layers.6.self_attn.v_proj.bias: device: cuda:0 - max: '5.932e-03' - mean: '1.089e-04' - min: '-5.01e-03' + max: '6.756e-03' + mean: '8.55e-05' + min: '-5.219e-03' shape: - 1024 - sum: '1.115e-01' + sum: '8.755e-02' grads.network.model.decoder.layers.6.self_attn.v_proj.weight: device: cuda:0 - max: '1.312e-01' - mean: '4.527e-08' - min: '-1.643e-01' + max: '1.221e-01' + mean: '3.555e-08' + min: '-1.883e-01' shape: - 1024 - 1024 - sum: '4.747e-02' + sum: '3.728e-02' grads.network.model.decoder.layers.6.self_attn_layer_norm.bias: device: cuda:0 - max: '8.551e-03' - mean: '9.577e-06' - min: '-8.239e-03' + max: '1.004e-02' + mean: '2.542e-06' + min: '-9.872e-03' shape: - 1024 - sum: '9.807e-03' + sum: '2.603e-03' grads.network.model.decoder.layers.6.self_attn_layer_norm.weight: device: cuda:0 - max: '3.59e-02' - mean: '-3.938e-06' - min: '-9.743e-03' + max: '2.376e-02' + mean: '-1.475e-05' + min: '-1.311e-02' shape: - 1024 - sum: '-4.032e-03' + sum: '-1.511e-02' grads.network.model.decoder.layers.7.fc1.bias: device: cuda:0 - max: '9.245e-03' - mean: '-1.028e-05' - min: '-5.297e-03' + max: '1.040e-02' + mean: '-1.111e-05' + min: '-5.846e-03' shape: - 4096 - sum: '-4.213e-02' + sum: '-4.551e-02' grads.network.model.decoder.layers.7.fc1.weight: 
device: cuda:0 - max: '1.104e-01' - mean: '-1.882e-09' - min: '-2.285e-01' + max: '1.282e-01' + mean: '-2.034e-09' + min: '-2.541e-01' shape: - 4096 - 1024 - sum: '-7.895e-03' + sum: '-8.530e-03' grads.network.model.decoder.layers.7.fc2.bias: device: cuda:0 - max: '1.005e-02' - mean: '1.455e-11' - min: '-9.898e-03' + max: '8.647e-03' + mean: '-1.819e-12' + min: '-1.108e-02' shape: - 1024 - sum: '1.490e-08' + sum: '-1.863e-09' grads.network.model.decoder.layers.7.fc2.weight: device: cuda:0 - max: '1.995e-02' - mean: '2.274e-13' - min: '-2.254e-02' + max: '2.036e-02' + mean: '-2.274e-13' + min: '-2.125e-02' shape: - 1024 - 4096 - sum: '9.537e-07' + sum: '-9.537e-07' grads.network.model.decoder.layers.7.final_layer_norm.bias: device: cuda:0 - max: '1.121e-02' - mean: '7.444e-05' - min: '-1.076e-02' + max: '9.436e-03' + mean: '1.051e-04' + min: '-1.201e-02' shape: - 1024 - sum: '7.622e-02' + sum: '1.076e-01' grads.network.model.decoder.layers.7.final_layer_norm.weight: device: cuda:0 - max: '3.652e-02' - mean: '8.827e-06' - min: '-1.238e-02' + max: '2.502e-02' + mean: '-2.608e-06' + min: '-1.341e-02' shape: - 1024 - sum: '9.038e-03' + sum: '-2.670e-03' grads.network.model.decoder.layers.7.self_attn.k_proj.bias: device: cuda:0 - max: '9.313e-10' - mean: '3.886e-12' - min: '-3.347e-10' + max: '4.075e-10' + mean: '1.863e-13' + min: '-3.492e-10' shape: - 1024 - sum: '3.979e-09' + sum: '1.908e-10' grads.network.model.decoder.layers.7.self_attn.k_proj.weight: device: cuda:0 - max: '4.476e-02' - mean: '-3.036e-14' - min: '-3.419e-02' + max: '3.309e-02' + mean: '6.817e-14' + min: '-4.19e-02' shape: - 1024 - 1024 - sum: '-3.184e-08' + sum: '7.148e-08' grads.network.model.decoder.layers.7.self_attn.out_proj.bias: device: cuda:0 - max: '9.546e-03' - mean: '2.910e-11' - min: '-8.879e-03' + max: '7.477e-03' + mean: '-5.457e-12' + min: '-9.228e-03' shape: - 1024 - sum: '2.980e-08' + sum: '-5.588e-09' grads.network.model.decoder.layers.7.self_attn.out_proj.weight: device: cuda:0 - max: '1.048e-02' - mean: '-4.974e-14' - min: '-8.69e-03' + max: '1.003e-02' + mean: '-1.563e-13' + min: '-7.771e-03' shape: - 1024 - 1024 - sum: '-5.215e-08' + sum: '-1.639e-07' grads.network.model.decoder.layers.7.self_attn.q_proj.bias: device: cuda:0 - max: '2.16e-03' - mean: '-8.566e-06' - min: '-2.123e-03' + max: '2.209e-03' + mean: '-4.411e-06' + min: '-1.604e-03' shape: - 1024 - sum: '-8.771e-03' + sum: '-4.517e-03' grads.network.model.decoder.layers.7.self_attn.q_proj.weight: device: cuda:0 - max: '4.079e-02' - mean: '1.162e-09' - min: '-3.934e-02' + max: '3.379e-02' + mean: '5.986e-10' + min: '-2.946e-02' shape: - 1024 - 1024 - sum: '1.218e-03' + sum: '6.277e-04' grads.network.model.decoder.layers.7.self_attn.v_proj.bias: device: cuda:0 - max: '7.006e-03' - mean: '7.293e-05' - min: '-6.243e-03' + max: '6.926e-03' + mean: '5.966e-05' + min: '-6.282e-03' shape: - 1024 - sum: '7.468e-02' + sum: '6.109e-02' grads.network.model.decoder.layers.7.self_attn.v_proj.weight: device: cuda:0 - max: '1.412e-01' - mean: '-9.893e-09' - min: '-1.577e-01' + max: '1.424e-01' + mean: '-8.094e-09' + min: '-1.385e-01' shape: - 1024 - 1024 - sum: '-1.037e-02' + sum: '-8.487e-03' grads.network.model.decoder.layers.7.self_attn_layer_norm.bias: device: cuda:0 - max: '1.008e-02' - mean: '7.626e-05' - min: '-8.979e-03' + max: '7.795e-03' + mean: '8.083e-05' + min: '-9.428e-03' shape: - 1024 - sum: '7.809e-02' + sum: '8.277e-02' grads.network.model.decoder.layers.7.self_attn_layer_norm.weight: device: cuda:0 - max: '4.077e-02' - mean: '-3.710e-06' - 
min: '-1.091e-02' + max: '3.435e-02' + mean: '-2.633e-06' + min: '-1.194e-02' shape: - 1024 - sum: '-3.8e-03' + sum: '-2.696e-03' grads.network.model.decoder.layers.8.fc1.bias: device: cuda:0 - max: '6.571e-03' - mean: '-9.239e-07' - min: '-1.191e-02' + max: '9.447e-03' + mean: '-1.000e-05' + min: '-1.029e-02' shape: - 4096 - sum: '-3.784e-03' + sum: '-4.096e-02' grads.network.model.decoder.layers.8.fc1.weight: device: cuda:0 - max: '1.528e-01' - mean: '-9.493e-10' - min: '-1.682e-01' + max: '1.788e-01' + mean: '-1.028e-08' + min: '-1.565e-01' shape: - 4096 - 1024 - sum: '-3.982e-03' + sum: '-4.31e-02' grads.network.model.decoder.layers.8.fc2.bias: device: cuda:0 - max: '1.032e-02' - mean: '7.276e-12' - min: '-1.079e-02' + max: '9.312e-03' + mean: '1.819e-11' + min: '-9.654e-03' shape: - 1024 - sum: '7.451e-09' + sum: '1.863e-08' grads.network.model.decoder.layers.8.fc2.weight: device: cuda:0 - max: '1.952e-02' - mean: '0.e+00' - min: '-2.184e-02' + max: '2.393e-02' + mean: '6.821e-13' + min: '-1.897e-02' shape: - 1024 - 4096 - sum: '0.e+00' + sum: '2.861e-06' grads.network.model.decoder.layers.8.final_layer_norm.bias: device: cuda:0 - max: '1.166e-02' - mean: '-6.062e-05' - min: '-1.191e-02' + max: '1.033e-02' + mean: '-9.404e-05' + min: '-1.074e-02' shape: - 1024 - sum: '-6.208e-02' + sum: '-9.63e-02' grads.network.model.decoder.layers.8.final_layer_norm.weight: device: cuda:0 - max: '1.406e-02' - mean: '-2.412e-05' - min: '-3.303e-02' + max: '8.312e-03' + mean: '-3.398e-05' + min: '-2.52e-02' shape: - 1024 - sum: '-2.470e-02' + sum: '-3.479e-02' grads.network.model.decoder.layers.8.self_attn.k_proj.bias: device: cuda:0 max: '4.657e-10' - mean: '-6.843e-13' - min: '-4.657e-10' + mean: '1.157e-12' + min: '-7.567e-10' shape: - 1024 - sum: '-7.008e-10' + sum: '1.185e-09' grads.network.model.decoder.layers.8.self_attn.k_proj.weight: device: cuda:0 - max: '1.918e-02' - mean: '6.717e-15' - min: '-2.013e-02' + max: '2.660e-02' + mean: '-1.255e-14' + min: '-2.215e-02' shape: - 1024 - 1024 - sum: '7.043e-09' + sum: '-1.315e-08' grads.network.model.decoder.layers.8.self_attn.out_proj.bias: device: cuda:0 - max: '9.190e-03' - mean: '1.091e-11' - min: '-1.076e-02' + max: '8.574e-03' + mean: '-1.091e-11' + min: '-1.133e-02' shape: - 1024 - sum: '1.118e-08' + sum: '-1.118e-08' grads.network.model.decoder.layers.8.self_attn.out_proj.weight: device: cuda:0 - max: '5.318e-03' - mean: '0.e+00' - min: '-6.160e-03' + max: '5.791e-03' + mean: '1.776e-13' + min: '-7.842e-03' shape: - 1024 - 1024 - sum: '0.e+00' + sum: '1.863e-07' grads.network.model.decoder.layers.8.self_attn.q_proj.bias: device: cuda:0 - max: '1.440e-03' - mean: '6.483e-06' - min: '-1.473e-03' + max: '2.176e-03' + mean: '1.136e-05' + min: '-1.464e-03' shape: - 1024 - sum: '6.638e-03' + sum: '1.164e-02' grads.network.model.decoder.layers.8.self_attn.q_proj.weight: device: cuda:0 - max: '2.656e-02' - mean: '-1.008e-08' - min: '-3.182e-02' + max: '2.919e-02' + mean: '-1.766e-08' + min: '-3.662e-02' shape: - 1024 - 1024 - sum: '-1.056e-02' + sum: '-1.852e-02' grads.network.model.decoder.layers.8.self_attn.v_proj.bias: device: cuda:0 - max: '6.510e-03' - mean: '-4.705e-05' - min: '-9.331e-03' + max: '7.759e-03' + mean: '5.574e-05' + min: '-1.002e-02' shape: - 1024 - sum: '-4.817e-02' + sum: '5.708e-02' grads.network.model.decoder.layers.8.self_attn.v_proj.weight: device: cuda:0 - max: '2.509e-01' - mean: '7.311e-08' - min: '-1.305e-01' + max: '2.583e-01' + mean: '-8.663e-08' + min: '-1.763e-01' shape: - 1024 - 1024 - sum: '7.666e-02' + sum: 
'-9.083e-02' grads.network.model.decoder.layers.8.self_attn_layer_norm.bias: device: cuda:0 - max: '9.717e-03' - mean: '4.48e-05' - min: '-1.114e-02' + max: '8.934e-03' + mean: '3.720e-05' + min: '-1.170e-02' shape: - 1024 - sum: '4.587e-02' + sum: '3.81e-02' grads.network.model.decoder.layers.8.self_attn_layer_norm.weight: device: cuda:0 - max: '2.655e-02' - mean: '3.601e-07' - min: '-1.405e-02' + max: '1.159e-02' + mean: '-3.363e-06' + min: '-1.334e-02' shape: - 1024 - sum: '3.687e-04' + sum: '-3.444e-03' grads.network.model.decoder.layers.9.fc1.bias: device: cuda:0 - max: '1.194e-02' - mean: '-2.190e-05' - min: '-1.095e-02' + max: '1.084e-02' + mean: '-1.724e-05' + min: '-8.211e-03' shape: - 4096 - sum: '-8.971e-02' + sum: '-7.062e-02' grads.network.model.decoder.layers.9.fc1.weight: device: cuda:0 - max: '2.009e-01' - mean: '-2.11e-08' - min: '-2.559e-01' + max: '1.987e-01' + mean: '-1.661e-08' + min: '-2.721e-01' shape: - 4096 - 1024 - sum: '-8.849e-02' + sum: '-6.966e-02' grads.network.model.decoder.layers.9.fc2.bias: device: cuda:0 - max: '1.111e-02' - mean: '-3.274e-11' - min: '-9.881e-03' + max: '1.032e-02' + mean: '-7.276e-12' + min: '-1.013e-02' shape: - 1024 - sum: '-3.353e-08' + sum: '-7.451e-09' grads.network.model.decoder.layers.9.fc2.weight: device: cuda:0 - max: '2.793e-02' - mean: '-7.958e-13' - min: '-2.691e-02' + max: '2.487e-02' + mean: '-5.684e-13' + min: '-2.754e-02' shape: - 1024 - 4096 - sum: '-3.338e-06' + sum: '-2.384e-06' grads.network.model.decoder.layers.9.final_layer_norm.bias: device: cuda:0 - max: '1.192e-02' - mean: '-5.165e-05' - min: '-1.084e-02' + max: '1.148e-02' + mean: '-7.486e-05' + min: '-1.105e-02' shape: - 1024 - sum: '-5.289e-02' + sum: '-7.665e-02' grads.network.model.decoder.layers.9.final_layer_norm.weight: device: cuda:0 - max: '4.971e-02' - mean: '-1.967e-05' - min: '-1.012e-02' + max: '5.081e-02' + mean: '3.829e-06' + min: '-1.181e-02' shape: - 1024 - sum: '-2.014e-02' + sum: '3.921e-03' grads.network.model.decoder.layers.9.self_attn.k_proj.bias: device: cuda:0 - max: '8.149e-10' - mean: '-1.908e-12' - min: '-2.328e-09' + max: '1.397e-09' + mean: '-3.783e-12' + min: '-2.095e-09' shape: - 1024 - sum: '-1.953e-09' + sum: '-3.874e-09' grads.network.model.decoder.layers.9.self_attn.k_proj.weight: device: cuda:0 - max: '1.124e-01' - mean: '-7.683e-14' - min: '-9.914e-02' + max: '1.288e-01' + mean: '2.314e-13' + min: '-1.159e-01' shape: - 1024 - 1024 - sum: '-8.056e-08' + sum: '2.427e-07' grads.network.model.decoder.layers.9.self_attn.out_proj.bias: device: cuda:0 - max: '1.092e-02' - mean: '6.366e-12' - min: '-9.128e-03' + max: '9.677e-03' + mean: '-2.183e-11' + min: '-9.679e-03' shape: - 1024 - sum: '6.519e-09' + sum: '-2.235e-08' grads.network.model.decoder.layers.9.self_attn.out_proj.weight: device: cuda:0 - max: '8.925e-03' - mean: '1.705e-13' - min: '-9.966e-03' + max: '8.051e-03' + mean: '2.558e-13' + min: '-8.809e-03' shape: - 1024 - 1024 - sum: '1.788e-07' + sum: '2.682e-07' grads.network.model.decoder.layers.9.self_attn.q_proj.bias: device: cuda:0 - max: '2.722e-03' - mean: '-4.813e-06' - min: '-3.995e-03' + max: '3.228e-03' + mean: '-6.335e-06' + min: '-4.683e-03' shape: - 1024 - sum: '-4.929e-03' + sum: '-6.487e-03' grads.network.model.decoder.layers.9.self_attn.q_proj.weight: device: cuda:0 - max: '8.122e-02' - mean: '1.562e-08' - min: '-6.148e-02' + max: '8.449e-02' + mean: '2.055e-08' + min: '-6.571e-02' shape: - 1024 - 1024 - sum: '1.637e-02' + sum: '2.155e-02' grads.network.model.decoder.layers.9.self_attn.v_proj.bias: device: 
cuda:0 - max: '1.079e-02' - mean: '-3.37e-05' - min: '-9.870e-03' + max: '1.115e-02' + mean: '-3.493e-05' + min: '-9.448e-03' shape: - 1024 - sum: '-3.451e-02' + sum: '-3.577e-02' grads.network.model.decoder.layers.9.self_attn.v_proj.weight: device: cuda:0 - max: '2.169e-01' - mean: '1.093e-07' - min: '-2.438e-01' + max: '2.284e-01' + mean: '1.133e-07' + min: '-2.614e-01' shape: - 1024 - 1024 - sum: '1.146e-01' + sum: '1.188e-01' grads.network.model.decoder.layers.9.self_attn_layer_norm.bias: device: cuda:0 - max: '1.143e-02' - mean: '5.285e-05' - min: '-9.462e-03' + max: '1.015e-02' + mean: '4.447e-05' + min: '-1.010e-02' shape: - 1024 - sum: '5.412e-02' + sum: '4.553e-02' grads.network.model.decoder.layers.9.self_attn_layer_norm.weight: device: cuda:0 - max: '2.183e-02' - mean: '-1.891e-07' - min: '-2.175e-02' + max: '9.655e-03' + mean: '2.292e-06' + min: '-2.027e-02' shape: - 1024 - sum: '-1.936e-04' + sum: '2.347e-03' grads.network.model.decoder.project_in.weight: device: cuda:0 - max: '2.598e-02' - mean: '1.601e-07' - min: '-2.329e-02' + max: '2.645e-02' + mean: '-3.396e-07' + min: '-2.839e-02' shape: - 1024 - 512 - sum: '8.391e-02' + sum: '-1.780e-01' grads.network.model.decoder.project_out.weight: device: cuda:0 - max: '1.123e-01' - mean: '-2.417e-07' - min: '-8.718e-02' + max: '9.968e-02' + mean: '-3.139e-07' + min: '-1.016e-01' shape: - 512 - 1024 - sum: '-1.267e-01' + sum: '-1.646e-01' outputs.loss: device: cuda:0 - max: '4.169e+00' - mean: '4.169e+00' - min: '4.169e+00' + max: '4.05e+00' + mean: '4.05e+00' + min: '4.05e+00' shape: [] - sum: '4.169e+00' + sum: '4.05e+00' diff --git a/.regression_files/project/algorithms/llm_finetuning_test/test_forward_pass_is_reproducible/cuda/llm_finetuning.yaml b/.regression_files/project/algorithms/llm_finetuning_test/test_forward_pass_is_reproducible/cuda/llm_finetuning.yaml index d87dc73e..41f33102 100644 --- a/.regression_files/project/algorithms/llm_finetuning_test/test_forward_pass_is_reproducible/cuda/llm_finetuning.yaml +++ b/.regression_files/project/algorithms/llm_finetuning_test/test_forward_pass_is_reproducible/cuda/llm_finetuning.yaml @@ -10,549 +10,549 @@ input.attention_mask: input.input_ids: device: cuda:0 max: 50118 - mean: '5.265e+03' + mean: '5.447e+03' min: 2 shape: - 8 - 256 - sum: 10781837 + sum: 11154886 input.labels: device: cuda:0 max: 50118 - mean: '5.265e+03' + mean: '5.447e+03' min: 2 shape: - 8 - 256 - sum: 10781837 + sum: 11154886 out.logits: device: cuda:0 - max: '3.507e+01' - mean: '-4.837e+00' - min: '-3.298e+01' + max: '3.537e+01' + mean: '-4.715e+00' + min: '-3.336e+01' shape: - 8 - 256 - 50272 - sum: '-4.98e+08' + sum: '-4.855e+08' out.loss: device: cuda:0 - max: '4.169e+00' - mean: '4.169e+00' - min: '4.169e+00' + max: '4.05e+00' + mean: '4.05e+00' + min: '4.05e+00' shape: [] - sum: '4.169e+00' + sum: '4.05e+00' out.past_key_values.0.0: device: cuda:0 - max: '1.78e+00' - mean: '-3.581e-03' - min: '-2.005e+00' + max: '1.824e+00' + mean: '-3.677e-03' + min: '-2.004e+00' shape: - 8 - 16 - 256 - 64 - sum: '-7.510e+03' + sum: '-7.711e+03' out.past_key_values.0.1: device: cuda:0 - max: '1.665e-01' - mean: '8.363e-05' - min: '-1.568e-01' + max: '1.91e-01' + mean: '6.668e-05' + min: '-1.719e-01' shape: - 8 - 16 - 256 - 64 - sum: '1.754e+02' + sum: '1.398e+02' out.past_key_values.1.0: device: cuda:0 - max: '1.229e+01' - mean: '5.157e-03' - min: '-1.163e+01' + max: '1.150e+01' + mean: '5.521e-03' + min: '-1.144e+01' shape: - 8 - 16 - 256 - 64 - sum: '1.082e+04' + sum: '1.158e+04' out.past_key_values.1.1: device: 
cuda:0 - max: '4.479e+00' - mean: '2.619e-03' - min: '-4.337e+00' + max: '4.35e+00' + mean: '2.593e-03' + min: '-4.527e+00' shape: - 8 - 16 - 256 - 64 - sum: '5.493e+03' + sum: '5.439e+03' out.past_key_values.10.0: device: cuda:0 - max: '1.004e+01' - mean: '5.535e-02' - min: '-9.954e+00' + max: '9.741e+00' + mean: '5.765e-02' + min: '-1.030e+01' shape: - 8 - 16 - 256 - 64 - sum: '1.161e+05' + sum: '1.209e+05' out.past_key_values.10.1: device: cuda:0 - max: '5.407e+00' - mean: '7.382e-03' - min: '-5.421e+00' + max: '5.526e+00' + mean: '1.023e-02' + min: '-5.248e+00' shape: - 8 - 16 - 256 - 64 - sum: '1.548e+04' + sum: '2.145e+04' out.past_key_values.11.0: device: cuda:0 - max: '9.222e+00' - mean: '4.912e-02' - min: '-8.656e+00' + max: '9.2e+00' + mean: '4.524e-02' + min: '-8.32e+00' shape: - 8 - 16 - 256 - 64 - sum: '1.030e+05' + sum: '9.488e+04' out.past_key_values.11.1: device: cuda:0 - max: '4.49e+00' - mean: '6.813e-03' - min: '-4.356e+00' + max: '4.676e+00' + mean: '7.994e-03' + min: '-4.337e+00' shape: - 8 - 16 - 256 - 64 - sum: '1.429e+04' + sum: '1.676e+04' out.past_key_values.12.0: device: cuda:0 - max: '8.792e+00' - mean: '-1.832e-03' - min: '-8.094e+00' + max: '8.099e+00' + mean: '-4.339e-03' + min: '-8.358e+00' shape: - 8 - 16 - 256 - 64 - sum: '-3.842e+03' + sum: '-9.101e+03' out.past_key_values.12.1: device: cuda:0 - max: '5.004e+00' - mean: '5.763e-03' - min: '-5.606e+00' + max: '5.357e+00' + mean: '7.804e-03' + min: '-5.152e+00' shape: - 8 - 16 - 256 - 64 - sum: '1.209e+04' + sum: '1.637e+04' out.past_key_values.13.0: device: cuda:0 - max: '8.343e+00' - mean: '-3.719e-03' - min: '-8.637e+00' + max: '8.449e+00' + mean: '-9.491e-03' + min: '-8.29e+00' shape: - 8 - 16 - 256 - 64 - sum: '-7.799e+03' + sum: '-1.990e+04' out.past_key_values.13.1: device: cuda:0 - max: '4.977e+00' - mean: '2.154e-03' - min: '-4.84e+00' + max: '4.555e+00' + mean: '3.872e-03' + min: '-5.178e+00' shape: - 8 - 16 - 256 - 64 - sum: '4.518e+03' + sum: '8.120e+03' out.past_key_values.14.0: device: cuda:0 - max: '8.527e+00' - mean: '-3.708e-02' - min: '-8.576e+00' + max: '7.696e+00' + mean: '-4.042e-02' + min: '-8.394e+00' shape: - 8 - 16 - 256 - 64 - sum: '-7.777e+04' + sum: '-8.477e+04' out.past_key_values.14.1: device: cuda:0 - max: '5.15e+00' - mean: '5.069e-03' - min: '-5.532e+00' + max: '5.031e+00' + mean: '3.803e-03' + min: '-5.123e+00' shape: - 8 - 16 - 256 - 64 - sum: '1.063e+04' + sum: '7.976e+03' out.past_key_values.15.0: device: cuda:0 - max: '8.152e+00' - mean: '2.418e-02' - min: '-9.593e+00' + max: '8.108e+00' + mean: '2.572e-02' + min: '-1.000e+01' shape: - 8 - 16 - 256 - 64 - sum: '5.071e+04' + sum: '5.394e+04' out.past_key_values.15.1: device: cuda:0 - max: '5.053e+00' - mean: '-9.564e-03' - min: '-5.126e+00' + max: '4.85e+00' + mean: '-8.774e-03' + min: '-4.855e+00' shape: - 8 - 16 - 256 - 64 - sum: '-2.006e+04' + sum: '-1.840e+04' out.past_key_values.16.0: device: cuda:0 - max: '8.555e+00' - mean: '-2.003e-02' - min: '-7.960e+00' + max: '8.927e+00' + mean: '-1.676e-02' + min: '-8.144e+00' shape: - 8 - 16 - 256 - 64 - sum: '-4.201e+04' + sum: '-3.515e+04' out.past_key_values.16.1: device: cuda:0 - max: '4.549e+00' - mean: '-9.877e-03' - min: '-5.229e+00' + max: '4.793e+00' + mean: '-1.081e-02' + min: '-5.854e+00' shape: - 8 - 16 - 256 - 64 - sum: '-2.071e+04' + sum: '-2.268e+04' out.past_key_values.17.0: device: cuda:0 - max: '9.987e+00' - mean: '1.882e-02' - min: '-1.047e+01' + max: '1.004e+01' + mean: '2.810e-02' + min: '-9.726e+00' shape: - 8 - 16 - 256 - 64 - sum: '3.946e+04' + sum: 
'5.893e+04' out.past_key_values.17.1: device: cuda:0 - max: '5.499e+00' - mean: '4.046e-03' - min: '-4.751e+00' + max: '5.284e+00' + mean: '5.285e-03' + min: '-5.681e+00' shape: - 8 - 16 - 256 - 64 - sum: '8.486e+03' + sum: '1.108e+04' out.past_key_values.18.0: device: cuda:0 - max: '8.157e+00' - mean: '4.879e-02' - min: '-8.859e+00' + max: '8.982e+00' + mean: '5.052e-02' + min: '-8.762e+00' shape: - 8 - 16 - 256 - 64 - sum: '1.023e+05' + sum: '1.059e+05' out.past_key_values.18.1: device: cuda:0 - max: '4.687e+00' - mean: '-2.521e-03' - min: '-4.955e+00' + max: '4.748e+00' + mean: '-1.694e-03' + min: '-4.891e+00' shape: - 8 - 16 - 256 - 64 - sum: '-5.287e+03' + sum: '-3.554e+03' out.past_key_values.19.0: device: cuda:0 - max: '1.015e+01' - mean: '1.528e-02' - min: '-1.027e+01' + max: '9.813e+00' + mean: '1.273e-02' + min: '-9.707e+00' shape: - 8 - 16 - 256 - 64 - sum: '3.205e+04' + sum: '2.670e+04' out.past_key_values.19.1: device: cuda:0 - max: '4.66e+00' - mean: '-1.661e-02' - min: '-5.154e+00' + max: '4.619e+00' + mean: '-1.924e-02' + min: '-4.700e+00' shape: - 8 - 16 - 256 - 64 - sum: '-3.483e+04' + sum: '-4.036e+04' out.past_key_values.2.0: device: cuda:0 - max: '1.064e+01' - mean: '7.244e-02' - min: '-1.031e+01' + max: '1.074e+01' + mean: '6.862e-02' + min: '-1.063e+01' shape: - 8 - 16 - 256 - 64 - sum: '1.519e+05' + sum: '1.439e+05' out.past_key_values.2.1: device: cuda:0 - max: '4.712e+00' - mean: '2.248e-03' - min: '-4.234e+00' + max: '4.396e+00' + mean: '2.223e-03' + min: '-4.462e+00' shape: - 8 - 16 - 256 - 64 - sum: '4.714e+03' + sum: '4.662e+03' out.past_key_values.20.0: device: cuda:0 - max: '1.099e+01' - mean: '5.109e-02' - min: '-1.172e+01' + max: '1.106e+01' + mean: '5.73e-02' + min: '-1.099e+01' shape: - 8 - 16 - 256 - 64 - sum: '1.071e+05' + sum: '1.202e+05' out.past_key_values.20.1: device: cuda:0 - max: '5.022e+00' - mean: '5.842e-03' - min: '-6.663e+00' + max: '4.813e+00' + mean: '6.246e-03' + min: '-5.477e+00' shape: - 8 - 16 - 256 - 64 - sum: '1.225e+04' + sum: '1.31e+04' out.past_key_values.21.0: device: cuda:0 - max: '1.132e+01' - mean: '5.089e-02' - min: '-1.055e+01' + max: '1.079e+01' + mean: '4.522e-02' + min: '-1.039e+01' shape: - 8 - 16 - 256 - 64 - sum: '1.067e+05' + sum: '9.484e+04' out.past_key_values.21.1: device: cuda:0 - max: '4.731e+00' - mean: '1.276e-02' - min: '-4.486e+00' + max: '4.631e+00' + mean: '1.379e-02' + min: '-4.818e+00' shape: - 8 - 16 - 256 - 64 - sum: '2.676e+04' + sum: '2.891e+04' out.past_key_values.22.0: device: cuda:0 - max: '1.03e+01' - mean: '4.091e-02' - min: '-1.162e+01' + max: '1.065e+01' + mean: '4.017e-02' + min: '-1.125e+01' shape: - 8 - 16 - 256 - 64 - sum: '8.579e+04' + sum: '8.425e+04' out.past_key_values.22.1: device: cuda:0 - max: '4.647e+00' - mean: '8.237e-03' - min: '-5.057e+00' + max: '5.105e+00' + mean: '5.328e-03' + min: '-4.445e+00' shape: - 8 - 16 - 256 - 64 - sum: '1.727e+04' + sum: '1.117e+04' out.past_key_values.23.0: device: cuda:0 - max: '8.126e+00' - mean: '1.065e-02' - min: '-8.797e+00' + max: '9.464e+00' + mean: '1.056e-02' + min: '-8.453e+00' shape: - 8 - 16 - 256 - 64 - sum: '2.233e+04' + sum: '2.214e+04' out.past_key_values.23.1: device: cuda:0 - max: '5.348e+00' - mean: '-1.145e-03' - min: '-4.637e+00' + max: '4.379e+00' + mean: '-1.464e-03' + min: '-4.951e+00' shape: - 8 - 16 - 256 - 64 - sum: '-2.401e+03' + sum: '-3.069e+03' out.past_key_values.3.0: device: cuda:0 - max: '1.095e+01' - mean: '4.414e-02' - min: '-1.056e+01' + max: '1.142e+01' + mean: '4.512e-02' + min: '-1.147e+01' shape: - 8 - 16 - 
256 - 64 - sum: '9.256e+04' + sum: '9.462e+04' out.past_key_values.3.1: device: cuda:0 - max: '4.339e+00' - mean: '-2.309e-03' - min: '-4.796e+00' + max: '4.416e+00' + mean: '-3.978e-04' + min: '-4.476e+00' shape: - 8 - 16 - 256 - 64 - sum: '-4.843e+03' + sum: '-8.342e+02' out.past_key_values.4.0: device: cuda:0 - max: '1.216e+01' - mean: '-2.735e-02' - min: '-1.132e+01' + max: '1.193e+01' + mean: '-3.041e-02' + min: '-1.091e+01' shape: - 8 - 16 - 256 - 64 - sum: '-5.735e+04' + sum: '-6.377e+04' out.past_key_values.4.1: device: cuda:0 - max: '4.455e+00' - mean: '5.272e-04' - min: '-5.199e+00' + max: '4.839e+00' + mean: '-4.185e-04' + min: '-5.120e+00' shape: - 8 - 16 - 256 - 64 - sum: '1.106e+03' + sum: '-8.776e+02' out.past_key_values.5.0: device: cuda:0 - max: '1.146e+01' - mean: '4.958e-02' - min: '-1.178e+01' + max: '1.230e+01' + mean: '4.608e-02' + min: '-1.164e+01' shape: - 8 - 16 - 256 - 64 - sum: '1.04e+05' + sum: '9.664e+04' out.past_key_values.5.1: device: cuda:0 - max: '4.7e+00' - mean: '9.000e-04' - min: '-4.806e+00' + max: '5.191e+00' + mean: '1.398e-03' + min: '-4.402e+00' shape: - 8 - 16 - 256 - 64 - sum: '1.887e+03' + sum: '2.932e+03' out.past_key_values.6.0: device: cuda:0 - max: '1.156e+01' - mean: '3.090e-03' - min: '-1.303e+01' + max: '1.248e+01' + mean: '6.588e-03' + min: '-1.322e+01' shape: - 8 - 16 - 256 - 64 - sum: '6.480e+03' + sum: '1.382e+04' out.past_key_values.6.1: device: cuda:0 - max: '4.412e+00' - mean: '4.780e-03' - min: '-4.179e+00' + max: '4.148e+00' + mean: '5.169e-03' + min: '-4.295e+00' shape: - 8 - 16 - 256 - 64 - sum: '1.003e+04' + sum: '1.084e+04' out.past_key_values.7.0: device: cuda:0 - max: '1.417e+01' - mean: '-1.118e-02' - min: '-1.204e+01' + max: '1.326e+01' + mean: '-1.400e-02' + min: '-1.272e+01' shape: - 8 - 16 - 256 - 64 - sum: '-2.346e+04' + sum: '-2.936e+04' out.past_key_values.7.1: device: cuda:0 - max: '3.719e+00' - mean: '3.800e-03' - min: '-4.241e+00' + max: '4.043e+00' + mean: '5.246e-03' + min: '-3.823e+00' shape: - 8 - 16 - 256 - 64 - sum: '7.970e+03' + sum: '1.100e+04' out.past_key_values.8.0: device: cuda:0 - max: '1.256e+01' - mean: '1.216e-02' - min: '-1.361e+01' + max: '1.329e+01' + mean: '1.543e-02' + min: '-1.222e+01' shape: - 8 - 16 - 256 - 64 - sum: '2.551e+04' + sum: '3.235e+04' out.past_key_values.8.1: device: cuda:0 - max: '4.220e+00' - mean: '-9.122e-04' - min: '-4.401e+00' + max: '4.179e+00' + mean: '-1.275e-03' + min: '-4.191e+00' shape: - 8 - 16 - 256 - 64 - sum: '-1.913e+03' + sum: '-2.674e+03' out.past_key_values.9.0: device: cuda:0 - max: '1.426e+01' + max: '1.514e+01' mean: '-1.051e-01' - min: '-1.891e+01' + min: '-1.701e+01' shape: - 8 - 16 @@ -561,12 +561,12 @@ out.past_key_values.9.0: sum: '-2.204e+05' out.past_key_values.9.1: device: cuda:0 - max: '5.008e+00' - mean: '2.591e-04' - min: '-4.651e+00' + max: '4.456e+00' + mean: '3.825e-04' + min: '-4.440e+00' shape: - 8 - 16 - 256 - 64 - sum: '5.433e+02' + sum: '8.022e+02'