Column schema (dtype and observed range):

| Column | Dtype | Range / values |
| --- | --- | --- |
| layer_id | int64 | 0 to 223 |
| name | string | lengths 26 to 32 |
| D | float64 | 0.03 to 0.18 |
| M | int64 | 1.02k to 4.1k |
| N | int64 | 4.1k to 14.3k |
| Q | float64 | 1 to 4 |
| alpha | float64 | 2.02 to 8.65 |
| alpha_weighted | float64 | -20.71 to -1.63 |
| entropy | float64 | 0.77 to 1.55 |
| has_esd | bool | 1 class |
| lambda_max | float32 | 0 to 0.2 |
| layer_type | string | 1 value |
| log_alpha_norm | float64 | -20.64 to -1.6 |
| log_norm | float32 | -1.48 to -0.16 |
| log_spectral_norm | float32 | -2.44 to -0.7 |
| matrix_rank | int64 | 64 to 64 |
| norm | float32 | 0.03 to 0.69 |
| num_evals | int64 | 1.02k to 4.1k |
| num_pl_spikes | int64 | 5 to 64 |
| rank_loss | int64 | 960 to 4.03k |
| rf | int64 | 1 to 1 |
| sigma | float64 | 0.19 to 1.31 |
| spectral_norm | float32 | 0 to 0.2 |
| stable_rank | float32 | 1.95 to 30.3 |
| status | string | 1 value |
| sv_max | float64 | 0.06 to 0.45 |
| sv_min | float64 | 0 to 0 |
| warning | string | 2 values |
| weak_rank_loss | int64 | 960 to 4.03k |
| xmax | float64 | 0 to 0.2 |
| xmin | float64 | 0 to 0.01 |
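These column names match the per-layer "details" DataFrame produced by the open-source `weightwatcher` package. Assuming that is how this table was generated, the sketch below shows how such a DataFrame is typically obtained; the toy two-layer model is purely illustrative, since the checkpoint analyzed in these rows is not identified by the data itself.

```python
# Minimal sketch, assuming the columns above come from weightwatcher's
# per-layer "details" DataFrame. The toy model is illustrative only.
import torch.nn as nn
import weightwatcher as ww

# Stand-in model with layer shapes like the v_proj/o_proj rows above
# (weights of shape 4096x1024 and 4096x4096).
model = nn.Sequential(
    nn.Linear(1024, 4096),
    nn.Linear(4096, 4096),
)

watcher = ww.WeightWatcher(model=model)
details = watcher.analyze()  # one row per analyzable layer: alpha, D, spectral_norm, ...
print(details.head())
```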
Rows shown (layer_id 200 to 223):

| layer_id | name | D | M | N | Q | alpha | alpha_weighted | entropy | has_esd | lambda_max | layer_type | log_alpha_norm | log_norm | log_spectral_norm | matrix_rank | norm | num_evals | num_pl_spikes | rank_loss | rf | sigma | spectral_norm | stable_rank | status | sv_max | sv_min | warning | weak_rank_loss | xmax | xmin |
| --- | --- | --- | --- | --- | --- | --- | --- | --- | --- | --- | --- | --- | --- | --- | --- | --- | --- | --- | --- | --- | --- | --- | --- | --- | --- | --- | --- | --- | --- | --- |
| 200 | model.layers.28.self_attn.o_proj | 0.145735 | 4,096 | 4,096 | 1 | 2.538182 | -4.21126 | 1.484161 | true | 0.02192 | dense | -4.172619 | -0.93611 | -1.659164 | 64 | 0.115848 | 4,096 | 8 | 4,032 | 1 | 0.543829 | 0.02192 | 5.285113 | success | 0.148053 | 0 |  | 4,032 | 0.02192 | 0.001596 |
| 201 | model.layers.28.self_attn.q_proj | 0.057402 | 4,096 | 4,096 | 1 | 4.045189 | -7.383231 | 1.53592 | true | 0.014956 | dense | -7.270875 | -0.743321 | -1.825188 | 64 | 0.180584 | 4,096 | 31 | 4,032 | 1 | 0.546932 | 0.014956 | 12.074456 | success | 0.122294 | 0 |  | 4,032 | 0.014956 | 0.002292 |
| 202 | model.layers.28.self_attn.v_proj | 0.082885 | 1,024 | 4,096 | 4 | 7.002583 | -15.899724 | 1.116428 | true | 0.005364 | dense | -15.899541 | -1.207922 | -2.270551 | 64 | 0.061955 | 1,024 | 64 | 960 | 1 | 0.750323 | 0.005364 | 11.551245 | success | 0.073236 | 0.000001 | under-trained | 960 | 0.005364 | 0.000776 |
| 203 | model.layers.29.mlp.down_proj | 0.103636 | 4,096 | 14,336 | 3.5 | 2.782662 | -4.314618 | 1.512735 | true | 0.028149 | dense | -4.116666 | -0.589399 | -1.550536 | 64 | 0.257396 | 4,096 | 16 | 4,032 | 1 | 0.445665 | 0.028149 | 9.144027 | success | 0.167777 | 0.000001 |  | 4,032 | 0.028149 | 0.003201 |
| 204 | model.layers.29.mlp.gate_proj | 0.13293 | 4,096 | 14,336 | 3.5 | 2.831996 | -4.282459 | 1.531378 | true | 0.030749 | dense | -4.011525 | -0.439982 | -1.51217 | 64 | 0.363093 | 4,096 | 12 | 4,032 | 1 | 0.528852 | 0.030749 | 11.808329 | success | 0.175354 | 0.000001 |  | 4,032 | 0.030749 | 0.005316 |
| 205 | model.layers.29.mlp.up_proj | 0.131563 | 4,096 | 14,336 | 3.5 | 2.644039 | -3.954942 | 1.5121 | true | 0.03193 | dense | -3.709622 | -0.525436 | -1.495796 | 64 | 0.298239 | 4,096 | 13 | 4,032 | 1 | 0.455974 | 0.03193 | 9.340278 | success | 0.178691 | 0.000001 |  | 4,032 | 0.03193 | 0.003979 |
| 206 | model.layers.29.self_attn.k_proj | 0.0856 | 1,024 | 4,096 | 4 | 3.743662 | -8.806612 | 1.119421 | true | 0.004442 | dense | -8.455153 | -1.076607 | -2.352406 | 64 | 0.083829 | 1,024 | 18 | 960 | 1 | 0.646687 | 0.004442 | 18.871141 | success | 0.06665 | 0.000001 |  | 960 | 0.004442 | 0.00123 |
| 207 | model.layers.29.self_attn.o_proj | 0.122168 | 4,096 | 4,096 | 1 | 2.673862 | -4.427946 | 1.495641 | true | 0.022079 | dense | -4.392883 | -0.894836 | -1.656011 | 64 | 0.127398 | 4,096 | 8 | 4,032 | 1 | 0.5918 | 0.022079 | 5.769995 | success | 0.148592 | 0 |  | 4,032 | 0.022079 | 0.001842 |
| 208 | model.layers.29.self_attn.q_proj | 0.066318 | 4,096 | 4,096 | 1 | 4.574451 | -8.434781 | 1.533344 | true | 0.014326 | dense | -8.289402 | -0.76285 | -1.843889 | 64 | 0.172644 | 4,096 | 52 | 4,032 | 1 | 0.495687 | 0.014326 | 12.051463 | success | 0.119689 | 0 |  | 4,032 | 0.014326 | 0.001952 |
| 209 | model.layers.29.self_attn.v_proj | 0.11421 | 1,024 | 4,096 | 4 | 7.529444 | -17.524871 | 1.123541 | true | 0.004704 | dense | -17.522147 | -1.145954 | -2.327512 | 64 | 0.071457 | 1,024 | 64 | 960 | 1 | 0.816181 | 0.004704 | 15.19001 | success | 0.068587 | 0.000001 | under-trained | 960 | 0.004704 | 0.00092 |
| 210 | model.layers.30.mlp.down_proj | 0.156046 | 4,096 | 14,336 | 3.5 | 2.608906 | -4.179348 | 1.511715 | true | 0.025006 | dense | -3.823854 | -0.581968 | -1.601954 | 64 | 0.261837 | 4,096 | 12 | 4,032 | 1 | 0.464451 | 0.025006 | 10.470941 | success | 0.158133 | 0.000001 |  | 4,032 | 0.025006 | 0.003647 |
| 211 | model.layers.30.mlp.gate_proj | 0.124557 | 4,096 | 14,336 | 3.5 | 2.782323 | -4.270743 | 1.530313 | true | 0.029177 | dense | -3.933391 | -0.440372 | -1.534956 | 64 | 0.362767 | 4,096 | 13 | 4,032 | 1 | 0.494327 | 0.029177 | 12.433222 | success | 0.170813 | 0.000001 |  | 4,032 | 0.029177 | 0.005054 |
| 212 | model.layers.30.mlp.up_proj | 0.170626 | 4,096 | 14,336 | 3.5 | 2.554896 | -3.997181 | 1.5195 | true | 0.027257 | dense | -3.611438 | -0.509766 | -1.564518 | 64 | 0.309196 | 4,096 | 12 | 4,032 | 1 | 0.44886 | 0.027257 | 11.343627 | success | 0.165098 | 0.000001 |  | 4,032 | 0.027257 | 0.004161 |
| 213 | model.layers.30.self_attn.k_proj | 0.101164 | 1,024 | 4,096 | 4 | 5.358292 | -12.778783 | 1.116238 | true | 0.004122 | dense | -12.638126 | -1.159035 | -2.384861 | 64 | 0.069337 | 1,024 | 64 | 960 | 1 | 0.544786 | 0.004122 | 16.820019 | success | 0.064205 | 0.000001 |  | 960 | 0.004122 | 0.000805 |
| 214 | model.layers.30.self_attn.o_proj | 0.074305 | 4,096 | 4,096 | 1 | 2.579508 | -4.380893 | 1.48816 | true | 0.020029 | dense | -4.329809 | -0.941632 | -1.698345 | 64 | 0.114385 | 4,096 | 11 | 4,032 | 1 | 0.476239 | 0.020029 | 5.711001 | success | 0.141523 | 0 |  | 4,032 | 0.020029 | 0.001496 |
| 215 | model.layers.30.self_attn.q_proj | 0.09806 | 4,096 | 4,096 | 1 | 2.834588 | -5.029225 | 1.517705 | true | 0.016818 | dense | -4.804394 | -0.777677 | -1.774235 | 64 | 0.166849 | 4,096 | 13 | 4,032 | 1 | 0.508823 | 0.016818 | 9.921068 | success | 0.129683 | 0 |  | 4,032 | 0.016818 | 0.002495 |
| 216 | model.layers.30.self_attn.v_proj | 0.102737 | 1,024 | 4,096 | 4 | 7.731426 | -18.03403 | 1.120981 | true | 0.00465 | dense | -18.03366 | -1.205279 | -2.332562 | 64 | 0.062333 | 1,024 | 64 | 960 | 1 | 0.841428 | 0.00465 | 13.405494 | success | 0.06819 | 0.000001 | under-trained | 960 | 0.00465 | 0.000802 |
| 217 | model.layers.31.mlp.down_proj | 0.079029 | 4,096 | 14,336 | 3.5 | 2.902343 | -4.850056 | 1.512488 | true | 0.021326 | dense | -4.475008 | -0.611118 | -1.671083 | 64 | 0.24484 | 4,096 | 25 | 4,032 | 1 | 0.380469 | 0.021326 | 11.480594 | success | 0.146036 | 0.000001 |  | 4,032 | 0.021326 | 0.002777 |
| 218 | model.layers.31.mlp.gate_proj | 0.097624 | 4,096 | 14,336 | 3.5 | 3.230765 | -5.040881 | 1.538523 | true | 0.027525 | dense | -4.779333 | -0.424897 | -1.560275 | 64 | 0.375927 | 4,096 | 17 | 4,032 | 1 | 0.54104 | 0.027525 | 13.657705 | success | 0.165906 | 0.000001 |  | 4,032 | 0.027525 | 0.005125 |
| 219 | model.layers.31.mlp.up_proj | 0.113964 | 4,096 | 14,336 | 3.5 | 2.912509 | -4.665717 | 1.536689 | true | 0.025006 | dense | -4.422057 | -0.510184 | -1.601958 | 64 | 0.308899 | 4,096 | 13 | 4,032 | 1 | 0.530434 | 0.025006 | 12.353059 | success | 0.158132 | 0.000001 |  | 4,032 | 0.025006 | 0.004429 |
| 220 | model.layers.31.self_attn.k_proj | 0.049394 | 1,024 | 4,096 | 4 | 4.553209 | -11.118033 | 1.124065 | true | 0.003616 | dense | -10.97038 | -1.167458 | -2.441802 | 64 | 0.068005 | 1,024 | 24 | 960 | 1 | 0.725296 | 0.003616 | 18.80805 | success | 0.060131 | 0.000001 |  | 960 | 0.003616 | 0.000974 |
| 221 | model.layers.31.self_attn.o_proj | 0.087925 | 4,096 | 4,096 | 1 | 2.453679 | -4.101816 | 1.485777 | true | 0.021296 | dense | -4.023601 | -0.907667 | -1.6717 | 64 | 0.123689 | 4,096 | 11 | 4,032 | 1 | 0.438301 | 0.021296 | 5.808087 | success | 0.145932 | 0 |  | 4,032 | 0.021296 | 0.001541 |
| 222 | model.layers.31.self_attn.q_proj | 0.065747 | 4,096 | 4,096 | 1 | 4.085555 | -7.643018 | 1.530533 | true | 0.013467 | dense | -7.501466 | -0.788933 | -1.870742 | 64 | 0.16258 | 4,096 | 49 | 4,032 | 1 | 0.440794 | 0.013467 | 12.072827 | success | 0.116046 | 0 |  | 4,032 | 0.013467 | 0.001781 |
| 223 | model.layers.31.self_attn.v_proj | 0.11723 | 1,024 | 4,096 | 4 | 7.89883 | -18.247252 | 1.122072 | true | 0.004896 | dense | -18.247011 | -1.171979 | -2.310121 | 64 | 0.067301 | 1,024 | 64 | 960 | 1 | 0.862354 | 0.004896 | 13.744894 | success | 0.069974 | 0.000001 | under-trained | 960 | 0.004896 | 0.000872 |
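A small sketch (assuming `pandas`) of how rows like these can be screened: in the weightwatcher heuristic, a power-law exponent `alpha` roughly in the 2 to 6 range is read as a sign of a well-fit layer, and the rows flagged `under-trained` above (the `v_proj` layers, with `alpha` near 7 to 8) fall outside that band. The hard-coded rows below are copied from the table.

```python
# Small sketch, assuming pandas; the rows are copied from the table above.
import pandas as pd

rows = [
    (202, "model.layers.28.self_attn.v_proj", 7.002583, "under-trained"),
    (209, "model.layers.29.self_attn.v_proj", 7.529444, "under-trained"),
    (215, "model.layers.30.self_attn.q_proj", 2.834588, ""),
    (216, "model.layers.30.self_attn.v_proj", 7.731426, "under-trained"),
    (221, "model.layers.31.self_attn.o_proj", 2.453679, ""),
    (223, "model.layers.31.self_attn.v_proj", 7.89883, "under-trained"),
]
details = pd.DataFrame(rows, columns=["layer_id", "name", "alpha", "warning"])

# Flag layers outside the heuristic 2 <= alpha <= 6 band, or already marked.
flagged = details[(details["alpha"] > 6) | (details["warning"] == "under-trained")]
print(flagged[["layer_id", "name", "alpha", "warning"]])
```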