Column summary for the per-layer details table (dtype and observed range or distinct values):

| Column | Type | Range / values |
|---|---|---|
| layer_id | int64 | 0 to 223 |
| name | string | lengths 26 to 32 |
| D | float64 | 0.03 to 0.18 |
| M | int64 | 1,024 to 4,096 |
| N | int64 | 4,096 to 14,336 |
| Q | float64 | 1 to 4 |
| alpha | float64 | 2.02 to 8.65 |
| alpha_weighted | float64 | -20.71 to -1.63 |
| entropy | float64 | 0.77 to 1.55 |
| has_esd | bool | single value (true) |
| lambda_max | float32 | 0 to 0.2 |
| layer_type | string | single value (dense) |
| log_alpha_norm | float64 | -20.64 to -1.6 |
| log_norm | float32 | -1.48 to -0.16 |
| log_spectral_norm | float32 | -2.44 to -0.7 |
| matrix_rank | int64 | 64 to 64 |
| norm | float32 | 0.03 to 0.69 |
| num_evals | int64 | 1,024 to 4,096 |
| num_pl_spikes | int64 | 5 to 64 |
| rank_loss | int64 | 960 to 4,032 |
| rf | int64 | 1 to 1 |
| sigma | float64 | 0.19 to 1.31 |
| spectral_norm | float32 | 0 to 0.2 |
| stable_rank | float32 | 1.95 to 30.3 |
| status | string | single value (success) |
| sv_max | float64 | 0.06 to 0.45 |
| sv_min | float64 | 0 to 0 |
| warning | string | 2 distinct values (incl. under-trained) |
| weak_rank_loss | int64 | 960 to 4,032 |
| xmax | float64 | 0 to 0.2 |
| xmin | float64 | 0 to 0.01 |
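These column names match the per-layer details DataFrame produced by the open-source weightwatcher package. A minimal sketch of how a table like this is typically generated follows; the checkpoint name is a placeholder, not necessarily the model analyzed here, and the exact analyze() options used for this dataset are not stated in this section.

```python
# Sketch only: regenerating a per-layer details table similar to the one below.
# Assumes weightwatcher, torch, and transformers are installed; the checkpoint
# name is hypothetical.
import weightwatcher as ww
from transformers import AutoModelForCausalLM

model = AutoModelForCausalLM.from_pretrained("some-org/some-7b-model")  # placeholder

watcher = ww.WeightWatcher(model=model)
details = watcher.analyze()  # pandas DataFrame, one row per analyzed layer

print(details[["name", "alpha", "stable_rank", "warning"]].head())
details.to_csv("details.csv", index=False)  # export for later inspection
```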
Per-layer rows in this section (layer_id 0 through 99):

| layer_id | name | D | M | N | Q | alpha | alpha_weighted | entropy | has_esd | lambda_max | layer_type | log_alpha_norm | log_norm | log_spectral_norm | matrix_rank | norm | num_evals | num_pl_spikes | rank_loss | rf | sigma | spectral_norm | stable_rank | status | sv_max | sv_min | warning | weak_rank_loss | xmax | xmin |
|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|
| 0 | model.layers.0.mlp.down_proj | 0.052897 | 4,096 | 14,336 | 3.5 | 2.677719 | -3.456077 | 1.488533 | true | 0.051206 | dense | -3.261402 | -0.390622 | -1.29068 | 64 | 0.406797 | 4,096 | 26 | 4,032 | 1 | 0.329028 | 0.051206 | 7.944337 | success | 0.226287 | 0.000001 | | 4,032 | 0.051206 | 0.004364 |
| 1 | model.layers.0.mlp.gate_proj | 0.06163 | 4,096 | 14,336 | 3.5 | 2.547375 | -2.586054 | 1.457833 | true | 0.096564 | dense | -2.50109 | -0.26605 | -1.015184 | 64 | 0.541939 | 4,096 | 29 | 4,032 | 1 | 0.28734 | 0.096564 | 5.612213 | success | 0.310748 | 0.000001 | | 4,032 | 0.096564 | 0.005125 |
| 2 | model.layers.0.mlp.up_proj | 0.067622 | 4,096 | 14,336 | 3.5 | 2.495929 | -2.590564 | 1.45299 | true | 0.09164 | dense | -2.473998 | -0.271746 | -1.037916 | 64 | 0.534877 | 4,096 | 34 | 4,032 | 1 | 0.25655 | 0.09164 | 5.836728 | success | 0.302721 | 0.000001 | | 4,032 | 0.09164 | 0.004495 |
| 3 | model.layers.0.self_attn.k_proj | 0.081234 | 1,024 | 4,096 | 4 | 3.123636 | -3.951471 | 1.003636 | true | 0.054322 | dense | -3.861217 | -0.531794 | -1.265023 | 64 | 0.293905 | 1,024 | 64 | 960 | 1 | 0.265454 | 0.054322 | 5.410403 | success | 0.233071 | 0.000001 | | 960 | 0.054322 | 0.002011 |
| 4 | model.layers.0.self_attn.o_proj | 0.059503 | 4,096 | 4,096 | 1 | 2.024146 | -2.355433 | 1.318973 | true | 0.068601 | dense | -2.190306 | -0.589637 | -1.163668 | 64 | 0.257254 | 4,096 | 30 | 4,032 | 1 | 0.186983 | 0.068601 | 3.749991 | success | 0.261919 | 0 | | 4,032 | 0.068601 | 0.001346 |
| 5 | model.layers.0.self_attn.q_proj | 0.071362 | 4,096 | 4,096 | 1 | 2.232562 | -2.12743 | 1.414472 | true | 0.111453 | dense | -2.032254 | -0.308862 | -0.95291 | 64 | 0.491064 | 4,096 | 18 | 4,032 | 1 | 0.290518 | 0.111453 | 4.406033 | success | 0.333845 | 0 | | 4,032 | 0.111453 | 0.004887 |
| 6 | model.layers.0.self_attn.v_proj | 0.077723 | 1,024 | 4,096 | 4 | 2.064591 | -3.133218 | 0.918566 | true | 0.030367 | dense | -3.043222 | -0.983914 | -1.517598 | 64 | 0.103773 | 1,024 | 17 | 960 | 1 | 0.258201 | 0.030367 | 3.417306 | success | 0.174261 | 0.000001 | | 960 | 0.030367 | 0.000892 |
| 7 | model.layers.1.mlp.down_proj | 0.064719 | 4,096 | 14,336 | 3.5 | 3.166249 | -4.832687 | 1.524829 | true | 0.029764 | dense | -4.576148 | -0.452895 | -1.526313 | 64 | 0.352456 | 4,096 | 24 | 4,032 | 1 | 0.442184 | 0.029764 | 11.84182 | success | 0.172522 | 0.000001 | | 4,032 | 0.029764 | 0.004417 |
| 8 | model.layers.1.mlp.gate_proj | 0.085658 | 4,096 | 14,336 | 3.5 | 2.889781 | -4.001024 | 1.503489 | true | 0.041253 | dense | -3.746318 | -0.390278 | -1.384542 | 64 | 0.40712 | 4,096 | 28 | 4,032 | 1 | 0.357135 | 0.041253 | 9.868801 | success | 0.203109 | 0.000001 | | 4,032 | 0.041253 | 0.0044 |
| 9 | model.layers.1.mlp.up_proj | 0.104817 | 4,096 | 14,336 | 3.5 | 2.726057 | -3.817446 | 1.498305 | true | 0.039778 | dense | -3.449987 | -0.381046 | -1.400355 | 64 | 0.415866 | 4,096 | 24 | 4,032 | 1 | 0.35233 | 0.039778 | 10.454619 | success | 0.199445 | 0.000001 | | 4,032 | 0.039778 | 0.004627 |
| 10 | model.layers.1.self_attn.k_proj | 0.07634 | 1,024 | 4,096 | 4 | 2.42049 | -3.081766 | 1.022545 | true | 0.053309 | dense | -2.961904 | -0.531019 | -1.273199 | 64 | 0.29443 | 1,024 | 16 | 960 | 1 | 0.355122 | 0.053309 | 5.523074 | success | 0.230887 | 0.000001 | | 960 | 0.053309 | 0.003529 |
| 11 | model.layers.1.self_attn.o_proj | 0.047832 | 4,096 | 4,096 | 1 | 2.456333 | -3.962271 | 1.454103 | true | 0.024373 | dense | -3.674764 | -0.75138 | -1.613084 | 64 | 0.177264 | 4,096 | 35 | 4,032 | 1 | 0.246165 | 0.024373 | 7.272831 | success | 0.15612 | 0 | | 4,032 | 0.024373 | 0.001432 |
| 12 | model.layers.1.self_attn.q_proj | 0.040733 | 4,096 | 4,096 | 1 | 2.347499 | -1.631893 | 1.372005 | true | 0.201761 | dense | -1.596231 | -0.160934 | -0.695162 | 64 | 0.690344 | 4,096 | 26 | 4,032 | 1 | 0.264266 | 0.201761 | 3.421591 | success | 0.449178 | 0 | | 4,032 | 0.201761 | 0.005533 |
| 13 | model.layers.1.self_attn.v_proj | 0.076101 | 1,024 | 4,096 | 4 | 2.199378 | -2.759386 | 0.769733 | true | 0.055639 | dense | -2.754431 | -0.964904 | -1.254621 | 64 | 0.108417 | 1,024 | 12 | 960 | 1 | 0.346231 | 0.055639 | 1.948576 | success | 0.235879 | 0.000001 | | 960 | 0.055639 | 0.000913 |
| 14 | model.layers.2.mlp.down_proj | 0.087654 | 4,096 | 14,336 | 3.5 | 2.953663 | -4.509285 | 1.520652 | true | 0.029739 | dense | -4.330108 | -0.516344 | -1.526675 | 64 | 0.304548 | 4,096 | 21 | 4,032 | 1 | 0.426324 | 0.029739 | 10.240739 | success | 0.17245 | 0.000001 | | 4,032 | 0.029739 | 0.003887 |
| 15 | model.layers.2.mlp.gate_proj | 0.068122 | 4,096 | 14,336 | 3.5 | 2.794675 | -3.954023 | 1.497494 | true | 0.038473 | dense | -3.703116 | -0.448609 | -1.414842 | 64 | 0.355952 | 4,096 | 28 | 4,032 | 1 | 0.339162 | 0.038473 | 9.251945 | success | 0.196146 | 0.000001 | | 4,032 | 0.038473 | 0.003766 |
| 16 | model.layers.2.mlp.up_proj | 0.083981 | 4,096 | 14,336 | 3.5 | 2.632966 | -3.601815 | 1.483391 | true | 0.042858 | dense | -3.368448 | -0.45853 | -1.367969 | 64 | 0.347913 | 4,096 | 27 | 4,032 | 1 | 0.314264 | 0.042858 | 8.117814 | success | 0.207022 | 0.000001 | | 4,032 | 0.042858 | 0.003515 |
| 17 | model.layers.2.self_attn.k_proj | 0.144103 | 1,024 | 4,096 | 4 | 5.325873 | -8.750114 | 1.074845 | true | 0.022754 | dense | -8.74541 | -0.788276 | -1.642945 | 64 | 0.162826 | 1,024 | 64 | 960 | 1 | 0.540734 | 0.022754 | 7.155963 | success | 0.150844 | 0.000001 | | 960 | 0.022754 | 0.001726 |
| 18 | model.layers.2.self_attn.o_proj | 0.053145 | 4,096 | 4,096 | 1 | 2.562537 | -4.087537 | 1.438093 | true | 0.025403 | dense | -3.999081 | -0.885766 | -1.595113 | 64 | 0.130087 | 4,096 | 31 | 4,032 | 1 | 0.28064 | 0.025403 | 5.120908 | success | 0.159383 | 0 | | 4,032 | 0.025403 | 0.001113 |
| 19 | model.layers.2.self_attn.q_proj | 0.068557 | 4,096 | 4,096 | 1 | 3.245647 | -5.122339 | 1.513713 | true | 0.026411 | dense | -4.95726 | -0.597474 | -1.578218 | 64 | 0.252654 | 4,096 | 27 | 4,032 | 1 | 0.432175 | 0.026411 | 9.566302 | success | 0.162514 | 0 | | 4,032 | 0.026411 | 0.002975 |
| 20 | model.layers.2.self_attn.v_proj | 0.074834 | 1,024 | 4,096 | 4 | 2.862386 | -6.276086 | 1.069041 | true | 0.006418 | dense | -6.222477 | -1.363557 | -2.192607 | 64 | 0.043296 | 1,024 | 17 | 960 | 1 | 0.451695 | 0.006418 | 6.746058 | success | 0.080112 | 0.000001 | | 960 | 0.006418 | 0.00055 |
| 21 | model.layers.3.mlp.down_proj | 0.083451 | 4,096 | 14,336 | 3.5 | 3.073457 | -4.79474 | 1.522787 | true | 0.027539 | dense | -4.615598 | -0.534599 | -1.560048 | 64 | 0.292012 | 4,096 | 22 | 4,032 | 1 | 0.442063 | 0.027539 | 10.60348 | success | 0.16595 | 0.000001 | | 4,032 | 0.027539 | 0.003667 |
| 22 | model.layers.3.mlp.gate_proj | 0.088245 | 4,096 | 14,336 | 3.5 | 3.076395 | -4.664228 | 1.514846 | true | 0.03047 | dense | -4.428282 | -0.476946 | -1.516134 | 64 | 0.333468 | 4,096 | 32 | 4,032 | 1 | 0.367058 | 0.03047 | 10.944314 | success | 0.174555 | 0.000001 | | 4,032 | 0.03047 | 0.003589 |
| 23 | model.layers.3.mlp.up_proj | 0.107826 | 4,096 | 14,336 | 3.5 | 2.699548 | -3.904319 | 1.5027 | true | 0.035786 | dense | -3.679445 | -0.484377 | -1.446286 | 64 | 0.327811 | 4,096 | 21 | 4,032 | 1 | 0.370872 | 0.035786 | 9.160287 | success | 0.189172 | 0.000001 | | 4,032 | 0.035786 | 0.003935 |
| 24 | model.layers.3.self_attn.k_proj | 0.114861 | 1,024 | 4,096 | 4 | 2.347158 | -3.257596 | 1.031844 | true | 0.040936 | dense | -3.199793 | -0.706221 | -1.38789 | 64 | 0.196689 | 1,024 | 8 | 960 | 1 | 0.476292 | 0.040936 | 4.804727 | success | 0.202328 | 0.000001 | | 960 | 0.040936 | 0.002814 |
| 25 | model.layers.3.self_attn.o_proj | 0.053945 | 4,096 | 4,096 | 1 | 2.784179 | -4.747886 | 1.478179 | true | 0.01971 | dense | -4.662831 | -0.906983 | -1.705309 | 64 | 0.123884 | 4,096 | 23 | 4,032 | 1 | 0.372027 | 0.01971 | 6.285302 | success | 0.140393 | 0 | | 4,032 | 0.01971 | 0.001391 |
| 26 | model.layers.3.self_attn.q_proj | 0.074233 | 4,096 | 4,096 | 1 | 2.802199 | -3.856473 | 1.489978 | true | 0.04205 | dense | -3.66984 | -0.456454 | -1.376231 | 64 | 0.34958 | 4,096 | 33 | 4,032 | 1 | 0.313723 | 0.04205 | 8.313359 | success | 0.205062 | 0 | | 4,032 | 0.04205 | 0.003464 |
| 27 | model.layers.3.self_attn.v_proj | 0.077611 | 1,024 | 4,096 | 4 | 2.931664 | -6.616693 | 1.084328 | true | 0.005534 | dense | -6.554824 | -1.373657 | -2.256975 | 64 | 0.0423 | 1,024 | 12 | 960 | 1 | 0.557623 | 0.005534 | 7.64396 | success | 0.07439 | 0.000001 | | 960 | 0.005534 | 0.000605 |
| 28 | model.layers.4.mlp.down_proj | 0.073888 | 4,096 | 14,336 | 3.5 | 3.095705 | -4.439551 | 1.505269 | true | 0.036804 | dense | -4.391367 | -0.569872 | -1.4341 | 64 | 0.269233 | 4,096 | 25 | 4,032 | 1 | 0.419141 | 0.036804 | 7.315219 | success | 0.191845 | 0.000001 | | 4,032 | 0.036804 | 0.003111 |
| 29 | model.layers.4.mlp.gate_proj | 0.096931 | 4,096 | 14,336 | 3.5 | 2.943779 | -4.451378 | 1.517809 | true | 0.030752 | dense | -4.217382 | -0.478373 | -1.512131 | 64 | 0.332374 | 4,096 | 25 | 4,032 | 1 | 0.388756 | 0.030752 | 10.808311 | success | 0.175362 | 0.000001 | | 4,032 | 0.030752 | 0.003887 |
| 30 | model.layers.4.mlp.up_proj | 0.083876 | 4,096 | 14,336 | 3.5 | 2.721092 | -3.737681 | 1.491075 | true | 0.042306 | dense | -3.624831 | -0.515658 | -1.373596 | 64 | 0.305029 | 4,096 | 22 | 4,032 | 1 | 0.366938 | 0.042306 | 7.21004 | success | 0.205685 | 0.000001 | | 4,032 | 0.042306 | 0.003489 |
| 31 | model.layers.4.self_attn.k_proj | 0.104467 | 1,024 | 4,096 | 4 | 3.135139 | -5.976866 | 1.100359 | true | 0.012405 | dense | -5.860833 | -0.897017 | -1.906412 | 64 | 0.12676 | 1,024 | 12 | 960 | 1 | 0.616362 | 0.012405 | 10.218679 | success | 0.111377 | 0.000001 | | 960 | 0.012405 | 0.001908 |
| 32 | model.layers.4.self_attn.o_proj | 0.070967 | 4,096 | 4,096 | 1 | 2.843256 | -4.698893 | 1.450936 | true | 0.022251 | dense | -4.67105 | -0.976651 | -1.652645 | 64 | 0.105523 | 4,096 | 23 | 4,032 | 1 | 0.384346 | 0.022251 | 4.742352 | success | 0.149169 | 0 | | 4,032 | 0.022251 | 0.001125 |
| 33 | model.layers.4.self_attn.q_proj | 0.064561 | 4,096 | 4,096 | 1 | 3.171323 | -5.389224 | 1.515203 | true | 0.019982 | dense | -5.029191 | -0.61956 | -1.699361 | 64 | 0.240127 | 4,096 | 33 | 4,032 | 1 | 0.377979 | 0.019982 | 12.017151 | success | 0.141358 | 0 | | 4,032 | 0.019982 | 0.002645 |
| 34 | model.layers.4.self_attn.v_proj | 0.100189 | 1,024 | 4,096 | 4 | 2.517159 | -5.668791 | 1.079474 | true | 0.005597 | dense | -5.568282 | -1.396234 | -2.25206 | 64 | 0.040157 | 1,024 | 8 | 960 | 1 | 0.536397 | 0.005597 | 7.175061 | success | 0.074812 | 0.000001 | | 960 | 0.005597 | 0.000613 |
| 35 | model.layers.5.mlp.down_proj | 0.068227 | 4,096 | 14,336 | 3.5 | 2.857705 | -4.089038 | 1.503931 | true | 0.037078 | dense | -4.030817 | -0.584083 | -1.430882 | 64 | 0.260565 | 4,096 | 16 | 4,032 | 1 | 0.464426 | 0.037078 | 7.027461 | success | 0.192557 | 0.000001 | | 4,032 | 0.037078 | 0.003474 |
| 36 | model.layers.5.mlp.gate_proj | 0.08291 | 4,096 | 14,336 | 3.5 | 3.031947 | -4.701401 | 1.526677 | true | 0.028144 | dense | -4.460686 | -0.47992 | -1.550621 | 64 | 0.331192 | 4,096 | 20 | 4,032 | 1 | 0.454357 | 0.028144 | 11.767975 | success | 0.16776 | 0.000001 | | 4,032 | 0.028144 | 0.004329 |
| 37 | model.layers.5.mlp.up_proj | 0.08546 | 4,096 | 14,336 | 3.5 | 2.778091 | -3.848164 | 1.49447 | true | 0.041192 | dense | -3.75802 | -0.534364 | -1.385183 | 64 | 0.29217 | 4,096 | 22 | 4,032 | 1 | 0.37909 | 0.041192 | 7.092812 | success | 0.202959 | 0.000001 | | 4,032 | 0.041192 | 0.003339 |
| 38 | model.layers.5.self_attn.k_proj | 0.107691 | 1,024 | 4,096 | 4 | 2.688029 | -4.948406 | 1.094828 | true | 0.014424 | dense | -4.833602 | -0.906374 | -1.840905 | 64 | 0.124058 | 1,024 | 8 | 960 | 1 | 0.596808 | 0.014424 | 8.600653 | success | 0.120101 | 0.000001 | | 960 | 0.014424 | 0.001872 |
| 39 | model.layers.5.self_attn.o_proj | 0.122563 | 4,096 | 4,096 | 1 | 2.618946 | -4.492254 | 1.475117 | true | 0.019262 | dense | -4.436765 | -0.970445 | -1.715291 | 64 | 0.107042 | 4,096 | 13 | 4,032 | 1 | 0.449015 | 0.019262 | 5.557077 | success | 0.138789 | 0 | | 4,032 | 0.019262 | 0.001453 |
| 40 | model.layers.5.self_attn.q_proj | 0.063356 | 4,096 | 4,096 | 1 | 3.55331 | -6.197344 | 1.53308 | true | 0.018026 | dense | -6.054521 | -0.676972 | -1.744104 | 64 | 0.210392 | 4,096 | 23 | 4,032 | 1 | 0.532402 | 0.018026 | 11.671661 | success | 0.13426 | 0 | | 4,032 | 0.018026 | 0.002775 |
| 41 | model.layers.5.self_attn.v_proj | 0.115764 | 1,024 | 4,096 | 4 | 2.647311 | -6.116906 | 1.086778 | true | 0.004891 | dense | -5.990173 | -1.397493 | -2.310611 | 64 | 0.040041 | 1,024 | 9 | 960 | 1 | 0.549104 | 0.004891 | 8.186871 | success | 0.069935 | 0.000001 | | 960 | 0.004891 | 0.000592 |
| 42 | model.layers.6.mlp.down_proj | 0.063407 | 4,096 | 14,336 | 3.5 | 2.8953 | -4.088001 | 1.500252 | true | 0.038731 | dense | -4.040085 | -0.582365 | -1.411944 | 64 | 0.261598 | 4,096 | 21 | 4,032 | 1 | 0.413588 | 0.038731 | 6.754276 | success | 0.196801 | 0.000001 | | 4,032 | 0.038731 | 0.003113 |
| 43 | model.layers.6.mlp.gate_proj | 0.068424 | 4,096 | 14,336 | 3.5 | 2.867297 | -4.338855 | 1.52398 | true | 0.030675 | dense | -4.113379 | -0.478413 | -1.513222 | 64 | 0.332344 | 4,096 | 19 | 4,032 | 1 | 0.428387 | 0.030675 | 10.834498 | success | 0.175142 | 0.000001 | | 4,032 | 0.030675 | 0.004278 |
| 44 | model.layers.6.mlp.up_proj | 0.06568 | 4,096 | 14,336 | 3.5 | 2.65465 | -3.628529 | 1.488785 | true | 0.042968 | dense | -3.535722 | -0.541657 | -1.366858 | 64 | 0.287305 | 4,096 | 21 | 4,032 | 1 | 0.361074 | 0.042968 | 6.68653 | success | 0.207287 | 0.000001 | | 4,032 | 0.042968 | 0.003247 |
| 45 | model.layers.6.self_attn.k_proj | 0.107168 | 1,024 | 4,096 | 4 | 3.180479 | -6.081132 | 1.103667 | true | 0.012246 | dense | -5.998892 | -0.912065 | -1.912018 | 64 | 0.122443 | 1,024 | 10 | 960 | 1 | 0.689528 | 0.012246 | 9.998901 | success | 0.11066 | 0.000001 | | 960 | 0.012246 | 0.001901 |
| 46 | model.layers.6.self_attn.o_proj | 0.083761 | 4,096 | 4,096 | 1 | 2.793253 | -4.715604 | 1.45369 | true | 0.020502 | dense | -4.684449 | -1.011275 | -1.688212 | 64 | 0.097437 | 4,096 | 18 | 4,032 | 1 | 0.422674 | 0.020502 | 4.752665 | success | 0.143184 | 0 | | 4,032 | 0.020502 | 0.001114 |
| 47 | model.layers.6.self_attn.q_proj | 0.059729 | 4,096 | 4,096 | 1 | 3.488664 | -6.219897 | 1.529448 | true | 0.016486 | dense | -5.980991 | -0.696511 | -1.782888 | 64 | 0.201136 | 4,096 | 27 | 4,032 | 1 | 0.478944 | 0.016486 | 12.200483 | success | 0.128397 | 0 | | 4,032 | 0.016486 | 0.002489 |
| 48 | model.layers.6.self_attn.v_proj | 0.118223 | 1,024 | 4,096 | 4 | 2.76059 | -6.355218 | 1.085289 | true | 0.004987 | dense | -6.287284 | -1.428893 | -2.302124 | 64 | 0.037248 | 1,024 | 8 | 960 | 1 | 0.622462 | 0.004987 | 7.468451 | success | 0.070622 | 0.000001 | | 960 | 0.004987 | 0.000601 |
| 49 | model.layers.7.mlp.down_proj | 0.076932 | 4,096 | 14,336 | 3.5 | 2.909538 | -4.029174 | 1.494041 | true | 0.041227 | dense | -3.992391 | -0.58818 | -1.384816 | 64 | 0.258119 | 4,096 | 19 | 4,032 | 1 | 0.438078 | 0.041227 | 6.260892 | success | 0.203045 | 0.000001 | | 4,032 | 0.041227 | 0.003141 |
| 50 | model.layers.7.mlp.gate_proj | 0.075642 | 4,096 | 14,336 | 3.5 | 2.8637 | -4.388342 | 1.528604 | true | 0.029349 | dense | -4.1344 | -0.467785 | -1.532403 | 64 | 0.340577 | 4,096 | 16 | 4,032 | 1 | 0.465925 | 0.029349 | 11.60428 | success | 0.171316 | 0.000001 | | 4,032 | 0.029349 | 0.004747 |
| 51 | model.layers.7.mlp.up_proj | 0.075565 | 4,096 | 14,336 | 3.5 | 2.645595 | -3.624205 | 1.491818 | true | 0.042668 | dense | -3.534071 | -0.541343 | -1.369902 | 64 | 0.287512 | 4,096 | 18 | 4,032 | 1 | 0.38787 | 0.042668 | 6.738426 | success | 0.206561 | 0.000001 | | 4,032 | 0.042668 | 0.003534 |
| 52 | model.layers.7.self_attn.k_proj | 0.075297 | 1,024 | 4,096 | 4 | 5.854554 | -11.795797 | 1.117029 | true | 0.009665 | dense | -11.787982 | -0.897848 | -2.014807 | 64 | 0.126518 | 1,024 | 64 | 960 | 1 | 0.606819 | 0.009665 | 13.09058 | success | 0.09831 | 0.000001 | | 960 | 0.009665 | 0.001517 |
| 53 | model.layers.7.self_attn.o_proj | 0.096781 | 4,096 | 4,096 | 1 | 2.43176 | -4.131487 | 1.463078 | true | 0.02 | dense | -4.082821 | -1.014456 | -1.69897 | 64 | 0.096726 | 4,096 | 9 | 4,032 | 1 | 0.477253 | 0.02 | 4.836306 | success | 0.141421 | 0 | | 4,032 | 0.02 | 0.001397 |
| 54 | model.layers.7.self_attn.q_proj | 0.115064 | 4,096 | 4,096 | 1 | 3.258742 | -6.096869 | 1.538012 | true | 0.013461 | dense | -5.723896 | -0.697313 | -1.870927 | 64 | 0.200765 | 4,096 | 15 | 4,032 | 1 | 0.583205 | 0.013461 | 14.914696 | success | 0.116021 | 0 | | 4,032 | 0.013461 | 0.00294 |
| 55 | model.layers.7.self_attn.v_proj | 0.130597 | 1,024 | 4,096 | 4 | 2.52747 | -5.850276 | 1.092207 | true | 0.004845 | dense | -5.727162 | -1.403724 | -2.314677 | 64 | 0.039471 | 1,024 | 7 | 960 | 1 | 0.577329 | 0.004845 | 8.146166 | success | 0.069608 | 0.000001 | | 960 | 0.004845 | 0.000611 |
| 56 | model.layers.8.mlp.down_proj | 0.073526 | 4,096 | 14,336 | 3.5 | 2.82947 | -3.75068 | 1.479343 | true | 0.047252 | dense | -3.721924 | -0.587064 | -1.325577 | 64 | 0.258783 | 4,096 | 19 | 4,032 | 1 | 0.419709 | 0.047252 | 5.476626 | success | 0.217376 | 0.000001 | | 4,032 | 0.047252 | 0.003003 |
| 57 | model.layers.8.mlp.gate_proj | 0.073727 | 4,096 | 14,336 | 3.5 | 2.940206 | -4.352934 | 1.523923 | true | 0.033076 | dense | -4.157378 | -0.456073 | -1.480486 | 64 | 0.349886 | 4,096 | 19 | 4,032 | 1 | 0.445114 | 0.033076 | 10.578228 | success | 0.181868 | 0.000001 | | 4,032 | 0.033076 | 0.00456 |
| 58 | model.layers.8.mlp.up_proj | 0.087432 | 4,096 | 14,336 | 3.5 | 2.616947 | -3.46615 | 1.481919 | true | 0.047369 | dense | -3.387244 | -0.533119 | -1.324501 | 64 | 0.293009 | 4,096 | 19 | 4,032 | 1 | 0.370953 | 0.047369 | 6.185611 | success | 0.217645 | 0.000001 | | 4,032 | 0.047369 | 0.003399 |
| 59 | model.layers.8.self_attn.k_proj | 0.1027 | 1,024 | 4,096 | 4 | 5.653809 | -11.045854 | 1.103938 | true | 0.011125 | dense | -11.030598 | -0.935479 | -1.953701 | 64 | 0.116017 | 1,024 | 64 | 960 | 1 | 0.581726 | 0.011125 | 10.428518 | success | 0.105475 | 0.000001 | | 960 | 0.011125 | 0.001336 |
| 60 | model.layers.8.self_attn.o_proj | 0.09223 | 4,096 | 4,096 | 1 | 2.461574 | -4.15559 | 1.454472 | true | 0.020503 | dense | -4.108942 | -1.018538 | -1.688184 | 64 | 0.095821 | 4,096 | 10 | 4,032 | 1 | 0.46219 | 0.020503 | 4.673542 | success | 0.143188 | 0 | | 4,032 | 0.020503 | 0.001352 |
| 61 | model.layers.8.self_attn.q_proj | 0.084565 | 4,096 | 4,096 | 1 | 3.152206 | -5.503796 | 1.522169 | true | 0.017947 | dense | -5.27548 | -0.713958 | -1.746014 | 64 | 0.193216 | 4,096 | 18 | 4,032 | 1 | 0.50728 | 0.017947 | 10.766051 | success | 0.133965 | 0 | | 4,032 | 0.017947 | 0.00263 |
| 62 | model.layers.8.self_attn.v_proj | 0.111446 | 1,024 | 4,096 | 4 | 2.77145 | -6.400331 | 1.088291 | true | 0.004905 | dense | -6.323427 | -1.418579 | -2.30938 | 64 | 0.038144 | 1,024 | 8 | 960 | 1 | 0.626302 | 0.004905 | 7.7768 | success | 0.070034 | 0.000001 | | 960 | 0.004905 | 0.000599 |
| 63 | model.layers.9.mlp.down_proj | 0.057908 | 4,096 | 14,336 | 3.5 | 2.742288 | -3.60653 | 1.476515 | true | 0.0484 | dense | -3.575599 | -0.588558 | -1.315154 | 64 | 0.257895 | 4,096 | 16 | 4,032 | 1 | 0.435572 | 0.0484 | 5.328387 | success | 0.22 | 0.000001 | | 4,032 | 0.0484 | 0.003219 |
| 64 | model.layers.9.mlp.gate_proj | 0.060009 | 4,096 | 14,336 | 3.5 | 2.898617 | -4.121823 | 1.515991 | true | 0.037845 | dense | -3.977617 | -0.461977 | -1.421996 | 64 | 0.345162 | 4,096 | 17 | 4,032 | 1 | 0.460482 | 0.037845 | 9.120508 | success | 0.194537 | 0.000001 | | 4,032 | 0.037845 | 0.004708 |
| 65 | model.layers.9.mlp.up_proj | 0.052531 | 4,096 | 14,336 | 3.5 | 2.673129 | -3.463093 | 1.477563 | true | 0.050638 | dense | -3.404474 | -0.531199 | -1.29552 | 64 | 0.294307 | 4,096 | 21 | 4,032 | 1 | 0.365107 | 0.050638 | 5.811941 | success | 0.22503 | 0.000001 | | 4,032 | 0.050638 | 0.003268 |
| 66 | model.layers.9.self_attn.k_proj | 0.060317 | 1,024 | 4,096 | 4 | 6.679092 | -15.305076 | 1.130864 | true | 0.005111 | dense | -15.266985 | -0.937667 | -2.291491 | 64 | 0.115434 | 1,024 | 63 | 960 | 1 | 0.715498 | 0.005111 | 22.58519 | success | 0.071492 | 0.000001 | under-trained | 960 | 0.005111 | 0.001483 |
| 67 | model.layers.9.self_attn.o_proj | 0.089964 | 4,096 | 4,096 | 1 | 2.427394 | -4.082041 | 1.448539 | true | 0.020813 | dense | -4.047767 | -1.042092 | -1.681656 | 64 | 0.090763 | 4,096 | 11 | 4,032 | 1 | 0.430375 | 0.020813 | 4.360779 | success | 0.144269 | 0 | | 4,032 | 0.020813 | 0.001153 |
| 68 | model.layers.9.self_attn.q_proj | 0.08705 | 4,096 | 4,096 | 1 | 3.412095 | -6.577103 | 1.545219 | true | 0.011814 | dense | -6.312881 | -0.747492 | -1.927585 | 64 | 0.178858 | 4,096 | 14 | 4,032 | 1 | 0.64466 | 0.011814 | 15.138855 | success | 0.108695 | 0 | | 4,032 | 0.011814 | 0.002687 |
| 69 | model.layers.9.self_attn.v_proj | 0.09917 | 1,024 | 4,096 | 4 | 2.657595 | -6.158284 | 1.08253 | true | 0.004817 | dense | -6.091413 | -1.466603 | -2.317239 | 64 | 0.034151 | 1,024 | 8 | 960 | 1 | 0.586048 | 0.004817 | 7.08984 | success | 0.069403 | 0.000001 | | 960 | 0.004817 | 0.000531 |
| 70 | model.layers.10.mlp.down_proj | 0.057096 | 4,096 | 14,336 | 3.5 | 2.766404 | -3.423776 | 1.448927 | true | 0.057859 | dense | -3.408703 | -0.598612 | -1.237627 | 64 | 0.251993 | 4,096 | 19 | 4,032 | 1 | 0.405241 | 0.057859 | 4.35527 | success | 0.24054 | 0.000001 | | 4,032 | 0.057859 | 0.002774 |
| 71 | model.layers.10.mlp.gate_proj | 0.050086 | 4,096 | 14,336 | 3.5 | 2.870972 | -3.932424 | 1.504084 | true | 0.042686 | dense | -3.820135 | -0.471952 | -1.369718 | 64 | 0.337325 | 4,096 | 21 | 4,032 | 1 | 0.40828 | 0.042686 | 7.902541 | success | 0.206605 | 0.000001 | | 4,032 | 0.042686 | 0.004129 |
| 72 | model.layers.10.mlp.up_proj | 0.059118 | 4,096 | 14,336 | 3.5 | 2.54132 | -3.159267 | 1.457901 | true | 0.057127 | dense | -3.113346 | -0.549877 | -1.24316 | 64 | 0.281918 | 4,096 | 17 | 4,032 | 1 | 0.373825 | 0.057127 | 4.93495 | success | 0.239012 | 0.000001 | | 4,032 | 0.057127 | 0.003304 |
| 73 | model.layers.10.self_attn.k_proj | 0.079687 | 1,024 | 4,096 | 4 | 6.221469 | -13.191227 | 1.12411 | true | 0.007581 | dense | -13.173883 | -0.904894 | -2.120275 | 64 | 0.124482 | 1,024 | 64 | 960 | 1 | 0.652684 | 0.007581 | 16.420292 | success | 0.087069 | 0.000001 | under-trained | 960 | 0.007581 | 0.001542 |
| 74 | model.layers.10.self_attn.o_proj | 0.104255 | 4,096 | 4,096 | 1 | 2.39271 | -3.957537 | 1.440596 | true | 0.022182 | dense | -3.929246 | -1.042654 | -1.653998 | 64 | 0.090645 | 4,096 | 8 | 4,032 | 1 | 0.492397 | 0.022182 | 4.086422 | success | 0.148937 | 0 | | 4,032 | 0.022182 | 0.001372 |
| 75 | model.layers.10.self_attn.q_proj | 0.102157 | 4,096 | 4,096 | 1 | 3.201262 | -5.802851 | 1.532303 | true | 0.015393 | dense | -5.534375 | -0.723382 | -1.812676 | 64 | 0.189068 | 4,096 | 15 | 4,032 | 1 | 0.568363 | 0.015393 | 12.282715 | success | 0.124069 | 0 | | 4,032 | 0.015393 | 0.002681 |
| 76 | model.layers.10.self_attn.v_proj | 0.085466 | 1,024 | 4,096 | 4 | 2.820915 | -6.457655 | 1.080996 | true | 0.005138 | dense | -6.411704 | -1.448764 | -2.289206 | 64 | 0.035582 | 1,024 | 10 | 960 | 1 | 0.575824 | 0.005138 | 6.925353 | success | 0.07168 | 0.000001 | | 960 | 0.005138 | 0.000503 |
| 77 | model.layers.11.mlp.down_proj | 0.080344 | 4,096 | 14,336 | 3.5 | 2.781667 | -3.513401 | 1.45523 | true | 0.054569 | dense | -3.497021 | -0.608057 | -1.263056 | 64 | 0.246571 | 4,096 | 17 | 4,032 | 1 | 0.432118 | 0.054569 | 4.518544 | success | 0.2336 | 0.000001 | | 4,032 | 0.054569 | 0.00289 |
| 78 | model.layers.11.mlp.gate_proj | 0.052737 | 4,096 | 14,336 | 3.5 | 2.832129 | -3.842099 | 1.504786 | true | 0.043993 | dense | -3.741266 | -0.467105 | -1.356612 | 64 | 0.341111 | 4,096 | 16 | 4,032 | 1 | 0.458032 | 0.043993 | 7.753662 | success | 0.209746 | 0.000001 | | 4,032 | 0.043993 | 0.004794 |
| 79 | model.layers.11.mlp.up_proj | 0.058418 | 4,096 | 14,336 | 3.5 | 2.744464 | -3.448862 | 1.465464 | true | 0.055378 | dense | -3.415026 | -0.543832 | -1.256662 | 64 | 0.285869 | 4,096 | 23 | 4,032 | 1 | 0.363746 | 0.055378 | 5.162137 | success | 0.235326 | 0.000001 | | 4,032 | 0.055378 | 0.003069 |
| 80 | model.layers.11.self_attn.k_proj | 0.050394 | 1,024 | 4,096 | 4 | 5.234028 | -11.270114 | 1.123817 | true | 0.007027 | dense | -11.198865 | -0.902614 | -2.153239 | 64 | 0.125137 | 1,024 | 64 | 960 | 1 | 0.529253 | 0.007027 | 17.808403 | success | 0.083826 | 0.000001 | | 960 | 0.007027 | 0.001477 |
| 81 | model.layers.11.self_attn.o_proj | 0.108526 | 4,096 | 4,096 | 1 | 2.377962 | -3.887587 | 1.429546 | true | 0.023182 | dense | -3.861041 | -1.046776 | -1.63484 | 64 | 0.089789 | 4,096 | 8 | 4,032 | 1 | 0.487183 | 0.023182 | 3.873146 | success | 0.152258 | 0 | | 4,032 | 0.023182 | 0.001305 |
| 82 | model.layers.11.self_attn.q_proj | 0.083963 | 4,096 | 4,096 | 1 | 3.071511 | -5.495301 | 1.52872 | true | 0.016251 | dense | -5.23659 | -0.71646 | -1.78912 | 64 | 0.192106 | 4,096 | 16 | 4,032 | 1 | 0.517878 | 0.016251 | 11.821149 | success | 0.127479 | 0 | | 4,032 | 0.016251 | 0.002715 |
| 83 | model.layers.11.self_attn.v_proj | 0.096823 | 1,024 | 4,096 | 4 | 2.730485 | -6.100362 | 1.070588 | true | 0.005832 | dense | -6.065863 | -1.450516 | -2.234168 | 64 | 0.035439 | 1,024 | 9 | 960 | 1 | 0.576828 | 0.005832 | 6.076487 | success | 0.076369 | 0.000001 | | 960 | 0.005832 | 0.000501 |
| 84 | model.layers.12.mlp.down_proj | 0.089923 | 4,096 | 14,336 | 3.5 | 2.833527 | -3.574279 | 1.452804 | true | 0.054774 | dense | -3.559816 | -0.612318 | -1.261424 | 64 | 0.244164 | 4,096 | 18 | 4,032 | 1 | 0.432166 | 0.054774 | 4.457656 | success | 0.234039 | 0.000001 | | 4,032 | 0.054774 | 0.002777 |
| 85 | model.layers.12.mlp.gate_proj | 0.053532 | 4,096 | 14,336 | 3.5 | 2.8049 | -3.764999 | 1.501889 | true | 0.045468 | dense | -3.680067 | -0.478142 | -1.342294 | 64 | 0.332551 | 4,096 | 16 | 4,032 | 1 | 0.451225 | 0.045468 | 7.313938 | success | 0.213232 | 0.000001 | | 4,032 | 0.045468 | 0.004557 |
| 86 | model.layers.12.mlp.up_proj | 0.055524 | 4,096 | 14,336 | 3.5 | 2.565255 | -3.136041 | 1.451193 | true | 0.059909 | dense | -3.104937 | -0.563025 | -1.222506 | 64 | 0.273511 | 4,096 | 15 | 4,032 | 1 | 0.404147 | 0.059909 | 4.565421 | success | 0.244764 | 0.000001 | | 4,032 | 0.059909 | 0.003387 |
| 87 | model.layers.12.self_attn.k_proj | 0.075259 | 1,024 | 4,096 | 4 | 7.086432 | -16.088866 | 1.130188 | true | 0.005366 | dense | -16.054192 | -0.926907 | -2.270376 | 64 | 0.118329 | 1,024 | 64 | 960 | 1 | 0.760804 | 0.005366 | 22.053078 | success | 0.073251 | 0.000001 | under-trained | 960 | 0.005366 | 0.001531 |
| 88 | model.layers.12.self_attn.o_proj | 0.098352 | 4,096 | 4,096 | 1 | 2.341469 | -3.844098 | 1.422986 | true | 0.022817 | dense | -3.816921 | -1.066917 | -1.641746 | 64 | 0.08572 | 4,096 | 9 | 4,032 | 1 | 0.447156 | 0.022817 | 3.756896 | success | 0.151052 | 0 | | 4,032 | 0.022817 | 0.001114 |
| 89 | model.layers.12.self_attn.q_proj | 0.096271 | 4,096 | 4,096 | 1 | 3.514879 | -6.437541 | 1.532037 | true | 0.01474 | dense | -6.215437 | -0.751836 | -1.831512 | 64 | 0.177078 | 4,096 | 19 | 4,032 | 1 | 0.576953 | 0.01474 | 12.01366 | success | 0.121407 | 0 | | 4,032 | 0.01474 | 0.002399 |
| 90 | model.layers.12.self_attn.v_proj | 0.125811 | 1,024 | 4,096 | 4 | 2.641311 | -5.892274 | 1.064412 | true | 0.005877 | dense | -5.860651 | -1.479967 | -2.230814 | 64 | 0.033116 | 1,024 | 7 | 960 | 1 | 0.620357 | 0.005877 | 5.634392 | success | 0.076664 | 0.000001 | | 960 | 0.005877 | 0.000492 |
| 91 | model.layers.13.mlp.down_proj | 0.057336 | 4,096 | 14,336 | 3.5 | 2.697668 | -3.599657 | 1.476768 | true | 0.046306 | dense | -3.564414 | -0.605902 | -1.334359 | 64 | 0.247798 | 4,096 | 16 | 4,032 | 1 | 0.424417 | 0.046306 | 5.35127 | success | 0.215189 | 0.000001 | | 4,032 | 0.046306 | 0.003034 |
| 92 | model.layers.13.mlp.gate_proj | 0.049988 | 4,096 | 14,336 | 3.5 | 2.897777 | -4.073994 | 1.512724 | true | 0.039273 | dense | -3.955254 | -0.473035 | -1.405903 | 64 | 0.336485 | 4,096 | 18 | 4,032 | 1 | 0.44731 | 0.039273 | 8.567781 | success | 0.198175 | 0.000001 | | 4,032 | 0.039273 | 0.004496 |
| 93 | model.layers.13.mlp.up_proj | 0.050633 | 4,096 | 14,336 | 3.5 | 2.671905 | -3.462605 | 1.47642 | true | 0.050591 | dense | -3.418621 | -0.554572 | -1.295931 | 64 | 0.278887 | 4,096 | 16 | 4,032 | 1 | 0.417976 | 0.050591 | 5.512635 | success | 0.224923 | 0.000001 | | 4,032 | 0.050591 | 0.003567 |
| 94 | model.layers.13.self_attn.k_proj | 0.066871 | 1,024 | 4,096 | 4 | 7.126741 | -15.667061 | 1.127761 | true | 0.006334 | dense | -15.652862 | -0.92216 | -2.198349 | 64 | 0.11963 | 1,024 | 62 | 960 | 1 | 0.778097 | 0.006334 | 18.888117 | success | 0.079584 | 0.000001 | under-trained | 960 | 0.006334 | 0.001548 |
| 95 | model.layers.13.self_attn.o_proj | 0.089577 | 4,096 | 4,096 | 1 | 2.459999 | -3.993042 | 1.421071 | true | 0.023813 | dense | -3.97649 | -1.060992 | -1.623188 | 64 | 0.086898 | 4,096 | 10 | 4,032 | 1 | 0.461692 | 0.023813 | 3.64919 | success | 0.154314 | 0 | | 4,032 | 0.023813 | 0.001045 |
| 96 | model.layers.13.self_attn.q_proj | 0.099775 | 4,096 | 4,096 | 1 | 3.642384 | -6.743105 | 1.539583 | true | 0.014084 | dense | -6.55117 | -0.731106 | -1.851289 | 64 | 0.185735 | 4,096 | 16 | 4,032 | 1 | 0.660596 | 0.014084 | 13.188117 | success | 0.118674 | 0 | | 4,032 | 0.014084 | 0.002735 |
| 97 | model.layers.13.self_attn.v_proj | 0.110842 | 1,024 | 4,096 | 4 | 2.716989 | -6.058543 | 1.070317 | true | 0.00589 | dense | -6.026413 | -1.454266 | -2.229874 | 64 | 0.035135 | 1,024 | 8 | 960 | 1 | 0.607047 | 0.00589 | 5.964974 | success | 0.076747 | 0.000001 | | 960 | 0.00589 | 0.000493 |
| 98 | model.layers.14.mlp.down_proj | 0.051323 | 4,096 | 14,336 | 3.5 | 3.157358 | -4.321053 | 1.500425 | true | 0.042799 | dense | -4.305179 | -0.576714 | -1.368566 | 64 | 0.265024 | 4,096 | 18 | 4,032 | 1 | 0.508494 | 0.042799 | 6.192298 | success | 0.206879 | 0.000001 | | 4,032 | 0.042799 | 0.003399 |
| 99 | model.layers.14.mlp.gate_proj | 0.052518 | 4,096 | 14,336 | 3.5 | 3.129804 | -4.385769 | 1.518106 | true | 0.039692 | dense | -4.279752 | -0.439713 | -1.401292 | 64 | 0.363318 | 4,096 | 23 | 4,032 | 1 | 0.444095 | 0.039692 | 9.153327 | success | 0.19923 | 0.000001 | | 4,032 | 0.039692 | 0.00455 |
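A small pandas sketch for working with the table above: it loads an exported copy of the per-layer details and lists the rows carrying the under-trained warning, plus any layer whose power-law exponent alpha falls outside the 2 to 6 band often used as a rule of thumb in the HTSR literature. The details.csv path is a stand-in for wherever this table is actually stored.

```python
# Sketch only: flag layers this analysis marks as potentially under-trained.
# Assumes the table has been exported to "details.csv" (hypothetical path);
# thousands="," handles comma-grouped integers such as 4,096 if they were kept.
import pandas as pd

details = pd.read_csv("details.csv", thousands=",")

# Rows already carrying the explicit warning from the analysis
flagged = details[details["warning"] == "under-trained"]

# Rule-of-thumb check: power-law exponent alpha outside the [2, 6] band
outliers = details[(details["alpha"] < 2) | (details["alpha"] > 6)]

print(flagged[["layer_id", "name", "alpha", "stable_rank"]])
print(outliers[["layer_id", "name", "alpha", "alpha_weighted"]])
```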