Column schema (dtype and observed min/max or class count for each field):

| column | dtype | min / classes | max |
|---|---|---|---|
| layer_id | int64 | 0 | 223 |
| name | stringlengths | 26 | 32 |
| D | float64 | 0.02 | 0.18 |
| M | int64 | 1.02k | 4.1k |
| N | int64 | 4.1k | 14.3k |
| Q | float64 | 1 | 4 |
| alpha | float64 | 3.57 | 40.4 |
| alpha_weighted | float64 | -123.19 | -7.38 |
| entropy | float64 | 1.1 | 1.57 |
| has_esd | bool | 1 class | |
| lambda_max | float32 | 0 | 0.01 |
| layer_type | stringclasses | 1 value | |
| log_alpha_norm | float64 | -123.03 | -6.97 |
| log_norm | float32 | -1.93 | -0.76 |
| log_spectral_norm | float32 | -3.35 | -2 |
| matrix_rank | int64 | 64 | 64 |
| norm | float32 | 0.01 | 0.17 |
| num_evals | int64 | 1.02k | 4.1k |
| num_pl_spikes | int64 | 7 | 64 |
| rank_loss | int64 | 960 | 4.03k |
| rf | int64 | 1 | 1 |
| sigma | float64 | 0.59 | 11.9 |
| spectral_norm | float32 | 0 | 0.01 |
| stable_rank | float32 | 14 | 56.5 |
| status | stringclasses | 1 value | |
| sv_max | float64 | 0.02 | 0.1 |
| sv_min | float64 | 0 | 0 |
| warning | stringclasses | 2 values | |
| weak_rank_loss | int64 | 960 | 4.03k |
| xmax | float64 | 0 | 0.01 |
| xmin | float64 | 0 | 0 |
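The field set above (per-layer alpha, alpha_weighted, D, xmin/xmax, lambda_max, stable_rank, num_pl_spikes, and an "under-trained" warning) matches the per-layer details frame produced by the open-source weightwatcher tool. The dump itself does not state how it was generated, so the sketch below is only an assumption about provenance, showing how such a table is typically produced for a Hugging Face checkpoint; the model name and output path are placeholders, not values from this dataset.

```python
# Hedged sketch: assumes these columns came from weightwatcher's per-layer analysis.
import weightwatcher as ww
from transformers import AutoModelForCausalLM

# Placeholder checkpoint; the dump does not name the analyzed model.
model = AutoModelForCausalLM.from_pretrained("org/model-name-here")

watcher = ww.WeightWatcher(model=model)
details = watcher.analyze()  # pandas DataFrame, one row per analyzable weight matrix

# Columns assumed present based on the schema shown above.
print(details[["layer_id", "name", "alpha", "D", "stable_rank", "warning"]].head())
details.to_csv("ww_details.csv", index=False)  # hypothetical export path
```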
Per-layer rows (layer_id 100-199), reconstructed from the flattened dump:

| layer_id | name | D | M | N | Q | alpha | alpha_weighted | entropy | has_esd | lambda_max | layer_type | log_alpha_norm | log_norm | log_spectral_norm | matrix_rank | norm | num_evals | num_pl_spikes | rank_loss | rf | sigma | spectral_norm | stable_rank | status | sv_max | sv_min | warning | weak_rank_loss | xmax | xmin |
|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|
| 100 | model.layers.14.mlp.up_proj | 0.070595 | 4,096 | 14,336 | 3.5 | 5.295959 | -11.124345 | 1.560849 | true | 0.007934 | dense | -11.020632 | -0.761963 | -2.100535 | 64 | 0.172996 | 4,096 | 17 | 4,032 | 1 | 1.041923 | 0.007934 | 21.805763 | success | 0.08907 | 0.000001 | | 4,032 | 0.007934 | 0.002631 |
| 101 | model.layers.14.self_attn.k_proj | 0.108818 | 1,024 | 4,096 | 4 | 9.374769 | -27.164726 | 1.136578 | true | 0.001266 | dense | -26.117086 | -1.18827 | -2.897642 | 64 | 0.064823 | 1,024 | 60 | 960 | 1 | 1.081178 | 0.001266 | 51.21207 | success | 0.035578 | 0.000001 | under-trained | 960 | 0.001266 | 0.000903 |
| 102 | model.layers.14.self_attn.o_proj | 0.063818 | 4,096 | 4,096 | 1 | 10.309597 | -27.056012 | 1.565609 | true | 0.002375 | dense | -26.82605 | -1.075338 | -2.624352 | 64 | 0.084074 | 4,096 | 64 | 4,032 | 1 | 1.1637 | 0.002375 | 35.400898 | success | 0.048733 | 0 | under-trained | 4,032 | 0.002375 | 0.001168 |
| 103 | model.layers.14.self_attn.q_proj | 0.038597 | 4,096 | 4,096 | 1 | 10.648016 | -27.996992 | 1.566439 | true | 0.002348 | dense | -27.951509 | -1.095221 | -2.629315 | 64 | 0.080312 | 4,096 | 64 | 4,032 | 1 | 1.206002 | 0.002348 | 34.20536 | success | 0.048455 | 0 | under-trained | 4,032 | 0.002348 | 0.001123 |
| 104 | model.layers.14.self_attn.v_proj | 0.063967 | 1,024 | 4,096 | 4 | 18.977392 | -56.967035 | 1.136897 | true | 0.000996 | dense | -56.612306 | -1.303714 | -3.001837 | 64 | 0.049692 | 1,024 | 21 | 960 | 1 | 3.922989 | 0.000996 | 49.902519 | success | 0.031556 | 0.000001 | under-trained | 960 | 0.000996 | 0.000796 |
| 105 | model.layers.15.mlp.down_proj | 0.030025 | 4,096 | 14,336 | 3.5 | 19.888242 | -49.551639 | 1.567968 | true | 0.003225 | dense | -49.298357 | -0.794197 | -2.491504 | 64 | 0.160621 | 4,096 | 64 | 4,032 | 1 | 2.36103 | 0.003225 | 49.808956 | success | 0.056787 | 0.000001 | under-trained | 4,032 | 0.003225 | 0.002376 |
| 106 | model.layers.15.mlp.gate_proj | 0.068083 | 4,096 | 14,336 | 3.5 | 5.924201 | -12.518264 | 1.561087 | true | 0.007708 | dense | -12.462627 | -0.777118 | -2.113072 | 64 | 0.167064 | 4,096 | 22 | 4,032 | 1 | 1.049843 | 0.007708 | 21.674715 | success | 0.087794 | 0.000001 | | 4,032 | 0.007708 | 0.002473 |
| 107 | model.layers.15.mlp.up_proj | 0.079643 | 4,096 | 14,336 | 3.5 | 5.669253 | -12.220102 | 1.562827 | true | 0.00699 | dense | -12.101856 | -0.76957 | -2.155505 | 64 | 0.169992 | 4,096 | 12 | 4,032 | 1 | 1.347897 | 0.00699 | 24.318357 | success | 0.083608 | 0.000001 | | 4,032 | 0.00699 | 0.002777 |
| 108 | model.layers.15.self_attn.k_proj | 0.101475 | 1,024 | 4,096 | 4 | 10.378981 | -29.608627 | 1.136357 | true | 0.001404 | dense | -28.722908 | -1.157424 | -2.852749 | 64 | 0.069595 | 1,024 | 42 | 960 | 1 | 1.447208 | 0.001404 | 49.582108 | success | 0.037465 | 0.000001 | under-trained | 960 | 0.001404 | 0.001026 |
| 109 | model.layers.15.self_attn.o_proj | 0.058208 | 4,096 | 4,096 | 1 | 22.957791 | -64.240785 | 1.567953 | true | 0.001591 | dense | -64.025073 | -1.092063 | -2.798213 | 64 | 0.080898 | 4,096 | 18 | 4,032 | 1 | 5.175501 | 0.001591 | 50.83342 | success | 0.039893 | 0 | under-trained | 4,032 | 0.001591 | 0.0013 |
| 110 | model.layers.15.self_attn.q_proj | 0.058694 | 4,096 | 4,096 | 1 | 13.271464 | -35.264829 | 1.567114 | true | 0.002202 | dense | -35.242807 | -1.089259 | -2.657192 | 64 | 0.081422 | 4,096 | 46 | 4,032 | 1 | 1.809329 | 0.002202 | 36.977089 | success | 0.046925 | 0 | under-trained | 4,032 | 0.002202 | 0.001202 |
| 111 | model.layers.15.self_attn.v_proj | 0.11419 | 1,024 | 4,096 | 4 | 22.354517 | -68.653411 | 1.137139 | true | 0.000849 | dense | -67.828455 | -1.325262 | -3.07112 | 64 | 0.047287 | 1,024 | 26 | 960 | 1 | 4.187965 | 0.000849 | 55.700417 | success | 0.029137 | 0.000001 | under-trained | 960 | 0.000849 | 0.000747 |
| 112 | model.layers.16.mlp.down_proj | 0.064003 | 4,096 | 14,336 | 3.5 | 18.423216 | -44.586561 | 1.567581 | true | 0.003801 | dense | -44.537154 | -0.789763 | -2.420129 | 64 | 0.162269 | 4,096 | 64 | 4,032 | 1 | 2.177902 | 0.003801 | 42.693913 | success | 0.06165 | 0.000001 | under-trained | 4,032 | 0.003801 | 0.002387 |
| 113 | model.layers.16.mlp.gate_proj | 0.095854 | 4,096 | 14,336 | 3.5 | 6.133836 | -12.7012 | 1.56011 | true | 0.008498 | dense | -12.670542 | -0.765247 | -2.070678 | 64 | 0.171693 | 4,096 | 25 | 4,032 | 1 | 1.026767 | 0.008498 | 20.203686 | success | 0.092185 | 0.000001 | under-trained | 4,032 | 0.008498 | 0.002498 |
| 114 | model.layers.16.mlp.up_proj | 0.09054 | 4,096 | 14,336 | 3.5 | 6.53151 | -13.916047 | 1.562146 | true | 0.007403 | dense | -13.871134 | -0.768526 | -2.130602 | 64 | 0.170402 | 4,096 | 24 | 4,032 | 1 | 1.129115 | 0.007403 | 23.018433 | success | 0.08604 | 0.000001 | under-trained | 4,032 | 0.007403 | 0.002511 |
| 115 | model.layers.16.self_attn.k_proj | 0.115521 | 1,024 | 4,096 | 4 | 9.912147 | -28.712586 | 1.136763 | true | 0.001269 | dense | -27.50179 | -1.163768 | -2.896707 | 64 | 0.068585 | 1,024 | 60 | 960 | 1 | 1.150553 | 0.001269 | 54.067829 | success | 0.035616 | 0.000001 | under-trained | 960 | 0.001269 | 0.000962 |
| 116 | model.layers.16.self_attn.o_proj | 0.054619 | 4,096 | 4,096 | 1 | 21.033314 | -58.536135 | 1.567931 | true | 0.001648 | dense | -58.320719 | -1.082587 | -2.78302 | 64 | 0.082682 | 4,096 | 26 | 4,032 | 1 | 3.928856 | 0.001648 | 50.168678 | success | 0.040597 | 0 | under-trained | 4,032 | 0.001648 | 0.001299 |
| 117 | model.layers.16.self_attn.q_proj | 0.045936 | 4,096 | 4,096 | 1 | 13.197938 | -34.586 | 1.56682 | true | 0.002396 | dense | -34.576094 | -1.080902 | -2.620561 | 64 | 0.083004 | 4,096 | 41 | 4,032 | 1 | 1.904998 | 0.002396 | 34.646488 | success | 0.048946 | 0 | under-trained | 4,032 | 0.002396 | 0.001237 |
| 118 | model.layers.16.self_attn.v_proj | 0.078067 | 1,024 | 4,096 | 4 | 40.403046 | -123.190286 | 1.137152 | true | 0.000893 | dense | -123.031911 | -1.316324 | -3.049035 | 64 | 0.04827 | 1,024 | 11 | 960 | 1 | 11.880465 | 0.000893 | 54.039471 | success | 0.029887 | 0.000001 | under-trained | 960 | 0.000893 | 0.000805 |
| 119 | model.layers.17.mlp.down_proj | 0.06046 | 4,096 | 14,336 | 3.5 | 17.860272 | -43.56073 | 1.567649 | true | 0.003639 | dense | -43.471237 | -0.794595 | -2.438973 | 64 | 0.160474 | 4,096 | 64 | 4,032 | 1 | 2.107534 | 0.003639 | 44.093876 | success | 0.060327 | 0.000001 | under-trained | 4,032 | 0.003639 | 0.002356 |
| 120 | model.layers.17.mlp.gate_proj | 0.082128 | 4,096 | 14,336 | 3.5 | 5.495079 | -11.656814 | 1.561849 | true | 0.007563 | dense | -11.56735 | -0.772201 | -2.121319 | 64 | 0.168966 | 4,096 | 13 | 4,032 | 1 | 1.246711 | 0.007563 | 22.341782 | success | 0.086964 | 0.000001 | | 4,032 | 0.007563 | 0.0027 |
| 121 | model.layers.17.mlp.up_proj | 0.063486 | 4,096 | 14,336 | 3.5 | 6.23721 | -13.59188 | 1.563557 | true | 0.00662 | dense | -13.499983 | -0.775867 | -2.17916 | 64 | 0.167545 | 4,096 | 14 | 4,032 | 1 | 1.399703 | 0.00662 | 25.310049 | success | 0.081362 | 0.000001 | under-trained | 4,032 | 0.00662 | 0.002663 |
| 122 | model.layers.17.self_attn.k_proj | 0.05194 | 1,024 | 4,096 | 4 | 13.234445 | -38.008686 | 1.136307 | true | 0.001343 | dense | -37.564366 | -1.20944 | -2.871952 | 64 | 0.061739 | 1,024 | 21 | 960 | 1 | 2.669775 | 0.001343 | 45.973885 | success | 0.036646 | 0.000001 | under-trained | 960 | 0.001343 | 0.000991 |
| 123 | model.layers.17.self_attn.o_proj | 0.033461 | 4,096 | 4,096 | 1 | 14.530256 | -40.348406 | 1.567569 | true | 0.001672 | dense | -39.98802 | -1.106861 | -2.776855 | 64 | 0.078188 | 4,096 | 37 | 4,032 | 1 | 2.22436 | 0.001672 | 46.772816 | success | 0.040886 | 0 | under-trained | 4,032 | 0.001672 | 0.001184 |
| 124 | model.layers.17.self_attn.q_proj | 0.034492 | 4,096 | 4,096 | 1 | 11.908328 | -31.226268 | 1.566418 | true | 0.002387 | dense | -31.21486 | -1.107816 | -2.622221 | 64 | 0.078016 | 4,096 | 47 | 4,032 | 1 | 1.591143 | 0.002387 | 32.689209 | success | 0.048853 | 0 | under-trained | 4,032 | 0.002387 | 0.001138 |
| 125 | model.layers.17.self_attn.v_proj | 0.090111 | 1,024 | 4,096 | 4 | 25.721496 | -78.962157 | 1.137121 | true | 0.000851 | dense | -78.326813 | -1.329413 | -3.06989 | 64 | 0.046837 | 1,024 | 21 | 960 | 1 | 5.394673 | 0.000851 | 55.01442 | success | 0.029178 | 0.000001 | under-trained | 960 | 0.000851 | 0.000753 |
| 126 | model.layers.18.mlp.down_proj | 0.035721 | 4,096 | 14,336 | 3.5 | 20.031384 | -49.405232 | 1.567859 | true | 0.003417 | dense | -49.336911 | -0.806355 | -2.466391 | 64 | 0.156187 | 4,096 | 64 | 4,032 | 1 | 2.378923 | 0.003417 | 45.712662 | success | 0.058453 | 0.000001 | under-trained | 4,032 | 0.003417 | 0.002311 |
| 127 | model.layers.18.mlp.gate_proj | 0.063541 | 4,096 | 14,336 | 3.5 | 5.057453 | -10.494784 | 1.559962 | true | 0.008412 | dense | -10.422957 | -0.782374 | -2.075113 | 64 | 0.165054 | 4,096 | 14 | 4,032 | 1 | 1.0844 | 0.008412 | 19.621803 | success | 0.091716 | 0.000001 | | 4,032 | 0.008412 | 0.00255 |
| 128 | model.layers.18.mlp.up_proj | 0.063435 | 4,096 | 14,336 | 3.5 | 5.346702 | -11.310891 | 1.561263 | true | 0.007665 | dense | -11.227929 | -0.787416 | -2.115489 | 64 | 0.163149 | 4,096 | 14 | 4,032 | 1 | 1.161705 | 0.007665 | 21.284979 | success | 0.08755 | 0.000001 | | 4,032 | 0.007665 | 0.002524 |
| 129 | model.layers.18.self_attn.k_proj | 0.113236 | 1,024 | 4,096 | 4 | 7.800461 | -22.423279 | 1.136252 | true | 0.001335 | dense | -21.267073 | -1.168217 | -2.874609 | 64 | 0.067887 | 1,024 | 62 | 960 | 1 | 0.863659 | 0.001335 | 50.861946 | success | 0.036534 | 0.000001 | under-trained | 960 | 0.001335 | 0.000915 |
| 130 | model.layers.18.self_attn.o_proj | 0.073369 | 4,096 | 4,096 | 1 | 11.414136 | -29.839959 | 1.565693 | true | 0.002431 | dense | -29.820689 | -1.120096 | -2.614299 | 64 | 0.075841 | 4,096 | 64 | 4,032 | 1 | 1.301767 | 0.002431 | 31.203423 | success | 0.0493 | 0 | under-trained | 4,032 | 0.002431 | 0.001066 |
| 131 | model.layers.18.self_attn.q_proj | 0.053146 | 4,096 | 4,096 | 1 | 12.021277 | -31.476889 | 1.566602 | true | 0.002408 | dense | -31.460674 | -1.089056 | -2.618431 | 64 | 0.08146 | 4,096 | 48 | 4,032 | 1 | 1.590784 | 0.002408 | 33.835728 | success | 0.049066 | 0 | under-trained | 4,032 | 0.002408 | 0.001188 |
| 132 | model.layers.18.self_attn.v_proj | 0.074868 | 1,024 | 4,096 | 4 | 31.943612 | -97.858903 | 1.137107 | true | 0.000864 | dense | -97.507446 | -1.330271 | -3.063489 | 64 | 0.046744 | 1,024 | 12 | 960 | 1 | 8.932651 | 0.000864 | 54.102608 | success | 0.029394 | 0.000001 | under-trained | 960 | 0.000864 | 0.000776 |
| 133 | model.layers.19.mlp.down_proj | 0.052651 | 4,096 | 14,336 | 3.5 | 23.80037 | -59.519345 | 1.568035 | true | 0.003157 | dense | -59.475479 | -0.821488 | -2.500774 | 64 | 0.150839 | 4,096 | 63 | 4,032 | 1 | 2.872577 | 0.003157 | 47.78442 | success | 0.056184 | 0.000001 | under-trained | 4,032 | 0.003157 | 0.002254 |
| 134 | model.layers.19.mlp.gate_proj | 0.074004 | 4,096 | 14,336 | 3.5 | 5.484326 | -11.727247 | 1.561759 | true | 0.007272 | dense | -11.654583 | -0.803156 | -2.13832 | 64 | 0.157342 | 4,096 | 14 | 4,032 | 1 | 1.198487 | 0.007272 | 21.635359 | success | 0.085279 | 0.000001 | | 4,032 | 0.007272 | 0.002451 |
| 135 | model.layers.19.mlp.up_proj | 0.081407 | 4,096 | 14,336 | 3.5 | 5.57579 | -12.189419 | 1.563044 | true | 0.006514 | dense | -12.085657 | -0.812214 | -2.186133 | 64 | 0.154094 | 4,096 | 11 | 4,032 | 1 | 1.379653 | 0.006514 | 23.654802 | success | 0.080711 | 0.000001 | | 4,032 | 0.006514 | 0.002491 |
| 136 | model.layers.19.self_attn.k_proj | 0.103896 | 1,024 | 4,096 | 4 | 9.422241 | -27.63405 | 1.136375 | true | 0.001167 | dense | -26.58249 | -1.22558 | -2.932853 | 64 | 0.059487 | 1,024 | 50 | 960 | 1 | 1.191085 | 0.001167 | 50.965183 | success | 0.034164 | 0.000001 | under-trained | 960 | 0.001167 | 0.000849 |
| 137 | model.layers.19.self_attn.o_proj | 0.044688 | 4,096 | 4,096 | 1 | 21.054933 | -59.327508 | 1.567926 | true | 0.001521 | dense | -59.104358 | -1.11806 | -2.817749 | 64 | 0.076197 | 4,096 | 23 | 4,032 | 1 | 4.181743 | 0.001521 | 50.082825 | success | 0.039005 | 0 | under-trained | 4,032 | 0.001521 | 0.001205 |
| 138 | model.layers.19.self_attn.q_proj | 0.043086 | 4,096 | 4,096 | 1 | 13.007356 | -34.852457 | 1.566701 | true | 0.002092 | dense | -34.840439 | -1.140514 | -2.679442 | 64 | 0.072358 | 4,096 | 32 | 4,032 | 1 | 2.122621 | 0.002092 | 34.588226 | success | 0.045738 | 0 | under-trained | 4,032 | 0.002092 | 0.001104 |
| 139 | model.layers.19.self_attn.v_proj | 0.085536 | 1,024 | 4,096 | 4 | 32.112979 | -97.924641 | 1.137149 | true | 0.000893 | dense | -97.656135 | -1.317667 | -3.049379 | 64 | 0.048121 | 1,024 | 17 | 960 | 1 | 7.546006 | 0.000893 | 53.915321 | success | 0.029875 | 0.000001 | under-trained | 960 | 0.000893 | 0.000785 |
| 140 | model.layers.20.mlp.down_proj | 0.06154 | 4,096 | 14,336 | 3.5 | 25.728871 | -64.630048 | 1.568064 | true | 0.003076 | dense | -64.612966 | -0.836834 | -2.511966 | 64 | 0.145602 | 4,096 | 63 | 4,032 | 1 | 3.115545 | 0.003076 | 47.329498 | success | 0.055465 | 0.000001 | under-trained | 4,032 | 0.003076 | 0.002184 |
| 141 | model.layers.20.mlp.gate_proj | 0.07428 | 4,096 | 14,336 | 3.5 | 5.7562 | -12.622673 | 1.56318 | true | 0.006414 | dense | -12.538929 | -0.821344 | -2.192883 | 64 | 0.150888 | 4,096 | 12 | 4,032 | 1 | 1.372997 | 0.006414 | 23.525513 | success | 0.080086 | 0.000001 | | 4,032 | 0.006414 | 0.002385 |
| 142 | model.layers.20.mlp.up_proj | 0.067795 | 4,096 | 14,336 | 3.5 | 6.416853 | -14.515735 | 1.564549 | true | 0.005469 | dense | -14.415343 | -0.835165 | -2.262127 | 64 | 0.146162 | 4,096 | 13 | 4,032 | 1 | 1.502365 | 0.005469 | 26.727709 | success | 0.07395 | 0.000001 | under-trained | 4,032 | 0.005469 | 0.002298 |
| 143 | model.layers.20.self_attn.k_proj | 0.084596 | 1,024 | 4,096 | 4 | 16.050775 | -47.779777 | 1.136508 | true | 0.001055 | dense | -47.116229 | -1.274841 | -2.976789 | 64 | 0.053108 | 1,024 | 16 | 960 | 1 | 3.762694 | 0.001055 | 50.344112 | success | 0.032479 | 0.000001 | under-trained | 960 | 0.001055 | 0.00088 |
| 144 | model.layers.20.self_attn.o_proj | 0.095816 | 4,096 | 4,096 | 1 | 35.030403 | -101.934399 | 1.568151 | true | 0.001231 | dense | -101.410505 | -1.157785 | -2.909884 | 64 | 0.069537 | 4,096 | 16 | 4,032 | 1 | 8.507601 | 0.001231 | 56.506565 | success | 0.03508 | 0 | under-trained | 4,032 | 0.001231 | 0.001121 |
| 145 | model.layers.20.self_attn.q_proj | 0.043583 | 4,096 | 4,096 | 1 | 11.930345 | -32.120584 | 1.566411 | true | 0.002031 | dense | -32.11039 | -1.180249 | -2.692343 | 64 | 0.066032 | 4,096 | 39 | 4,032 | 1 | 1.750256 | 0.002031 | 32.515812 | success | 0.045064 | 0 | under-trained | 4,032 | 0.002031 | 0.000981 |
| 146 | model.layers.20.self_attn.v_proj | 0.113966 | 1,024 | 4,096 | 4 | 14.136786 | -43.462944 | 1.137104 | true | 0.000842 | dense | -42.349018 | -1.328369 | -3.074457 | 64 | 0.04695 | 1,024 | 53 | 960 | 1 | 1.804476 | 0.000842 | 55.729904 | success | 0.029025 | 0.000001 | under-trained | 960 | 0.000842 | 0.00069 |
| 147 | model.layers.21.mlp.down_proj | 0.069908 | 4,096 | 14,336 | 3.5 | 25.507698 | -64.295016 | 1.568023 | true | 0.003016 | dense | -64.282851 | -0.853733 | -2.520612 | 64 | 0.140045 | 4,096 | 61 | 4,032 | 1 | 3.137889 | 0.003016 | 46.438583 | success | 0.054915 | 0.000001 | under-trained | 4,032 | 0.003016 | 0.002102 |
| 148 | model.layers.21.mlp.gate_proj | 0.108281 | 4,096 | 14,336 | 3.5 | 5.774525 | -12.953684 | 1.564314 | true | 0.005712 | dense | -12.833288 | -0.836879 | -2.243247 | 64 | 0.145587 | 4,096 | 8 | 4,032 | 1 | 1.688049 | 0.005712 | 25.489885 | success | 0.075575 | 0.000001 | | 4,032 | 0.005712 | 0.002424 |
| 149 | model.layers.21.mlp.up_proj | 0.093581 | 4,096 | 14,336 | 3.5 | 6.776335 | -15.681946 | 1.565386 | true | 0.00485 | dense | -15.571955 | -0.857898 | -2.314222 | 64 | 0.138708 | 4,096 | 11 | 4,032 | 1 | 1.741631 | 0.00485 | 28.597256 | success | 0.069645 | 0.000001 | under-trained | 4,032 | 0.00485 | 0.002209 |
| 150 | model.layers.21.self_attn.k_proj | 0.059948 | 1,024 | 4,096 | 4 | 11.910162 | -35.071107 | 1.136015 | true | 0.001136 | dense | -34.693005 | -1.302377 | -2.944637 | 64 | 0.049845 | 1,024 | 22 | 960 | 1 | 2.326054 | 0.001136 | 43.879395 | success | 0.033704 | 0.000001 | under-trained | 960 | 0.001136 | 0.000798 |
| 151 | model.layers.21.self_attn.o_proj | 0.074732 | 4,096 | 4,096 | 1 | 36.651508 | -107.643644 | 1.568171 | true | 0.001156 | dense | -107.320781 | -1.191226 | -2.93695 | 64 | 0.064383 | 4,096 | 13 | 4,032 | 1 | 9.887949 | 0.001156 | 55.683193 | success | 0.034004 | 0 | under-trained | 4,032 | 0.001156 | 0.001042 |
| 152 | model.layers.21.self_attn.q_proj | 0.047363 | 4,096 | 4,096 | 1 | 11.346615 | -30.906445 | 1.566245 | true | 0.001889 | dense | -30.892907 | -1.217366 | -2.723847 | 64 | 0.060623 | 4,096 | 46 | 4,032 | 1 | 1.525525 | 0.001889 | 32.098225 | success | 0.043459 | 0 | under-trained | 4,032 | 0.001889 | 0.000883 |
| 153 | model.layers.21.self_attn.v_proj | 0.113763 | 1,024 | 4,096 | 4 | 14.337243 | -44.625051 | 1.137172 | true | 0.000772 | dense | -43.490099 | -1.362799 | -3.112527 | 64 | 0.043371 | 1,024 | 56 | 960 | 1 | 1.782264 | 0.000772 | 56.198883 | success | 0.02778 | 0.000001 | under-trained | 960 | 0.000772 | 0.000635 |
| 154 | model.layers.22.mlp.down_proj | 0.066965 | 4,096 | 14,336 | 3.5 | 26.614543 | -68.025058 | 1.568099 | true | 0.00278 | dense | -67.988984 | -0.866974 | -2.555936 | 64 | 0.13584 | 4,096 | 62 | 4,032 | 1 | 3.25305 | 0.00278 | 48.86095 | success | 0.052727 | 0.000001 | under-trained | 4,032 | 0.00278 | 0.002042 |
| 155 | model.layers.22.mlp.gate_proj | 0.097833 | 4,096 | 14,336 | 3.5 | 6.001974 | -13.491658 | 1.564459 | true | 0.005651 | dense | -13.385668 | -0.836949 | -2.24787 | 64 | 0.145563 | 4,096 | 9 | 4,032 | 1 | 1.667325 | 0.005651 | 25.758545 | success | 0.075174 | 0.000001 | under-trained | 4,032 | 0.005651 | 0.002382 |
| 156 | model.layers.22.mlp.up_proj | 0.08121 | 4,096 | 14,336 | 3.5 | 6.807247 | -15.786899 | 1.565366 | true | 0.004796 | dense | -15.680063 | -0.863387 | -2.319131 | 64 | 0.136966 | 4,096 | 11 | 4,032 | 1 | 1.750951 | 0.004796 | 28.55909 | success | 0.069252 | 0.000001 | under-trained | 4,032 | 0.004796 | 0.00219 |
| 157 | model.layers.22.self_attn.k_proj | 0.101224 | 1,024 | 4,096 | 4 | 6.56137 | -19.691763 | 1.135512 | true | 0.000997 | dense | -18.594873 | -1.324471 | -3.001166 | 64 | 0.047373 | 1,024 | 64 | 960 | 1 | 0.695171 | 0.000997 | 47.500237 | success | 0.03158 | 0.000001 | under-trained | 960 | 0.000997 | 0.000613 |
| 158 | model.layers.22.self_attn.o_proj | 0.064047 | 4,096 | 4,096 | 1 | 14.630623 | -41.505533 | 1.567305 | true | 0.001456 | dense | -41.424312 | -1.22554 | -2.836895 | 64 | 0.059492 | 4,096 | 63 | 4,032 | 1 | 1.717297 | 0.001456 | 40.865242 | success | 0.038155 | 0 | under-trained | 4,032 | 0.001456 | 0.000861 |
| 159 | model.layers.22.self_attn.q_proj | 0.03411 | 4,096 | 4,096 | 1 | 9.809994 | -26.862289 | 1.565564 | true | 0.001827 | dense | -26.838374 | -1.255902 | -2.738257 | 64 | 0.055475 | 4,096 | 42 | 4,032 | 1 | 1.359412 | 0.001827 | 30.363724 | success | 0.042744 | 0 | under-trained | 4,032 | 0.001827 | 0.000807 |
| 160 | model.layers.22.self_attn.v_proj | 0.054654 | 1,024 | 4,096 | 4 | 21.498415 | -66.059842 | 1.136995 | true | 0.000846 | dense | -65.872133 | -1.378539 | -3.072777 | 64 | 0.041827 | 1,024 | 20 | 960 | 1 | 4.583585 | 0.000846 | 49.458221 | success | 0.029081 | 0.000001 | under-trained | 960 | 0.000846 | 0.000671 |
| 161 | model.layers.23.mlp.down_proj | 0.062324 | 4,096 | 14,336 | 3.5 | 27.278771 | -70.935617 | 1.568166 | true | 0.00251 | dense | -70.675347 | -0.879847 | -2.600396 | 64 | 0.131872 | 4,096 | 63 | 4,032 | 1 | 3.310814 | 0.00251 | 52.547184 | success | 0.050096 | 0.000001 | under-trained | 4,032 | 0.00251 | 0.001983 |
| 162 | model.layers.23.mlp.gate_proj | 0.102704 | 4,096 | 14,336 | 3.5 | 6.167049 | -13.731182 | 1.564025 | true | 0.005936 | dense | -13.667456 | -0.841866 | -2.22654 | 64 | 0.143924 | 4,096 | 9 | 4,032 | 1 | 1.72235 | 0.005936 | 24.247877 | success | 0.077042 | 0.000001 | under-trained | 4,032 | 0.005936 | 0.002354 |
| 163 | model.layers.23.mlp.up_proj | 0.072697 | 4,096 | 14,336 | 3.5 | 7.470926 | -17.248557 | 1.565169 | true | 0.004912 | dense | -17.20196 | -0.8729 | -2.308758 | 64 | 0.133998 | 4,096 | 13 | 4,032 | 1 | 1.794712 | 0.004912 | 27.280809 | success | 0.070084 | 0.000001 | under-trained | 4,032 | 0.004912 | 0.00211 |
| 164 | model.layers.23.self_attn.k_proj | 0.050034 | 1,024 | 4,096 | 4 | 9.433142 | -28.276416 | 1.135838 | true | 0.001006 | dense | -27.622812 | -1.349703 | -2.997561 | 64 | 0.044699 | 1,024 | 48 | 960 | 1 | 1.217219 | 0.001006 | 44.448589 | success | 0.031712 | 0.000001 | under-trained | 960 | 0.001006 | 0.000639 |
| 165 | model.layers.23.self_attn.o_proj | 0.073546 | 4,096 | 4,096 | 1 | 5.384058 | -14.183412 | 1.56385 | true | 0.002321 | dense | -13.935122 | -1.200379 | -2.634335 | 64 | 0.063041 | 4,096 | 12 | 4,032 | 1 | 1.265568 | 0.002321 | 27.161652 | success | 0.048176 | 0 | | 4,032 | 0.002321 | 0.00099 |
| 166 | model.layers.23.self_attn.q_proj | 0.026508 | 4,096 | 4,096 | 1 | 10.00856 | -27.863208 | 1.565898 | true | 0.001645 | dense | -27.811764 | -1.269355 | -2.783938 | 64 | 0.053783 | 4,096 | 49 | 4,032 | 1 | 1.286937 | 0.001645 | 32.702648 | success | 0.040554 | 0 | under-trained | 4,032 | 0.001645 | 0.000769 |
| 167 | model.layers.23.self_attn.v_proj | 0.066253 | 1,024 | 4,096 | 4 | 17.725424 | -54.425669 | 1.136923 | true | 0.00085 | dense | -53.875092 | -1.360398 | -3.070486 | 64 | 0.043612 | 1,024 | 26 | 960 | 1 | 3.280126 | 0.00085 | 51.296551 | success | 0.029158 | 0.000001 | under-trained | 960 | 0.00085 | 0.000687 |
| 168 | model.layers.24.mlp.down_proj | 0.088589 | 4,096 | 14,336 | 3.5 | 25.547802 | -65.549568 | 1.568011 | true | 0.002718 | dense | -65.519276 | -0.893975 | -2.565762 | 64 | 0.127651 | 4,096 | 63 | 4,032 | 1 | 3.092732 | 0.002718 | 46.966293 | success | 0.052134 | 0.000001 | under-trained | 4,032 | 0.002718 | 0.001913 |
| 169 | model.layers.24.mlp.gate_proj | 0.116326 | 4,096 | 14,336 | 3.5 | 5.891471 | -13.039916 | 1.563655 | true | 0.006119 | dense | -12.980163 | -0.84982 | -2.213355 | 64 | 0.141312 | 4,096 | 8 | 4,032 | 1 | 1.729396 | 0.006119 | 23.09589 | success | 0.078221 | 0.000001 | | 4,032 | 0.006119 | 0.002318 |
| 170 | model.layers.24.mlp.up_proj | 0.084695 | 4,096 | 14,336 | 3.5 | 7.723819 | -17.807934 | 1.564945 | true | 0.004948 | dense | -17.779779 | -0.88595 | -2.305587 | 64 | 0.130032 | 4,096 | 14 | 4,032 | 1 | 1.797016 | 0.004948 | 26.280695 | success | 0.070341 | 0.000001 | under-trained | 4,032 | 0.004948 | 0.002026 |
| 171 | model.layers.24.self_attn.k_proj | 0.042094 | 1,024 | 4,096 | 4 | 9.26434 | -27.923382 | 1.135516 | true | 0.000968 | dense | -27.431276 | -1.393554 | -3.014071 | 64 | 0.040406 | 1,024 | 40 | 960 | 1 | 1.306707 | 0.000968 | 41.736637 | success | 0.031115 | 0.000001 | under-trained | 960 | 0.000968 | 0.000592 |
| 172 | model.layers.24.self_attn.o_proj | 0.063429 | 4,096 | 4,096 | 1 | 18.543035 | -53.72646 | 1.567791 | true | 0.001267 | dense | -53.706925 | -1.265283 | -2.897393 | 64 | 0.05429 | 4,096 | 58 | 4,032 | 1 | 2.303513 | 0.001267 | 42.865685 | success | 0.035588 | 0 | under-trained | 4,032 | 0.001267 | 0.000804 |
| 173 | model.layers.24.self_attn.q_proj | 0.03856 | 4,096 | 4,096 | 1 | 10.280278 | -28.986392 | 1.565957 | true | 0.001515 | dense | -28.963955 | -1.323311 | -2.819612 | 64 | 0.0475 | 4,096 | 49 | 4,032 | 1 | 1.325754 | 0.001515 | 31.354559 | success | 0.038922 | 0 | under-trained | 4,032 | 0.001515 | 0.000681 |
| 174 | model.layers.24.self_attn.v_proj | 0.059587 | 1,024 | 4,096 | 4 | 26.995273 | -83.393638 | 1.137081 | true | 0.000814 | dense | -83.276914 | -1.383491 | -3.089194 | 64 | 0.041353 | 1,024 | 16 | 960 | 1 | 6.498818 | 0.000814 | 50.781246 | success | 0.028537 | 0.000001 | under-trained | 960 | 0.000814 | 0.000675 |
| 175 | model.layers.25.mlp.down_proj | 0.088452 | 4,096 | 14,336 | 3.5 | 22.674786 | -57.540608 | 1.567786 | true | 0.0029 | dense | -57.530036 | -0.90192 | -2.537647 | 64 | 0.125337 | 4,096 | 64 | 4,032 | 1 | 2.709348 | 0.0029 | 43.224232 | success | 0.053849 | 0.000001 | under-trained | 4,032 | 0.0029 | 0.001866 |
| 176 | model.layers.25.mlp.gate_proj | 0.115405 | 4,096 | 14,336 | 3.5 | 7.173909 | -15.938461 | 1.563881 | true | 0.006002 | dense | -15.919066 | -0.851489 | -2.221726 | 64 | 0.14077 | 4,096 | 12 | 4,032 | 1 | 1.782254 | 0.006002 | 23.455097 | success | 0.077471 | 0.000001 | under-trained | 4,032 | 0.006002 | 0.002222 |
| 177 | model.layers.25.mlp.up_proj | 0.117501 | 4,096 | 14,336 | 3.5 | 7.724192 | -17.831249 | 1.564917 | true | 0.004915 | dense | -17.805651 | -0.893028 | -2.308494 | 64 | 0.12793 | 4,096 | 13 | 4,032 | 1 | 1.864955 | 0.004915 | 26.029451 | success | 0.070106 | 0.000001 | under-trained | 4,032 | 0.004915 | 0.002006 |
| 178 | model.layers.25.self_attn.k_proj | 0.102301 | 1,024 | 4,096 | 4 | 7.238527 | -22.14815 | 1.135898 | true | 0.000871 | dense | -21.109452 | -1.379533 | -3.059759 | 64 | 0.041732 | 1,024 | 62 | 960 | 1 | 0.792294 | 0.000871 | 47.887905 | success | 0.02952 | 0.000001 | under-trained | 960 | 0.000871 | 0.000555 |
| 179 | model.layers.25.self_attn.o_proj | 0.072905 | 4,096 | 4,096 | 1 | 15.151347 | -43.711322 | 1.567377 | true | 0.001303 | dense | -43.662192 | -1.280084 | -2.884979 | 64 | 0.052471 | 4,096 | 63 | 4,032 | 1 | 1.782902 | 0.001303 | 40.262028 | success | 0.0361 | 0 | under-trained | 4,032 | 0.001303 | 0.000762 |
| 180 | model.layers.25.self_attn.q_proj | 0.024015 | 4,096 | 4,096 | 1 | 10.113745 | -28.551966 | 1.565922 | true | 0.001503 | dense | -28.520994 | -1.319469 | -2.823085 | 64 | 0.047922 | 4,096 | 40 | 4,032 | 1 | 1.44101 | 0.001503 | 31.887217 | success | 0.038767 | 0 | under-trained | 4,032 | 0.001503 | 0.000703 |
| 181 | model.layers.25.self_attn.v_proj | 0.050871 | 1,024 | 4,096 | 4 | 22.102889 | -67.771573 | 1.13697 | true | 0.000859 | dense | -67.671703 | -1.382476 | -3.066186 | 64 | 0.04145 | 1,024 | 20 | 960 | 1 | 4.718749 | 0.000859 | 48.273659 | success | 0.029303 | 0.000001 | under-trained | 960 | 0.000859 | 0.000666 |
| 182 | model.layers.26.mlp.down_proj | 0.099921 | 4,096 | 14,336 | 3.5 | 21.817002 | -54.155571 | 1.567389 | true | 0.003294 | dense | -54.155059 | -0.910668 | -2.482265 | 64 | 0.122838 | 4,096 | 64 | 4,032 | 1 | 2.602125 | 0.003294 | 37.290382 | success | 0.057394 | 0.000001 | under-trained | 4,032 | 0.003294 | 0.001823 |
| 183 | model.layers.26.mlp.gate_proj | 0.08999 | 4,096 | 14,336 | 3.5 | 6.457537 | -14.237801 | 1.563379 | true | 0.00624 | dense | -14.209257 | -0.85408 | -2.204835 | 64 | 0.139933 | 4,096 | 11 | 4,032 | 1 | 1.645509 | 0.00624 | 22.426134 | success | 0.078992 | 0.000001 | under-trained | 4,032 | 0.00624 | 0.002202 |
| 184 | model.layers.26.mlp.up_proj | 0.100135 | 4,096 | 14,336 | 3.5 | 6.472845 | -14.783802 | 1.564256 | true | 0.0052 | dense | -14.732363 | -0.893702 | -2.283973 | 64 | 0.127732 | 4,096 | 11 | 4,032 | 1 | 1.650125 | 0.0052 | 24.562418 | success | 0.072113 | 0.000001 | under-trained | 4,032 | 0.0052 | 0.002022 |
| 185 | model.layers.26.self_attn.k_proj | 0.030124 | 1,024 | 4,096 | 4 | 8.838954 | -26.360515 | 1.135024 | true | 0.001042 | dense | -26.077297 | -1.411301 | -2.982311 | 64 | 0.038788 | 1,024 | 34 | 960 | 1 | 1.34437 | 0.001042 | 37.240086 | success | 0.032273 | 0.000001 | under-trained | 960 | 0.001042 | 0.000578 |
| 186 | model.layers.26.self_attn.o_proj | 0.080145 | 4,096 | 4,096 | 1 | 4.237009 | -11.025414 | 1.560244 | true | 0.002499 | dense | -10.726383 | -1.24904 | -2.602169 | 64 | 0.056359 | 4,096 | 12 | 4,032 | 1 | 0.934444 | 0.002499 | 22.549124 | success | 0.049994 | 0 | | 4,032 | 0.002499 | 0.000862 |
| 187 | model.layers.26.self_attn.q_proj | 0.036869 | 4,096 | 4,096 | 1 | 8.40632 | -23.18886 | 1.564288 | true | 0.001744 | dense | -23.157452 | -1.324999 | -2.758503 | 64 | 0.047315 | 4,096 | 64 | 4,032 | 1 | 0.92579 | 0.001744 | 27.133411 | success | 0.041759 | 0 | under-trained | 4,032 | 0.001744 | 0.000637 |
| 188 | model.layers.26.self_attn.v_proj | 0.06321 | 1,024 | 4,096 | 4 | 14.601195 | -43.401385 | 1.136401 | true | 0.001065 | dense | -43.379188 | -1.386946 | -2.972454 | 64 | 0.041025 | 1,024 | 38 | 960 | 1 | 2.206405 | 0.001065 | 38.5042 | success | 0.032642 | 0.000001 | under-trained | 960 | 0.001065 | 0.00062 |
| 189 | model.layers.27.mlp.down_proj | 0.125634 | 4,096 | 14,336 | 3.5 | 20.134975 | -48.749573 | 1.566567 | true | 0.003792 | dense | -48.749521 | -0.922233 | -2.421139 | 64 | 0.11961 | 4,096 | 64 | 4,032 | 1 | 2.391872 | 0.003792 | 31.543194 | success | 0.061579 | 0.000001 | under-trained | 4,032 | 0.003792 | 0.001763 |
| 190 | model.layers.27.mlp.gate_proj | 0.080541 | 4,096 | 14,336 | 3.5 | 5.975536 | -12.835613 | 1.561437 | true | 0.007112 | dense | -12.813628 | -0.853734 | -2.148027 | 64 | 0.140044 | 4,096 | 12 | 4,032 | 1 | 1.436314 | 0.007112 | 19.692146 | success | 0.084331 | 0.000001 | | 4,032 | 0.007112 | 0.002181 |
| 191 | model.layers.27.mlp.up_proj | 0.090913 | 4,096 | 14,336 | 3.5 | 5.596395 | -12.553345 | 1.56291 | true | 0.005713 | dense | -12.483229 | -0.893531 | -2.243113 | 64 | 0.127782 | 4,096 | 10 | 4,032 | 1 | 1.453508 | 0.005713 | 22.365685 | success | 0.075586 | 0.000001 | | 4,032 | 0.005713 | 0.002055 |
| 192 | model.layers.27.self_attn.k_proj | 0.061444 | 1,024 | 4,096 | 4 | 10.763951 | -32.338098 | 1.135765 | true | 0.00099 | dense | -32.04649 | -1.391835 | -3.004296 | 64 | 0.040566 | 1,024 | 27 | 960 | 1 | 1.879073 | 0.00099 | 40.969563 | success | 0.031467 | 0.000001 | under-trained | 960 | 0.00099 | 0.000631 |
| 193 | model.layers.27.self_attn.o_proj | 0.09495 | 4,096 | 4,096 | 1 | 11.720985 | -32.742357 | 1.565708 | true | 0.001609 | dense | -32.718908 | -1.288713 | -2.793482 | 64 | 0.051438 | 4,096 | 64 | 4,032 | 1 | 1.340123 | 0.001609 | 31.971897 | success | 0.040111 | 0 | under-trained | 4,032 | 0.001609 | 0.000725 |
| 194 | model.layers.27.self_attn.q_proj | 0.030258 | 4,096 | 4,096 | 1 | 9.312165 | -26.042982 | 1.565224 | true | 0.001597 | dense | -26.018858 | -1.331789 | -2.796662 | 64 | 0.046581 | 4,096 | 51 | 4,032 | 1 | 1.163936 | 0.001597 | 29.165789 | success | 0.039964 | 0 | under-trained | 4,032 | 0.001597 | 0.000656 |
| 195 | model.layers.27.self_attn.v_proj | 0.054073 | 1,024 | 4,096 | 4 | 16.699773 | -50.638051 | 1.136774 | true | 0.000928 | dense | -50.55997 | -1.387949 | -3.03226 | 64 | 0.040931 | 1,024 | 31 | 960 | 1 | 2.819762 | 0.000928 | 44.087074 | success | 0.03047 | 0.000001 | under-trained | 960 | 0.000928 | 0.000633 |
| 196 | model.layers.28.mlp.down_proj | 0.147856 | 4,096 | 14,336 | 3.5 | 17.659206 | -40.82923 | 1.564656 | true | 0.004875 | dense | -40.829228 | -0.920379 | -2.312065 | 64 | 0.120122 | 4,096 | 64 | 4,032 | 1 | 2.082401 | 0.004875 | 24.642588 | success | 0.069818 | 0.000001 | under-trained | 4,032 | 0.004875 | 0.001747 |
| 197 | model.layers.28.mlp.gate_proj | 0.091559 | 4,096 | 14,336 | 3.5 | 5.433214 | -11.60878 | 1.560451 | true | 0.007301 | dense | -11.576672 | -0.863321 | -2.136632 | 64 | 0.136987 | 4,096 | 11 | 4,032 | 1 | 1.336664 | 0.007301 | 18.763388 | success | 0.085444 | 0.000001 | | 4,032 | 0.007301 | 0.002153 |
| 198 | model.layers.28.mlp.up_proj | 0.081664 | 4,096 | 14,336 | 3.5 | 5.483033 | -12.276219 | 1.562416 | true | 0.005768 | dense | -12.196294 | -0.893248 | -2.238947 | 64 | 0.127865 | 4,096 | 11 | 4,032 | 1 | 1.351685 | 0.005768 | 22.166574 | success | 0.07595 | 0.000001 | | 4,032 | 0.005768 | 0.002042 |
| 199 | model.layers.28.self_attn.k_proj | 0.049955 | 1,024 | 4,096 | 4 | 8.914576 | -27.248532 | 1.135713 | true | 0.000878 | dense | -26.40493 | -1.391175 | -3.056627 | 64 | 0.040628 | 1,024 | 45 | 960 | 1 | 1.179835 | 0.000878 | 46.28624 | success | 0.029627 | 0.000001 | under-trained | 960 | 0.000878 | 0.000583 |
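Once the rows above are loaded into pandas, the table can be sliced directly, for example to pull out the layers flagged as under-trained or whose fitted power-law exponent alpha falls outside the 2 to 6 band often quoted as a rule of thumb for well-trained layers. A minimal sketch; the file name is a hypothetical export of this table and the threshold is illustrative, not part of the dataset.

```python
import pandas as pd

# Hypothetical CSV export of the per-layer table above.
df = pd.read_csv("ww_details.csv", thousands=",")

# Layers the tool itself flagged in the `warning` column.
flagged = df[df["warning"] == "under-trained"]
print(len(flagged), "layers flagged as under-trained")

# Layers whose power-law exponent lies outside the commonly cited 2 <= alpha <= 6 band.
out_of_band = df[(df["alpha"] < 2) | (df["alpha"] > 6)]
print(out_of_band[["layer_id", "name", "alpha", "stable_rank"]]
      .sort_values("alpha", ascending=False)
      .head(10))
```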