Dataset Viewer (auto-converted to Parquet)
Columns (dtype and value range or class count reported by the viewer):

| Column | Dtype | Range / classes |
| --- | --- | --- |
| layer_id | int64 | 0 to 223 |
| name | string | lengths 26 to 32 |
| D | float64 | 0.03 to 0.21 |
| M | int64 | 1.02k to 4.1k |
| N | int64 | 4.1k to 14.3k |
| Q | float64 | 1 to 4 |
| alpha | float64 | 2.55 to 30.7 |
| alpha_weighted | float64 | -100.7 to -4.79 |
| entropy | float64 | 1.07 to 1.57 |
| has_esd | bool | 1 class |
| lambda_max | float32 | 0 to 0.01 |
| layer_type | string | 1 class |
| log_alpha_norm | float64 | -100.63 to -4.73 |
| log_norm | float32 | -1.66 to -0.94 |
| log_spectral_norm | float32 | -3.31 to -1.88 |
| matrix_rank | int64 | 64 to 64 |
| norm | float32 | 0.02 to 0.11 |
| num_evals | int64 | 1.02k to 4.1k |
| num_pl_spikes | int64 | 5 to 64 |
| rank_loss | int64 | 960 to 4.03k |
| rf | int64 | 1 to 1 |
| sigma | float64 | 0.35 to 9.03 |
| spectral_norm | float32 | 0 to 0.01 |
| stable_rank | float32 | 4.91 to 54.6 |
| status | string | 1 class |
| sv_max | float64 | 0.02 to 0.11 |
| sv_min | float64 | 0 to 0 |
| warning | string | 2 classes |
| weak_rank_loss | int64 | 960 to 4.03k |
| xmax | float64 | 0 to 0.01 |
| xmin | float64 | 0 to 0 |
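Because the viewer is backed by the auto-converted Parquet files, the split can be pulled into pandas with the `datasets` library. A minimal loading sketch follows; the repo id and split name are placeholders (this page does not show them), so substitute the actual dataset path.

```python
# Minimal loading sketch. "user/weightwatcher-layer-metrics" and "train" are
# hypothetical placeholders; replace them with the real repo id and split.
from datasets import load_dataset

ds = load_dataset("user/weightwatcher-layer-metrics", split="train")
df = ds.to_pandas()

# Example query: layers flagged "under-trained", sorted by the power-law exponent alpha.
flagged = df[df["warning"] == "under-trained"].sort_values("alpha")
print(flagged[["name", "alpha", "alpha_weighted", "stable_rank"]].head())
```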
Data preview (first 100 rows shown in the viewer):

| layer_id | name | D | M | N | Q | alpha | alpha_weighted | entropy | has_esd | lambda_max | layer_type | log_alpha_norm | log_norm | log_spectral_norm | matrix_rank | norm | num_evals | num_pl_spikes | rank_loss | rf | sigma | spectral_norm | stable_rank | status | sv_max | sv_min | warning | weak_rank_loss | xmax | xmin |
| --- | --- | --- | --- | --- | --- | --- | --- | --- | --- | --- | --- | --- | --- | --- | --- | --- | --- | --- | --- | --- | --- | --- | --- | --- | --- | --- | --- | --- | --- | --- |
| 0 | model.layers.0.mlp.down_proj | 0.078037 | 4,096 | 14,336 | 3.5 | 4.053136 | -8.624209 | 1.546217 | true | 0.007451 | dense | -8.422872 | -0.942671 | -2.127786 | 64 | 0.114112 | 4,096 | 21 | 4,032 | 1 | 0.666249 | 0.007451 | 15.314962 | success | 0.086319 | 0.000001 | | 4,032 | 0.007451 | 0.001594 |
| 1 | model.layers.0.mlp.gate_proj | 0.074845 | 4,096 | 14,336 | 3.5 | 3.074121 | -6.048194 | 1.523045 | true | 0.010778 | dense | -5.992284 | -1.043336 | -1.967455 | 64 | 0.090503 | 4,096 | 13 | 4,032 | 1 | 0.575258 | 0.010778 | 8.39689 | success | 0.103818 | 0.000001 | | 4,032 | 0.010778 | 0.001346 |
| 2 | model.layers.0.mlp.up_proj | 0.073709 | 4,096 | 14,336 | 3.5 | 2.963981 | -5.587149 | 1.50815 | true | 0.013031 | dense | -5.539123 | -1.029696 | -1.885015 | 64 | 0.093391 | 4,096 | 18 | 4,032 | 1 | 0.462915 | 0.013031 | 7.166698 | success | 0.114154 | 0.000001 | | 4,032 | 0.013031 | 0.001169 |
| 3 | model.layers.0.self_attn.k_proj | 0.041871 | 1,024 | 4,096 | 4 | 3.660928 | -8.275696 | 1.091316 | true | 0.005489 | dense | -8.234427 | -1.295308 | -2.260546 | 64 | 0.050663 | 1,024 | 34 | 960 | 1 | 0.456345 | 0.005489 | 9.230771 | success | 0.074084 | 0.000001 | | 960 | 0.005489 | 0.000629 |
| 4 | model.layers.0.self_attn.o_proj | 0.075064 | 4,096 | 4,096 | 1 | 2.545702 | -4.788104 | 1.446955 | true | 0.013157 | dense | -4.725399 | -1.189854 | -1.880858 | 64 | 0.064587 | 4,096 | 19 | 4,032 | 1 | 0.354608 | 0.013157 | 4.90913 | success | 0.114702 | 0 | | 4,032 | 0.013157 | 0.00069 |
| 5 | model.layers.0.self_attn.q_proj | 0.063552 | 4,096 | 4,096 | 1 | 3.702849 | -8.79926 | 1.540747 | true | 0.004204 | dense | -8.706662 | -1.264196 | -2.376349 | 64 | 0.054426 | 4,096 | 41 | 4,032 | 1 | 0.422114 | 0.004204 | 12.9465 | success | 0.064837 | 0 | | 4,032 | 0.004204 | 0.000649 |
| 6 | model.layers.0.self_attn.v_proj | 0.065128 | 1,024 | 4,096 | 4 | 4.518371 | -11.075607 | 1.074268 | true | 0.003538 | dense | -11.073485 | -1.633022 | -2.451239 | 64 | 0.02328 | 1,024 | 64 | 960 | 1 | 0.439796 | 0.003538 | 6.579869 | success | 0.059481 | 0.000001 | | 960 | 0.003538 | 0.000235 |
| 7 | model.layers.1.mlp.down_proj | 0.120789 | 4,096 | 14,336 | 3.5 | 3.760372 | -8.12376 | 1.553696 | true | 0.006913 | dense | -8.030754 | -0.996069 | -2.160361 | 64 | 0.100909 | 4,096 | 7 | 4,032 | 1 | 1.043323 | 0.006913 | 14.59795 | success | 0.083142 | 0.000001 | | 4,032 | 0.006913 | 0.001736 |
| 8 | model.layers.1.mlp.gate_proj | 0.101213 | 4,096 | 14,336 | 3.5 | 3.073197 | -6.50478 | 1.540912 | true | 0.007645 | dense | -6.342643 | -1.041639 | -2.116617 | 64 | 0.090858 | 4,096 | 9 | 4,032 | 1 | 0.691066 | 0.007645 | 11.884425 | success | 0.087436 | 0.000001 | | 4,032 | 0.007645 | 0.001464 |
| 9 | model.layers.1.mlp.up_proj | 0.120154 | 4,096 | 14,336 | 3.5 | 3.501113 | -7.287698 | 1.536073 | true | 0.008288 | dense | -7.149562 | -1.016781 | -2.081537 | 64 | 0.09621 | 4,096 | 15 | 4,032 | 1 | 0.645785 | 0.008288 | 11.607965 | success | 0.09104 | 0.000001 | | 4,032 | 0.008288 | 0.001366 |
| 10 | model.layers.1.self_attn.k_proj | 0.056722 | 1,024 | 4,096 | 4 | 4.463136 | -10.295389 | 1.102708 | true | 0.004934 | dense | -10.254933 | -1.28201 | -2.306761 | 64 | 0.052238 | 1,024 | 62 | 960 | 1 | 0.439819 | 0.004934 | 10.586463 | success | 0.070246 | 0.000001 | | 960 | 0.004934 | 0.00056 |
| 11 | model.layers.1.self_attn.o_proj | 0.082948 | 4,096 | 4,096 | 1 | 2.788825 | -6.187573 | 1.512383 | true | 0.006044 | dense | -5.846718 | -1.19272 | -2.218702 | 64 | 0.064162 | 4,096 | 17 | 4,032 | 1 | 0.433854 | 0.006044 | 10.616527 | success | 0.077741 | 0 | | 4,032 | 0.006044 | 0.000848 |
| 12 | model.layers.1.self_attn.q_proj | 0.051761 | 4,096 | 4,096 | 1 | 4.367304 | -9.032353 | 1.518836 | true | 0.008547 | dense | -9.028883 | -1.188359 | -2.068176 | 64 | 0.06481 | 4,096 | 64 | 4,032 | 1 | 0.420913 | 0.008547 | 7.582582 | success | 0.092451 | 0 | | 4,032 | 0.008547 | 0.000665 |
| 13 | model.layers.1.self_attn.v_proj | 0.084882 | 1,024 | 4,096 | 4 | 6.50357 | -18.628179 | 1.1217 | true | 0.001367 | dense | -18.610157 | -1.656919 | -2.864301 | 64 | 0.022033 | 1,024 | 64 | 960 | 1 | 0.687946 | 0.001367 | 16.120611 | success | 0.03697 | 0.000001 | under-trained | 960 | 0.001367 | 0.000273 |
| 14 | model.layers.2.mlp.down_proj | 0.141108 | 4,096 | 14,336 | 3.5 | 4.986315 | -11.807773 | 1.561366 | true | 0.004285 | dense | -11.631251 | -1.010099 | -2.368036 | 64 | 0.097701 | 4,096 | 12 | 4,032 | 1 | 1.15075 | 0.004285 | 22.800112 | success | 0.065461 | 0.000001 | | 4,032 | 0.004285 | 0.001526 |
| 15 | model.layers.2.mlp.gate_proj | 0.09412 | 4,096 | 14,336 | 3.5 | 3.715742 | -7.997589 | 1.549306 | true | 0.007041 | dense | -7.919346 | -1.032961 | -2.152353 | 64 | 0.092691 | 4,096 | 10 | 4,032 | 1 | 0.858793 | 0.007041 | 13.164119 | success | 0.083912 | 0.000001 | | 4,032 | 0.007041 | 0.001451 |
| 16 | model.layers.2.mlp.up_proj | 0.088223 | 4,096 | 14,336 | 3.5 | 3.476825 | -7.241272 | 1.543075 | true | 0.008266 | dense | -7.155042 | -1.013239 | -2.082725 | 64 | 0.096998 | 4,096 | 12 | 4,032 | 1 | 0.714998 | 0.008266 | 11.735083 | success | 0.090915 | 0.000001 | | 4,032 | 0.008266 | 0.001439 |
| 17 | model.layers.2.self_attn.k_proj | 0.059055 | 1,024 | 4,096 | 4 | 6.324751 | -16.606587 | 1.124112 | true | 0.002368 | dense | -16.570013 | -1.3895 | -2.625651 | 64 | 0.040785 | 1,024 | 64 | 960 | 1 | 0.665594 | 0.002368 | 17.224663 | success | 0.04866 | 0.000001 | under-trained | 960 | 0.002368 | 0.000507 |
| 18 | model.layers.2.self_attn.o_proj | 0.054473 | 4,096 | 4,096 | 1 | 3.100158 | -7.058452 | 1.526014 | true | 0.005287 | dense | -6.920864 | -1.264247 | -2.276804 | 64 | 0.054419 | 4,096 | 21 | 4,032 | 1 | 0.458292 | 0.005287 | 10.293361 | success | 0.072711 | 0 | | 4,032 | 0.005287 | 0.0007 |
| 19 | model.layers.2.self_attn.q_proj | 0.085959 | 4,096 | 4,096 | 1 | 8.018122 | -20.184861 | 1.553734 | true | 0.003038 | dense | -20.184489 | -1.360992 | -2.517405 | 64 | 0.043552 | 4,096 | 64 | 4,032 | 1 | 0.877265 | 0.003038 | 14.335519 | success | 0.055119 | 0 | under-trained | 4,032 | 0.003038 | 0.000566 |
| 20 | model.layers.2.self_attn.v_proj | 0.09117 | 1,024 | 4,096 | 4 | 7.027676 | -20.602752 | 1.12715 | true | 0.00117 | dense | -20.565834 | -1.62459 | -2.931659 | 64 | 0.023736 | 1,024 | 64 | 960 | 1 | 0.753459 | 0.00117 | 20.280077 | success | 0.034211 | 0.000001 | under-trained | 960 | 0.00117 | 0.000304 |
| 21 | model.layers.3.mlp.down_proj | 0.08119 | 4,096 | 14,336 | 3.5 | 5.102273 | -12.319074 | 1.563152 | true | 0.003851 | dense | -12.153177 | -1.034317 | -2.414429 | 64 | 0.092402 | 4,096 | 10 | 4,032 | 1 | 1.297253 | 0.003851 | 23.994524 | success | 0.062056 | 0.000001 | | 4,032 | 0.003851 | 0.001462 |
| 22 | model.layers.3.mlp.gate_proj | 0.10023 | 4,096 | 14,336 | 3.5 | 3.712471 | -8.48617 | 1.556851 | true | 0.005178 | dense | -8.305467 | -1.050676 | -2.285855 | 64 | 0.088987 | 4,096 | 8 | 4,032 | 1 | 0.959003 | 0.005178 | 17.186176 | success | 0.071957 | 0.000001 | | 4,032 | 0.005178 | 0.001423 |
| 23 | model.layers.3.mlp.up_proj | 0.11905 | 4,096 | 14,336 | 3.5 | 3.594004 | -8.02471 | 1.553261 | true | 0.005851 | dense | -7.847661 | -1.035492 | -2.232805 | 64 | 0.092153 | 4,096 | 9 | 4,032 | 1 | 0.864668 | 0.005851 | 15.75117 | success | 0.076489 | 0.000001 | | 4,032 | 0.005851 | 0.001451 |
| 24 | model.layers.3.self_attn.k_proj | 0.104859 | 1,024 | 4,096 | 4 | 5.604087 | -12.085917 | 1.085087 | true | 0.006972 | dense | -12.084052 | -1.287998 | -2.156626 | 64 | 0.051523 | 1,024 | 64 | 960 | 1 | 0.575511 | 0.006972 | 7.389718 | success | 0.0835 | 0.000001 | | 960 | 0.006972 | 0.000571 |
| 25 | model.layers.3.self_attn.o_proj | 0.063017 | 4,096 | 4,096 | 1 | 4.106814 | -10.249573 | 1.550359 | true | 0.003193 | dense | -10.072092 | -1.276134 | -2.495748 | 64 | 0.05295 | 4,096 | 22 | 4,032 | 1 | 0.662375 | 0.003193 | 16.581116 | success | 0.05651 | 0 | | 4,032 | 0.003193 | 0.000741 |
| 26 | model.layers.3.self_attn.q_proj | 0.067876 | 4,096 | 4,096 | 1 | 3.348291 | -7.814138 | 1.538209 | true | 0.004637 | dense | -7.730213 | -1.295588 | -2.333769 | 64 | 0.05063 | 4,096 | 12 | 4,032 | 1 | 0.677893 | 0.004637 | 10.918955 | success | 0.068095 | 0 | | 4,032 | 0.004637 | 0.000785 |
| 27 | model.layers.3.self_attn.v_proj | 0.070333 | 1,024 | 4,096 | 4 | 5.224446 | -15.4996 | 1.13189 | true | 0.00108 | dense | -15.310787 | -1.573987 | -2.966745 | 64 | 0.026669 | 1,024 | 11 | 960 | 1 | 1.273718 | 0.00108 | 24.70347 | success | 0.032857 | 0.000001 | | 960 | 0.00108 | 0.000431 |
| 28 | model.layers.4.mlp.down_proj | 0.103412 | 4,096 | 14,336 | 3.5 | 6.499501 | -16.271618 | 1.564917 | true | 0.003137 | dense | -16.07889 | -1.03764 | -2.503518 | 64 | 0.091698 | 4,096 | 13 | 4,032 | 1 | 1.525287 | 0.003137 | 29.233288 | success | 0.056007 | 0.000001 | under-trained | 4,032 | 0.003137 | 0.001422 |
| 29 | model.layers.4.mlp.gate_proj | 0.116253 | 4,096 | 14,336 | 3.5 | 3.795076 | -8.701769 | 1.557888 | true | 0.005094 | dense | -8.544298 | -1.054941 | -2.29291 | 64 | 0.088117 | 4,096 | 7 | 4,032 | 1 | 1.056439 | 0.005094 | 17.296932 | success | 0.071375 | 0.000001 | | 4,032 | 0.005094 | 0.001453 |
| 30 | model.layers.4.mlp.up_proj | 0.12004 | 4,096 | 14,336 | 3.5 | 4.187209 | -9.434501 | 1.55538 | true | 0.005582 | dense | -9.328923 | -1.033906 | -2.253172 | 64 | 0.09249 | 4,096 | 12 | 4,032 | 1 | 0.920068 | 0.005582 | 16.567825 | success | 0.074716 | 0.000001 | | 4,032 | 0.005582 | 0.001405 |
| 31 | model.layers.4.self_attn.k_proj | 0.06627 | 1,024 | 4,096 | 4 | 6.227436 | -16.95982 | 1.127202 | true | 0.001891 | dense | -16.916808 | -1.442925 | -2.723403 | 64 | 0.036064 | 1,024 | 64 | 960 | 1 | 0.653429 | 0.001891 | 19.075619 | success | 0.043481 | 0.000001 | under-trained | 960 | 0.001891 | 0.00045 |
| 32 | model.layers.4.self_attn.o_proj | 0.051627 | 4,096 | 4,096 | 1 | 4.323554 | -10.8611 | 1.552673 | true | 0.003076 | dense | -10.770511 | -1.30265 | -2.512077 | 64 | 0.049814 | 4,096 | 22 | 4,032 | 1 | 0.708584 | 0.003076 | 16.196711 | success | 0.055458 | 0 | | 4,032 | 0.003076 | 0.000711 |
| 33 | model.layers.4.self_attn.q_proj | 0.079045 | 4,096 | 4,096 | 1 | 7.006285 | -18.277367 | 1.554457 | true | 0.002462 | dense | -18.263869 | -1.388304 | -2.60871 | 64 | 0.040897 | 4,096 | 64 | 4,032 | 1 | 0.750786 | 0.002462 | 16.611397 | success | 0.049619 | 0 | under-trained | 4,032 | 0.002462 | 0.000519 |
| 34 | model.layers.4.self_attn.v_proj | 0.064339 | 1,024 | 4,096 | 4 | 8.255926 | -25.286125 | 1.132694 | true | 0.000865 | dense | -25.089855 | -1.590087 | -3.062785 | 64 | 0.025699 | 1,024 | 64 | 960 | 1 | 0.906991 | 0.000865 | 29.696016 | success | 0.029418 | 0.000001 | under-trained | 960 | 0.000865 | 0.000344 |
| 35 | model.layers.5.mlp.down_proj | 0.102415 | 4,096 | 14,336 | 3.5 | 6.801178 | -17.267207 | 1.565843 | true | 0.002892 | dense | -17.055313 | -1.040575 | -2.538856 | 64 | 0.09108 | 4,096 | 11 | 4,032 | 1 | 1.749121 | 0.002892 | 31.497822 | success | 0.053774 | 0.000001 | under-trained | 4,032 | 0.002892 | 0.001433 |
| 36 | model.layers.5.mlp.gate_proj | 0.101403 | 4,096 | 14,336 | 3.5 | 4.828013 | -11.53821 | 1.561887 | true | 0.004075 | dense | -11.414749 | -1.061174 | -2.389846 | 64 | 0.086861 | 4,096 | 9 | 4,032 | 1 | 1.276004 | 0.004075 | 21.314381 | success | 0.063838 | 0.000001 | | 4,032 | 0.004075 | 0.00138 |
| 37 | model.layers.5.mlp.up_proj | 0.122957 | 4,096 | 14,336 | 3.5 | 4.840966 | -11.489038 | 1.560318 | true | 0.004234 | dense | -11.329233 | -1.041366 | -2.373294 | 64 | 0.090915 | 4,096 | 13 | 4,032 | 1 | 1.065292 | 0.004234 | 21.474775 | success | 0.065066 | 0.000001 | | 4,032 | 0.004234 | 0.001391 |
| 38 | model.layers.5.self_attn.k_proj | 0.058986 | 1,024 | 4,096 | 4 | 6.511588 | -17.164931 | 1.124793 | true | 0.002312 | dense | -17.146604 | -1.41215 | -2.636059 | 64 | 0.038712 | 1,024 | 64 | 960 | 1 | 0.688948 | 0.002312 | 16.745928 | success | 0.048081 | 0.000001 | under-trained | 960 | 0.002312 | 0.000485 |
| 39 | model.layers.5.self_attn.o_proj | 0.102708 | 4,096 | 4,096 | 1 | 6.50579 | -18.101157 | 1.563011 | true | 0.001651 | dense | -17.909787 | -1.336704 | -2.782315 | 64 | 0.046057 | 4,096 | 25 | 4,032 | 1 | 1.101158 | 0.001651 | 27.900465 | success | 0.04063 | 0 | under-trained | 4,032 | 0.001651 | 0.000678 |
| 40 | model.layers.5.self_attn.q_proj | 0.075607 | 4,096 | 4,096 | 1 | 8.9117 | -24.09903 | 1.560718 | true | 0.001976 | dense | -24.090758 | -1.387967 | -2.704201 | 64 | 0.040929 | 4,096 | 64 | 4,032 | 1 | 0.988963 | 0.001976 | 20.712568 | success | 0.044453 | 0 | under-trained | 4,032 | 0.001976 | 0.00055 |
| 41 | model.layers.5.self_attn.v_proj | 0.05698 | 1,024 | 4,096 | 4 | 10.334181 | -32.260235 | 1.135119 | true | 0.000756 | dense | -32.222351 | -1.613308 | -3.121702 | 64 | 0.024361 | 1,024 | 64 | 960 | 1 | 1.166773 | 0.000756 | 32.239906 | success | 0.027488 | 0.000001 | under-trained | 960 | 0.000756 | 0.000339 |
| 42 | model.layers.6.mlp.down_proj | 0.106362 | 4,096 | 14,336 | 3.5 | 6.931822 | -17.470068 | 1.565562 | true | 0.003018 | dense | -17.290337 | -1.030035 | -2.520271 | 64 | 0.093318 | 4,096 | 14 | 4,032 | 1 | 1.585346 | 0.003018 | 30.919691 | success | 0.054937 | 0.000001 | under-trained | 4,032 | 0.003018 | 0.001441 |
| 43 | model.layers.6.mlp.gate_proj | 0.089191 | 4,096 | 14,336 | 3.5 | 4.654741 | -10.97667 | 1.560834 | true | 0.004384 | dense | -10.851597 | -1.049342 | -2.35817 | 64 | 0.08926 | 4,096 | 11 | 4,032 | 1 | 1.101946 | 0.004384 | 20.362329 | success | 0.066209 | 0.000001 | | 4,032 | 0.004384 | 0.001363 |
| 44 | model.layers.6.mlp.up_proj | 0.105811 | 4,096 | 14,336 | 3.5 | 4.32534 | -10.156714 | 1.559256 | true | 0.004486 | dense | -9.939353 | -1.027995 | -2.348189 | 64 | 0.093757 | 4,096 | 13 | 4,032 | 1 | 0.922283 | 0.004486 | 20.902269 | success | 0.066974 | 0.000001 | | 4,032 | 0.004486 | 0.001421 |
| 45 | model.layers.6.self_attn.k_proj | 0.056873 | 1,024 | 4,096 | 4 | 6.693871 | -17.802754 | 1.12456 | true | 0.00219 | dense | -17.785555 | -1.437849 | -2.65956 | 64 | 0.036488 | 1,024 | 63 | 960 | 1 | 0.71736 | 0.00219 | 16.661371 | success | 0.046797 | 0.000001 | under-trained | 960 | 0.00219 | 0.000461 |
| 46 | model.layers.6.self_attn.o_proj | 0.103141 | 4,096 | 4,096 | 1 | 5.320054 | -14.340628 | 1.559961 | true | 0.002016 | dense | -14.170697 | -1.334167 | -2.69558 | 64 | 0.046327 | 4,096 | 20 | 4,032 | 1 | 0.965993 | 0.002016 | 22.983286 | success | 0.044896 | 0 | | 4,032 | 0.002016 | 0.000693 |
| 47 | model.layers.6.self_attn.q_proj | 0.075063 | 4,096 | 4,096 | 1 | 8.53185 | -22.660099 | 1.559597 | true | 0.002208 | dense | -22.658375 | -1.38822 | -2.655942 | 64 | 0.040905 | 4,096 | 64 | 4,032 | 1 | 0.941481 | 0.002208 | 18.523449 | success | 0.046993 | 0 | under-trained | 4,032 | 0.002208 | 0.000545 |
| 48 | model.layers.6.self_attn.v_proj | 0.053575 | 1,024 | 4,096 | 4 | 10.10451 | -32.271476 | 1.135456 | true | 0.00064 | dense | -31.934859 | -1.60524 | -3.193769 | 64 | 0.024818 | 1,024 | 57 | 960 | 1 | 1.205922 | 0.00064 | 38.773029 | success | 0.0253 | 0.000001 | under-trained | 960 | 0.00064 | 0.000349 |
| 49 | model.layers.7.mlp.down_proj | 0.131502 | 4,096 | 14,336 | 3.5 | 6.106007 | -15.723402 | 1.566253 | true | 0.00266 | dense | -15.247479 | -1.027381 | -2.575071 | 64 | 0.09389 | 4,096 | 7 | 4,032 | 1 | 1.929889 | 0.00266 | 35.293121 | success | 0.051578 | 0.000001 | under-trained | 4,032 | 0.00266 | 0.001575 |
| 50 | model.layers.7.mlp.gate_proj | 0.092991 | 4,096 | 14,336 | 3.5 | 4.66407 | -11.037661 | 1.561423 | true | 0.0043 | dense | -10.888869 | -1.039225 | -2.36653 | 64 | 0.091364 | 4,096 | 9 | 4,032 | 1 | 1.221357 | 0.0043 | 21.247375 | success | 0.065575 | 0.000001 | | 4,032 | 0.0043 | 0.001464 |
| 51 | model.layers.7.mlp.up_proj | 0.083526 | 4,096 | 14,336 | 3.5 | 4.418851 | -10.374051 | 1.559826 | true | 0.004491 | dense | -10.158229 | -1.018919 | -2.347681 | 64 | 0.095737 | 4,096 | 12 | 4,032 | 1 | 0.986937 | 0.004491 | 21.318748 | success | 0.067013 | 0.000001 | | 4,032 | 0.004491 | 0.001482 |
| 52 | model.layers.7.self_attn.k_proj | 0.092425 | 1,024 | 4,096 | 4 | 6.301261 | -18.606098 | 1.133596 | true | 0.001115 | dense | -18.203055 | -1.425027 | -2.952758 | 64 | 0.037581 | 1,024 | 47 | 960 | 1 | 0.773268 | 0.001115 | 33.707863 | success | 0.03339 | 0.000001 | under-trained | 960 | 0.001115 | 0.00051 |
| 53 | model.layers.7.self_attn.o_proj | 0.075342 | 4,096 | 4,096 | 1 | 9.002145 | -25.429427 | 1.564116 | true | 0.001497 | dense | -25.246398 | -1.327605 | -2.824819 | 64 | 0.047032 | 4,096 | 64 | 4,032 | 1 | 1.000268 | 0.001497 | 31.42049 | success | 0.038689 | 0 | under-trained | 4,032 | 0.001497 | 0.000639 |
| 54 | model.layers.7.self_attn.q_proj | 0.067324 | 4,096 | 4,096 | 1 | 8.272918 | -22.371443 | 1.560991 | true | 0.001976 | dense | -22.349378 | -1.366129 | -2.704178 | 64 | 0.04304 | 4,096 | 64 | 4,032 | 1 | 0.909115 | 0.001976 | 21.779566 | success | 0.044454 | 0 | under-trained | 4,032 | 0.001976 | 0.000572 |
| 55 | model.layers.7.self_attn.v_proj | 0.035828 | 1,024 | 4,096 | 4 | 11.606042 | -36.820088 | 1.135943 | true | 0.000672 | dense | -36.715384 | -1.592729 | -3.172493 | 64 | 0.025543 | 1,024 | 52 | 960 | 1 | 1.470793 | 0.000672 | 37.998268 | success | 0.025927 | 0.000001 | under-trained | 960 | 0.000672 | 0.000369 |
| 56 | model.layers.8.mlp.down_proj | 0.143326 | 4,096 | 14,336 | 3.5 | 7.349975 | -18.750527 | 1.565933 | true | 0.002811 | dense | -18.478513 | -1.021602 | -2.551101 | 64 | 0.095148 | 4,096 | 14 | 4,032 | 1 | 1.697102 | 0.002811 | 33.845333 | success | 0.053021 | 0.000001 | under-trained | 4,032 | 0.002811 | 0.001474 |
| 57 | model.layers.8.mlp.gate_proj | 0.096562 | 4,096 | 14,336 | 3.5 | 4.282198 | -9.903676 | 1.559676 | true | 0.004867 | dense | -9.748083 | -1.023318 | -2.312756 | 64 | 0.094772 | 4,096 | 9 | 4,032 | 1 | 1.094066 | 0.004867 | 19.473211 | success | 0.069763 | 0.000001 | | 4,032 | 0.004867 | 0.00151 |
| 58 | model.layers.8.mlp.up_proj | 0.101449 | 4,096 | 14,336 | 3.5 | 4.470252 | -10.296513 | 1.558773 | true | 0.004973 | dense | -10.135558 | -1.003884 | -2.30334 | 64 | 0.09911 | 4,096 | 13 | 4,032 | 1 | 0.962475 | 0.004973 | 19.927673 | success | 0.070523 | 0.000001 | | 4,032 | 0.004973 | 0.001524 |
| 59 | model.layers.8.self_attn.k_proj | 0.08686 | 1,024 | 4,096 | 4 | 6.646015 | -16.279518 | 1.114718 | true | 0.003552 | dense | -16.2774 | -1.380645 | -2.449516 | 64 | 0.041625 | 1,024 | 64 | 960 | 1 | 0.705752 | 0.003552 | 11.718461 | success | 0.059599 | 0.000001 | under-trained | 960 | 0.003552 | 0.000513 |
| 60 | model.layers.8.self_attn.o_proj | 0.068692 | 4,096 | 4,096 | 1 | 5.494895 | -14.968555 | 1.562716 | true | 0.001888 | dense | -14.748427 | -1.30508 | -2.724084 | 64 | 0.049536 | 4,096 | 13 | 4,032 | 1 | 1.24666 | 0.001888 | 26.242395 | success | 0.043447 | 0 | | 4,032 | 0.001888 | 0.000804 |
| 61 | model.layers.8.self_attn.q_proj | 0.089713 | 4,096 | 4,096 | 1 | 7.238111 | -17.969642 | 1.553421 | true | 0.003291 | dense | -17.967233 | -1.307936 | -2.482643 | 64 | 0.049211 | 4,096 | 64 | 4,032 | 1 | 0.779764 | 0.003291 | 14.952259 | success | 0.057369 | 0 | under-trained | 4,032 | 0.003291 | 0.000627 |
| 62 | model.layers.8.self_attn.v_proj | 0.039211 | 1,024 | 4,096 | 4 | 10.635006 | -33.443697 | 1.135642 | true | 0.000717 | dense | -33.337394 | -1.584424 | -3.144681 | 64 | 0.026036 | 1,024 | 53 | 960 | 1 | 1.32347 | 0.000717 | 36.329201 | success | 0.026771 | 0.000001 | under-trained | 960 | 0.000717 | 0.000372 |
| 63 | model.layers.9.mlp.down_proj | 0.128309 | 4,096 | 14,336 | 3.5 | 8.653824 | -22.723533 | 1.566809 | true | 0.002367 | dense | -22.262528 | -1.019453 | -2.625837 | 64 | 0.09562 | 4,096 | 15 | 4,032 | 1 | 1.976209 | 0.002367 | 40.400311 | success | 0.04865 | 0.000001 | under-trained | 4,032 | 0.002367 | 0.001492 |
| 64 | model.layers.9.mlp.gate_proj | 0.08722 | 4,096 | 14,336 | 3.5 | 4.630853 | -10.738622 | 1.560123 | true | 0.004798 | dense | -10.615177 | -1.01836 | -2.318929 | 64 | 0.095861 | 4,096 | 10 | 4,032 | 1 | 1.148177 | 0.004798 | 19.978786 | success | 0.069268 | 0.000001 | | 4,032 | 0.004798 | 0.001535 |
| 65 | model.layers.9.mlp.up_proj | 0.095441 | 4,096 | 14,336 | 3.5 | 4.509581 | -10.436652 | 1.559562 | true | 0.004849 | dense | -10.254104 | -0.994814 | -2.314328 | 64 | 0.101201 | 4,096 | 11 | 4,032 | 1 | 1.058179 | 0.004849 | 20.869593 | success | 0.069636 | 0.000001 | | 4,032 | 0.004849 | 0.001637 |
| 66 | model.layers.9.self_attn.k_proj | 0.083743 | 1,024 | 4,096 | 4 | 11.978332 | -36.619759 | 1.136268 | true | 0.000877 | dense | -36.102986 | -1.390942 | -3.057167 | 64 | 0.04065 | 1,024 | 31 | 960 | 1 | 1.971767 | 0.000877 | 46.368629 | success | 0.029609 | 0.000001 | under-trained | 960 | 0.000877 | 0.000628 |
| 67 | model.layers.9.self_attn.o_proj | 0.063917 | 4,096 | 4,096 | 1 | 9.052659 | -25.969677 | 1.56488 | true | 0.001353 | dense | -25.562036 | -1.315855 | -2.868735 | 64 | 0.048322 | 4,096 | 64 | 4,032 | 1 | 1.006582 | 0.001353 | 35.717358 | success | 0.036782 | 0 | under-trained | 4,032 | 0.001353 | 0.000658 |
| 68 | model.layers.9.self_attn.q_proj | 0.068104 | 4,096 | 4,096 | 1 | 11.947367 | -34.271361 | 1.566557 | true | 0.001354 | dense | -34.250902 | -1.33759 | -2.868528 | 64 | 0.045963 | 4,096 | 48 | 4,032 | 1 | 1.580116 | 0.001354 | 33.957748 | success | 0.036791 | 0 | under-trained | 4,032 | 0.001354 | 0.00067 |
| 69 | model.layers.9.self_attn.v_proj | 0.036851 | 1,024 | 4,096 | 4 | 11.726856 | -37.617797 | 1.136089 | true | 0.00062 | dense | -37.503763 | -1.617514 | -3.207833 | 64 | 0.024126 | 1,024 | 43 | 960 | 1 | 1.63583 | 0.00062 | 38.933083 | success | 0.024893 | 0.000001 | under-trained | 960 | 0.00062 | 0.000355 |
| 70 | model.layers.10.mlp.down_proj | 0.13738 | 4,096 | 14,336 | 3.5 | 6.860931 | -17.709316 | 1.565695 | true | 0.002623 | dense | -17.151487 | -1.01245 | -2.581182 | 64 | 0.097174 | 4,096 | 15 | 4,032 | 1 | 1.513286 | 0.002623 | 37.04528 | success | 0.051216 | 0.000001 | under-trained | 4,032 | 0.002623 | 0.0015 |
| 71 | model.layers.10.mlp.gate_proj | 0.083743 | 4,096 | 14,336 | 3.5 | 3.814121 | -8.29112 | 1.553266 | true | 0.006702 | dense | -8.192316 | -1.002268 | -2.173796 | 64 | 0.099479 | 4,096 | 10 | 4,032 | 1 | 0.889903 | 0.006702 | 14.843208 | success | 0.081866 | 0.000001 | | 4,032 | 0.006702 | 0.001555 |
| 72 | model.layers.10.mlp.up_proj | 0.083849 | 4,096 | 14,336 | 3.5 | 4.076868 | -8.950159 | 1.554181 | true | 0.006377 | dense | -8.837687 | -0.985078 | -2.195352 | 64 | 0.103496 | 4,096 | 13 | 4,032 | 1 | 0.85337 | 0.006377 | 16.228313 | success | 0.079859 | 0.000001 | | 4,032 | 0.006377 | 0.001591 |
| 73 | model.layers.10.self_attn.k_proj | 0.056769 | 1,024 | 4,096 | 4 | 7.818972 | -21.698837 | 1.132445 | true | 0.001678 | dense | -21.653366 | -1.368847 | -2.775152 | 64 | 0.042771 | 1,024 | 58 | 960 | 1 | 0.895375 | 0.001678 | 25.486191 | success | 0.040966 | 0.000001 | under-trained | 960 | 0.001678 | 0.000576 |
| 74 | model.layers.10.self_attn.o_proj | 0.070628 | 4,096 | 4,096 | 1 | 8.901525 | -24.73618 | 1.56414 | true | 0.001664 | dense | -24.647737 | -1.30816 | -2.77887 | 64 | 0.049186 | 4,096 | 64 | 4,032 | 1 | 0.987691 | 0.001664 | 29.560369 | success | 0.040791 | 0 | under-trained | 4,032 | 0.001664 | 0.000667 |
| 75 | model.layers.10.self_attn.q_proj | 0.091471 | 4,096 | 4,096 | 1 | 9.544777 | -25.029251 | 1.561299 | true | 0.002386 | dense | -25.027851 | -1.31183 | -2.622298 | 64 | 0.048772 | 4,096 | 64 | 4,032 | 1 | 1.068097 | 0.002386 | 20.439404 | success | 0.048848 | 0 | under-trained | 4,032 | 0.002386 | 0.000663 |
| 76 | model.layers.10.self_attn.v_proj | 0.039781 | 1,024 | 4,096 | 4 | 13.460733 | -43.727946 | 1.136434 | true | 0.000564 | dense | -43.467368 | -1.606999 | -3.248556 | 64 | 0.024717 | 1,024 | 34 | 960 | 1 | 2.136998 | 0.000564 | 43.808399 | success | 0.023753 | 0.000001 | under-trained | 960 | 0.000564 | 0.000377 |
| 77 | model.layers.11.mlp.down_proj | 0.084076 | 4,096 | 14,336 | 3.5 | 8.331583 | -21.69929 | 1.566558 | true | 0.002486 | dense | -21.266779 | -1.013571 | -2.604462 | 64 | 0.096923 | 4,096 | 16 | 4,032 | 1 | 1.832896 | 0.002486 | 38.984375 | success | 0.049862 | 0.000001 | under-trained | 4,032 | 0.002486 | 0.00151 |
| 78 | model.layers.11.mlp.gate_proj | 0.124518 | 4,096 | 14,336 | 3.5 | 3.566869 | -7.808087 | 1.554186 | true | 0.006471 | dense | -7.641942 | -0.990355 | -2.189059 | 64 | 0.102246 | 4,096 | 8 | 4,032 | 1 | 0.907525 | 0.006471 | 15.801709 | success | 0.08044 | 0.000001 | | 4,032 | 0.006471 | 0.001697 |
| 79 | model.layers.11.mlp.up_proj | 0.105866 | 4,096 | 14,336 | 3.5 | 4.111511 | -9.150791 | 1.555739 | true | 0.005948 | dense | -9.000263 | -0.978656 | -2.225652 | 64 | 0.105037 | 4,096 | 12 | 4,032 | 1 | 0.898216 | 0.005948 | 17.660204 | success | 0.077121 | 0.000001 | | 4,032 | 0.005948 | 0.001658 |
| 80 | model.layers.11.self_attn.k_proj | 0.076034 | 1,024 | 4,096 | 4 | 6.886649 | -17.54321 | 1.125219 | true | 0.002835 | dense | -17.53927 | -1.338794 | -2.547423 | 64 | 0.045836 | 1,024 | 64 | 960 | 1 | 0.735831 | 0.002835 | 16.166996 | success | 0.053246 | 0.000001 | under-trained | 960 | 0.002835 | 0.000583 |
| 81 | model.layers.11.self_attn.o_proj | 0.082475 | 4,096 | 4,096 | 1 | 9.621523 | -27.295947 | 1.564701 | true | 0.001456 | dense | -27.014932 | -1.307915 | -2.836967 | 64 | 0.049214 | 4,096 | 64 | 4,032 | 1 | 1.07769 | 0.001456 | 33.810543 | success | 0.038152 | 0 | under-trained | 4,032 | 0.001456 | 0.000676 |
| 82 | model.layers.11.self_attn.q_proj | 0.092576 | 4,096 | 4,096 | 1 | 3.737231 | -9.223967 | 1.553536 | true | 0.003403 | dense | -9.073385 | -1.267686 | -2.468129 | 64 | 0.05399 | 4,096 | 10 | 4,032 | 1 | 0.865588 | 0.003403 | 15.865086 | success | 0.058336 | 0 | | 4,032 | 0.003403 | 0.00086 |
| 83 | model.layers.11.self_attn.v_proj | 0.045068 | 1,024 | 4,096 | 4 | 12.227346 | -39.194857 | 1.136222 | true | 0.000623 | dense | -38.929056 | -1.581387 | -3.205508 | 64 | 0.026219 | 1,024 | 40 | 960 | 1 | 1.775199 | 0.000623 | 42.08437 | success | 0.02496 | 0.000001 | under-trained | 960 | 0.000623 | 0.00039 |
| 84 | model.layers.12.mlp.down_proj | 0.071737 | 4,096 | 14,336 | 3.5 | 10.463299 | -27.652009 | 1.567254 | true | 0.002276 | dense | -27.278975 | -1.01537 | -2.642762 | 64 | 0.096523 | 4,096 | 17 | 4,032 | 1 | 2.295187 | 0.002276 | 42.402534 | success | 0.047711 | 0.000001 | under-trained | 4,032 | 0.002276 | 0.001505 |
| 85 | model.layers.12.mlp.gate_proj | 0.093378 | 4,096 | 14,336 | 3.5 | 3.726353 | -8.2903 | 1.556028 | true | 0.00596 | dense | -8.120935 | -0.997856 | -2.224776 | 64 | 0.100495 | 4,096 | 8 | 4,032 | 1 | 0.963911 | 0.00596 | 16.8624 | success | 0.077199 | 0.000001 | | 4,032 | 0.00596 | 0.001679 |
| 86 | model.layers.12.mlp.up_proj | 0.084262 | 4,096 | 14,336 | 3.5 | 4.286353 | -9.666749 | 1.557471 | true | 0.005556 | dense | -9.516003 | -0.984565 | -2.255239 | 64 | 0.103618 | 4,096 | 11 | 4,032 | 1 | 0.990873 | 0.005556 | 18.64978 | success | 0.074538 | 0.000001 | | 4,032 | 0.005556 | 0.001669 |
| 87 | model.layers.12.self_attn.k_proj | 0.048592 | 1,024 | 4,096 | 4 | 5.293701 | -15.09437 | 1.131777 | true | 0.001408 | dense | -14.339401 | -1.297373 | -2.851383 | 64 | 0.050423 | 1,024 | 64 | 960 | 1 | 0.536713 | 0.001408 | 35.810429 | success | 0.037524 | 0.000001 | | 960 | 0.001408 | 0.00061 |
| 88 | model.layers.12.self_attn.o_proj | 0.036461 | 4,096 | 4,096 | 1 | 10.498903 | -30.738891 | 1.566372 | true | 0.001181 | dense | -30.349195 | -1.325973 | -2.927819 | 64 | 0.047209 | 4,096 | 64 | 4,032 | 1 | 1.187363 | 0.001181 | 39.980358 | success | 0.034363 | 0 | under-trained | 4,032 | 0.001181 | 0.000659 |
| 89 | model.layers.12.self_attn.q_proj | 0.059794 | 4,096 | 4,096 | 1 | 10.446452 | -27.443894 | 1.562977 | true | 0.00236 | dense | -27.443376 | -1.285311 | -2.627102 | 64 | 0.051843 | 4,096 | 62 | 4,032 | 1 | 1.199701 | 0.00236 | 21.968027 | success | 0.048579 | 0 | under-trained | 4,032 | 0.00236 | 0.000719 |
| 90 | model.layers.12.self_attn.v_proj | 0.056912 | 1,024 | 4,096 | 4 | 14.830225 | -48.769316 | 1.136817 | true | 0.000515 | dense | -48.376759 | -1.608922 | -3.288508 | 64 | 0.024608 | 1,024 | 40 | 960 | 1 | 2.186751 | 0.000515 | 47.817432 | success | 0.022685 | 0.000001 | under-trained | 960 | 0.000515 | 0.000371 |
| 91 | model.layers.13.mlp.down_proj | 0.101172 | 4,096 | 14,336 | 3.5 | 9.722754 | -25.602905 | 1.566878 | true | 0.002326 | dense | -25.09933 | -1.007937 | -2.633298 | 64 | 0.098189 | 4,096 | 21 | 4,032 | 1 | 1.903461 | 0.002326 | 42.204708 | success | 0.048234 | 0.000001 | under-trained | 4,032 | 0.002326 | 0.001502 |
| 92 | model.layers.13.mlp.gate_proj | 0.076844 | 4,096 | 14,336 | 3.5 | 4.195623 | -9.25822 | 1.555494 | true | 0.006214 | dense | -9.17138 | -0.996401 | -2.206638 | 64 | 0.100832 | 4,096 | 12 | 4,032 | 1 | 0.922497 | 0.006214 | 16.226952 | success | 0.078828 | 0.000001 | | 4,032 | 0.006214 | 0.001561 |
| 93 | model.layers.13.mlp.up_proj | 0.072975 | 4,096 | 14,336 | 3.5 | 4.54178 | -10.238768 | 1.558146 | true | 0.005567 | dense | -10.13164 | -0.983272 | -2.254352 | 64 | 0.103927 | 4,096 | 12 | 4,032 | 1 | 1.022424 | 0.005567 | 18.667231 | success | 0.074615 | 0.000001 | | 4,032 | 0.005567 | 0.001665 |
| 94 | model.layers.13.self_attn.k_proj | 0.074733 | 1,024 | 4,096 | 4 | 8.151952 | -22.214526 | 1.131022 | true | 0.001883 | dense | -22.195854 | -1.377887 | -2.725056 | 64 | 0.04189 | 1,024 | 63 | 960 | 1 | 0.901061 | 0.001883 | 22.241776 | success | 0.043398 | 0.000001 | under-trained | 960 | 0.001883 | 0.000559 |
| 95 | model.layers.13.self_attn.o_proj | 0.030219 | 4,096 | 4,096 | 1 | 10.243985 | -29.651683 | 1.566539 | true | 0.001275 | dense | -29.323853 | -1.290212 | -2.894546 | 64 | 0.051261 | 4,096 | 64 | 4,032 | 1 | 1.155498 | 0.001275 | 40.209957 | success | 0.035705 | 0 | under-trained | 4,032 | 0.001275 | 0.000714 |
| 96 | model.layers.13.self_attn.q_proj | 0.089999 | 4,096 | 4,096 | 1 | 9.241785 | -23.352984 | 1.559237 | true | 0.002972 | dense | -23.352611 | -1.2781 | -2.526891 | 64 | 0.052711 | 4,096 | 64 | 4,032 | 1 | 1.030223 | 0.002972 | 17.733349 | success | 0.05452 | 0 | under-trained | 4,032 | 0.002972 | 0.00071 |
| 97 | model.layers.13.self_attn.v_proj | 0.06204 | 1,024 | 4,096 | 4 | 17.463952 | -57.035094 | 1.136875 | true | 0.000542 | dense | -56.694075 | -1.577319 | -3.265876 | 64 | 0.026466 | 1,024 | 24 | 960 | 1 | 3.36069 | 0.000542 | 48.815338 | success | 0.023284 | 0.000001 | under-trained | 960 | 0.000542 | 0.000418 |
| 98 | model.layers.14.mlp.down_proj | 0.082979 | 4,096 | 14,336 | 3.5 | 11.637098 | -31.044017 | 1.567351 | true | 0.002149 | dense | -30.417835 | -0.998153 | -2.667677 | 64 | 0.100426 | 4,096 | 24 | 4,032 | 1 | 2.171288 | 0.002149 | 46.722298 | success | 0.046362 | 0.000001 | under-trained | 4,032 | 0.002149 | 0.001538 |
| 99 | model.layers.14.mlp.gate_proj | 0.106847 | 4,096 | 14,336 | 3.5 | 4.425835 | -9.716215 | 1.556191 | true | 0.006378 | dense | -9.659478 | -0.990614 | -2.19534 | 64 | 0.102185 | 4,096 | 9 | 4,032 | 1 | 1.141945 | 0.006378 | 16.022362 | success | 0.07986 | 0.000001 | | 4,032 | 0.006378 | 0.001713 |
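The column set (layer_id, name, M, N, Q, alpha, alpha_weighted, D, sigma, xmin, xmax, num_pl_spikes, stable_rank, and the "under-trained" warnings) matches the per-layer dataframe produced by the open-source weightwatcher package, so a table of this shape can plausibly be regenerated along the following lines. This is a sketch under that assumption; the page does not state how the metrics were computed, and the analyzed model (layers named `model.layers.N.self_attn.*` and `model.layers.N.mlp.*`) is not identified, so the model id below is a placeholder.

```python
# Sketch: producing a per-layer metrics table of this shape with weightwatcher.
# Assumes the analyzed checkpoint is a Hugging Face causal LM; "org/model-name"
# is a placeholder, not the model behind this dataset.
import weightwatcher as ww
from transformers import AutoModelForCausalLM

model = AutoModelForCausalLM.from_pretrained("org/model-name")  # placeholder model id
watcher = ww.WeightWatcher(model=model)
details = watcher.analyze()  # pandas DataFrame with columns like alpha, D, stable_rank, warning, ...

# Persist the result in the same format the Hub viewer consumes.
details.to_parquet("layer_metrics.parquet")
```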