stas committed
Commit 7469fb8 · 1 parent: 3d98127
This view is limited to 50 files because the commit contains too many changes.
Files changed (50)
  1. emb-norm/000-module.3.input_layernorm/events.out.tfevents.1637404942.r6i5n3.394761.5 +3 -0
  2. emb-norm/000-module.3.input_layernorm/events.out.tfevents.1638234920.r6i3n2.1319219.5 +3 -0
  3. emb-norm/000-module.3.mlp.dense_4h_to_h/events.out.tfevents.1637404943.r6i5n3.394761.13 +3 -0
  4. emb-norm/000-module.3.mlp.dense_h_to_4h/events.out.tfevents.1637404943.r6i5n3.394761.12 +3 -0
  5. emb-norm/000-module.3.mlp/events.out.tfevents.1637404943.r6i5n3.394761.14 +3 -0
  6. emb-norm/000-module.3.post_attention_layernorm/events.out.tfevents.1637404943.r6i5n3.394761.11 +3 -0
  7. emb-norm/000-module.3.self_attention.attention_dropout/events.out.tfevents.1637404943.r6i5n3.394761.8 +3 -0
  8. emb-norm/000-module.3.self_attention.attention_dropout/events.out.tfevents.1638234920.r6i3n2.1319219.8 +3 -0
  9. emb-norm/000-module.3.self_attention.dense/events.out.tfevents.1637404943.r6i5n3.394761.9 +3 -0
  10. emb-norm/000-module.3.self_attention.dense/events.out.tfevents.1638234920.r6i3n2.1319219.9 +3 -0
  11. emb-norm/000-module.3.self_attention.query_key_value/events.out.tfevents.1637404943.r6i5n3.394761.6 +3 -0
  12. emb-norm/000-module.3.self_attention.query_key_value/events.out.tfevents.1638234920.r6i3n2.1319219.6 +3 -0
  13. emb-norm/000-module.3.self_attention.scale_mask_softmax/events.out.tfevents.1637404943.r6i5n3.394761.7 +3 -0
  14. emb-norm/000-module.3.self_attention.scale_mask_softmax/events.out.tfevents.1638234920.r6i3n2.1319219.7 +3 -0
  15. emb-norm/000-module.3.self_attention/events.out.tfevents.1637404943.r6i5n3.394761.10 +3 -0
  16. emb-norm/000-module.3.self_attention/events.out.tfevents.1638234920.r6i3n2.1319219.10 +3 -0
  17. emb-norm/000-module.3/events.out.tfevents.1637404944.r6i5n3.394761.15 +3 -0
  18. emb-norm/000-module.4.input_layernorm/events.out.tfevents.1637404944.r6i5n3.394761.16 +3 -0
  19. emb-norm/000-module.4.mlp.dense_4h_to_h/events.out.tfevents.1637404944.r6i5n3.394761.24 +3 -0
  20. emb-norm/000-module.4.mlp.dense_h_to_4h/events.out.tfevents.1637404944.r6i5n3.394761.23 +3 -0
  21. emb-norm/000-module.4.mlp/events.out.tfevents.1637404944.r6i5n3.394761.25 +3 -0
  22. emb-norm/000-module.4.post_attention_layernorm/events.out.tfevents.1637404944.r6i5n3.394761.22 +3 -0
  23. emb-norm/000-module.4.self_attention.attention_dropout/events.out.tfevents.1637404944.r6i5n3.394761.19 +3 -0
  24. emb-norm/000-module.4.self_attention.dense/events.out.tfevents.1637404944.r6i5n3.394761.20 +3 -0
  25. emb-norm/000-module.4.self_attention.query_key_value/events.out.tfevents.1637404944.r6i5n3.394761.17 +3 -0
  26. emb-norm/000-module.4.self_attention.scale_mask_softmax/events.out.tfevents.1637404944.r6i5n3.394761.18 +3 -0
  27. emb-norm/000-module.4.self_attention/events.out.tfevents.1637404944.r6i5n3.394761.21 +3 -0
  28. emb-norm/000-module.4/events.out.tfevents.1637404944.r6i5n3.394761.26 +3 -0
  29. emb-norm/000-module.tied_modules.embed.embedding_dropout/events.out.tfevents.1637404942.r6i5n3.394761.3 +3 -0
  30. emb-norm/000-module.tied_modules.embed.embedding_dropout/events.out.tfevents.1638234920.r6i3n2.1319219.3 +3 -0
  31. emb-norm/000-module.tied_modules.embed.position_embeddings/events.out.tfevents.1637404942.r6i5n3.394761.2 +3 -0
  32. emb-norm/000-module.tied_modules.embed.position_embeddings/events.out.tfevents.1638234920.r6i3n2.1319219.2 +3 -0
  33. emb-norm/000-module.tied_modules.embed.word_embeddings.norm/events.out.tfevents.1637404942.r6i5n3.394761.0 +3 -0
  34. emb-norm/000-module.tied_modules.embed.word_embeddings.norm/events.out.tfevents.1638234920.r6i3n2.1319219.0 +3 -0
  35. emb-norm/000-module.tied_modules.embed.word_embeddings/events.out.tfevents.1637404942.r6i5n3.394761.1 +3 -0
  36. emb-norm/000-module.tied_modules.embed.word_embeddings/events.out.tfevents.1638234920.r6i3n2.1319219.1 +3 -0
  37. emb-norm/000-module.tied_modules.embed/events.out.tfevents.1637404942.r6i5n3.394761.4 +3 -0
  38. emb-norm/000-module.tied_modules.embed/events.out.tfevents.1638234920.r6i3n2.1319219.4 +3 -0
  39. emb-norm/000-module/events.out.tfevents.1637404944.r6i5n3.394761.27 +3 -0
  40. emb-norm/028-module.17.input_layernorm/events.out.tfevents.1637404955.r7i5n1.454973.0 +3 -0
  41. emb-norm/028-module.17.mlp.dense_4h_to_h/events.out.tfevents.1637404956.r7i5n1.454973.8 +3 -0
  42. emb-norm/028-module.17.mlp.dense_h_to_4h/events.out.tfevents.1637404956.r7i5n1.454973.7 +3 -0
  43. emb-norm/028-module.17.mlp/events.out.tfevents.1637404956.r7i5n1.454973.9 +3 -0
  44. emb-norm/028-module.17.post_attention_layernorm/events.out.tfevents.1637404956.r7i5n1.454973.6 +3 -0
  45. emb-norm/028-module.17.self_attention.attention_dropout/events.out.tfevents.1637404955.r7i5n1.454973.3 +3 -0
  46. emb-norm/028-module.17.self_attention.dense/events.out.tfevents.1637404955.r7i5n1.454973.4 +3 -0
  47. emb-norm/028-module.17.self_attention.query_key_value/events.out.tfevents.1637404955.r7i5n1.454973.1 +3 -0
  48. emb-norm/028-module.17.self_attention.scale_mask_softmax/events.out.tfevents.1637404955.r7i5n1.454973.2 +3 -0
  49. emb-norm/028-module.17.self_attention/events.out.tfevents.1637404955.r7i5n1.454973.5 +3 -0
  50. emb-norm/028-module.17/events.out.tfevents.1637404956.r7i5n1.454973.10 +3 -0
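Each file listed above is a TensorBoard event log tracked with Git LFS, so the diffs below record only the three-line LFS pointer (version, oid, size) rather than the event data itself. As a minimal sketch of how one of these logs could be inspected once the LFS objects have been pulled locally (assuming the tensorboard Python package is installed; the directory is one of those listed above, picked purely as an example):

from tensorboard.backend.event_processing.event_accumulator import EventAccumulator

# One of the log directories added in this commit (after "git lfs pull").
log_dir = "emb-norm/000-module.3.input_layernorm"

acc = EventAccumulator(log_dir)
acc.Reload()                          # parse the events.out.tfevents.* file(s) under log_dir
for tag in acc.Tags()["scalars"]:     # scalar tags recorded in the log
    events = acc.Scalars(tag)         # each event carries (wall_time, step, value)
    print(tag, len(events))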
emb-norm/000-module.3.input_layernorm/events.out.tfevents.1637404942.r6i5n3.394761.5 ADDED
@@ -0,0 +1,3 @@
+version https://git-lfs.github.com/spec/v1
+oid sha256:b74e459b8956ac38441241a70d9635f6c19856b379697f0aa2717e0866adb9fc
+size 6708280
emb-norm/000-module.3.input_layernorm/events.out.tfevents.1638234920.r6i3n2.1319219.5 ADDED
@@ -0,0 +1,3 @@
+version https://git-lfs.github.com/spec/v1
+oid sha256:dd7a575b29528dca919c78d862b1a5d5c7b39c94ccbb4b077eaf785fb7edb6b7
+size 95460328
emb-norm/000-module.3.mlp.dense_4h_to_h/events.out.tfevents.1637404943.r6i5n3.394761.13 ADDED
@@ -0,0 +1,3 @@
+version https://git-lfs.github.com/spec/v1
+oid sha256:0692c4d70c69e16004759564b9a4dcc0e8e09f0d7d879cfcc904fd2881b7bba7
+size 6771640
emb-norm/000-module.3.mlp.dense_h_to_4h/events.out.tfevents.1637404943.r6i5n3.394761.12 ADDED
@@ -0,0 +1,3 @@
+version https://git-lfs.github.com/spec/v1
+oid sha256:e4518fa3efbe4bb6da17bf8f0b3e2180d34d2ea6295b722bd5b2b6b738f774af
+size 6771640
emb-norm/000-module.3.mlp/events.out.tfevents.1637404943.r6i5n3.394761.14 ADDED
@@ -0,0 +1,3 @@
+version https://git-lfs.github.com/spec/v1
+oid sha256:091d915f8d4b6fc2e58c3c79929ae249870d60a6813d8a9d1ac35e11120d1c49
+size 3967960
emb-norm/000-module.3.post_attention_layernorm/events.out.tfevents.1637404943.r6i5n3.394761.11 ADDED
@@ -0,0 +1,3 @@
+version https://git-lfs.github.com/spec/v1
+oid sha256:24b6b40922e00e05f675eed09345ba19773b775e0e7cc9cfe0f3aa7f5bb59a3d
+size 6708280
emb-norm/000-module.3.self_attention.attention_dropout/events.out.tfevents.1637404943.r6i5n3.394761.8 ADDED
@@ -0,0 +1,3 @@
+version https://git-lfs.github.com/spec/v1
+oid sha256:5060808c8e5398e4785434f6f010e871e653a846c4300b6a1b5bc45b24d420f6
+size 2922520
emb-norm/000-module.3.self_attention.attention_dropout/events.out.tfevents.1638234920.r6i3n2.1319219.8 ADDED
@@ -0,0 +1,3 @@
+version https://git-lfs.github.com/spec/v1
+oid sha256:d5144848dc3744ff8ca2514dfde50a6b141f678f43e3763c069e67ed49d0e158
+size 41587816
emb-norm/000-module.3.self_attention.dense/events.out.tfevents.1637404943.r6i5n3.394761.9 ADDED
@@ -0,0 +1,3 @@
+version https://git-lfs.github.com/spec/v1
+oid sha256:8451aee7ce07dc73ab78d7df36ec7732e8f87fee1d158170e08bc565a416ba43
+size 6771640
emb-norm/000-module.3.self_attention.dense/events.out.tfevents.1638234920.r6i3n2.1319219.9 ADDED
@@ -0,0 +1,3 @@
+version https://git-lfs.github.com/spec/v1
+oid sha256:dbc3c2cc4dcbab3e433809ce606340a93fdbd8711c6eea34e96ff6957816cc7e
+size 96361960
emb-norm/000-module.3.self_attention.query_key_value/events.out.tfevents.1637404943.r6i5n3.394761.6 ADDED
@@ -0,0 +1,3 @@
+version https://git-lfs.github.com/spec/v1
+oid sha256:7d3eb760b3eca578ec41bbce734a6e923392641d7f39f55e753dd932b72bb93d
+size 6264760
emb-norm/000-module.3.self_attention.query_key_value/events.out.tfevents.1638234920.r6i3n2.1319219.6 ADDED
@@ -0,0 +1,3 @@
+version https://git-lfs.github.com/spec/v1
+oid sha256:bc51af686450d29867044de81957983fbe081cfddd5d628b4b267fd342be118b
+size 89148904
emb-norm/000-module.3.self_attention.scale_mask_softmax/events.out.tfevents.1637404943.r6i5n3.394761.7 ADDED
@@ -0,0 +1,3 @@
+version https://git-lfs.github.com/spec/v1
+oid sha256:265f9e9cae89771ab57103ec1b64e9709e818d9eeaf7a9ad4e96416d2117426f
+size 2922520
emb-norm/000-module.3.self_attention.scale_mask_softmax/events.out.tfevents.1638234920.r6i3n2.1319219.7 ADDED
@@ -0,0 +1,3 @@
+version https://git-lfs.github.com/spec/v1
+oid sha256:3b9aad70056e54c203eee0de54d06a2330bbbf3650bb51982788b01247b8cadb
+size 41587816
emb-norm/000-module.3.self_attention/events.out.tfevents.1637404943.r6i5n3.394761.10 ADDED
@@ -0,0 +1,3 @@
+version https://git-lfs.github.com/spec/v1
+oid sha256:0a3d80a06a109ed8493ee6fb48b28e840c55751384810e39d750f4e70d62afc4
+size 3967960
emb-norm/000-module.3.self_attention/events.out.tfevents.1638234920.r6i3n2.1319219.10 ADDED
@@ -0,0 +1,3 @@
+version https://git-lfs.github.com/spec/v1
+oid sha256:9cafd612f62d1878b220490251ce43093b260b33e99822e8eabba4c3f30bca6b
+size 56464744
emb-norm/000-module.3/events.out.tfevents.1637404944.r6i5n3.394761.15 ADDED
@@ -0,0 +1,3 @@
+version https://git-lfs.github.com/spec/v1
+oid sha256:ddb738c88f1bb949aa1978c94fdef7a2841e03b1f192fd3be62c0bfaf4cc89a1
+size 3904600
emb-norm/000-module.4.input_layernorm/events.out.tfevents.1637404944.r6i5n3.394761.16 ADDED
@@ -0,0 +1,3 @@
+version https://git-lfs.github.com/spec/v1
+oid sha256:27bb56edea08faae538d7e848346f587098e338e2bdeed148168e5fbd114e8af
+size 6708280
emb-norm/000-module.4.mlp.dense_4h_to_h/events.out.tfevents.1637404944.r6i5n3.394761.24 ADDED
@@ -0,0 +1,3 @@
+version https://git-lfs.github.com/spec/v1
+oid sha256:4d5519dfb818f403393478ad23b1beb7c709f08a6d1af5e7ada0723e9875e84a
+size 6771640
emb-norm/000-module.4.mlp.dense_h_to_4h/events.out.tfevents.1637404944.r6i5n3.394761.23 ADDED
@@ -0,0 +1,3 @@
+version https://git-lfs.github.com/spec/v1
+oid sha256:94dea2cae2e8684f5c66a90b0a811801de36dd2ae594fc3ced1fcbe89140043e
+size 6771640
emb-norm/000-module.4.mlp/events.out.tfevents.1637404944.r6i5n3.394761.25 ADDED
@@ -0,0 +1,3 @@
+version https://git-lfs.github.com/spec/v1
+oid sha256:2484aa801b5d2ecba3dd3bdb7b8438c480c113f9cf178f43bb810c74a0f0e1cc
+size 3967960
emb-norm/000-module.4.post_attention_layernorm/events.out.tfevents.1637404944.r6i5n3.394761.22 ADDED
@@ -0,0 +1,3 @@
+version https://git-lfs.github.com/spec/v1
+oid sha256:68bb3f23585504ddb7e4647e46a67d480aff20302c4feab5ec881efbe374f134
+size 6708280
emb-norm/000-module.4.self_attention.attention_dropout/events.out.tfevents.1637404944.r6i5n3.394761.19 ADDED
@@ -0,0 +1,3 @@
+version https://git-lfs.github.com/spec/v1
+oid sha256:2d94f5b1d6ab0e19b1d99de92888eb75abc0f0862676adad661a8db910c177c1
+size 2922520
emb-norm/000-module.4.self_attention.dense/events.out.tfevents.1637404944.r6i5n3.394761.20 ADDED
@@ -0,0 +1,3 @@
+version https://git-lfs.github.com/spec/v1
+oid sha256:21a3cc86aeab7b59859e6297025f77ddf9bb32dc80b06c9ae409b3a71e081759
+size 6771640
emb-norm/000-module.4.self_attention.query_key_value/events.out.tfevents.1637404944.r6i5n3.394761.17 ADDED
@@ -0,0 +1,3 @@
+version https://git-lfs.github.com/spec/v1
+oid sha256:3e035cc0b1613b0962f92bd1420bdbedf5fdf47127b35d8443b066216b8a46b2
+size 6264760
emb-norm/000-module.4.self_attention.scale_mask_softmax/events.out.tfevents.1637404944.r6i5n3.394761.18 ADDED
@@ -0,0 +1,3 @@
+version https://git-lfs.github.com/spec/v1
+oid sha256:ea6db3d1cfdcfb42d4e3a3f069227a2a37b90802eda472368398eb4315ef29d1
+size 2922520
emb-norm/000-module.4.self_attention/events.out.tfevents.1637404944.r6i5n3.394761.21 ADDED
@@ -0,0 +1,3 @@
+version https://git-lfs.github.com/spec/v1
+oid sha256:6d205e51a083ddfdb6fa9b1f27abcda0edf8b7681bf9cb3f5a9dd03bed8b7a3f
+size 3967960
emb-norm/000-module.4/events.out.tfevents.1637404944.r6i5n3.394761.26 ADDED
@@ -0,0 +1,3 @@
+version https://git-lfs.github.com/spec/v1
+oid sha256:14720b761bfb212b303ccd2f358a0f360b562d5b08f71906435ee5df17137167
+size 3904600
emb-norm/000-module.tied_modules.embed.embedding_dropout/events.out.tfevents.1637404942.r6i5n3.394761.3 ADDED
@@ -0,0 +1,3 @@
+version https://git-lfs.github.com/spec/v1
+oid sha256:4391ea484ad71518de87c947e50806adc16255b6aa9e5b652bf9a4175a3d95e7
+size 1956280
emb-norm/000-module.tied_modules.embed.embedding_dropout/events.out.tfevents.1638234920.r6i3n2.1319219.3 ADDED
@@ -0,0 +1,3 @@
+version https://git-lfs.github.com/spec/v1
+oid sha256:cbef8da9d84e68c2b4dd7f9dac3db4ef63411968fe529a2c54d564392ef4f3f1
+size 27837928
emb-norm/000-module.tied_modules.embed.position_embeddings/events.out.tfevents.1637404942.r6i5n3.394761.2 ADDED
@@ -0,0 +1,3 @@
+version https://git-lfs.github.com/spec/v1
+oid sha256:57ef29922754abf95a8662e14782ad0dafaf2c3eef6d77ca8bb8df147dc4288f
+size 2415640
emb-norm/000-module.tied_modules.embed.position_embeddings/events.out.tfevents.1638234920.r6i3n2.1319219.2 ADDED
@@ -0,0 +1,3 @@
+version https://git-lfs.github.com/spec/v1
+oid sha256:febdde68961be30187fdf28563cb3b71799d8020cb80014556a2b1f519f6e8be
+size 34374760
emb-norm/000-module.tied_modules.embed.word_embeddings.norm/events.out.tfevents.1637404942.r6i5n3.394761.0 ADDED
@@ -0,0 +1,3 @@
+version https://git-lfs.github.com/spec/v1
+oid sha256:f29303dbcc0fb90251735763f0b6577991da6d11dd5598c4414621fa349eda2f
+size 4807480
emb-norm/000-module.tied_modules.embed.word_embeddings.norm/events.out.tfevents.1638234920.r6i3n2.1319219.0 ADDED
@@ -0,0 +1,3 @@
+version https://git-lfs.github.com/spec/v1
+oid sha256:97d86cbda6271c419aad3b9ace19b0f05c5290dae0d4b85222259e1ae4b65db7
+size 68411368
emb-norm/000-module.tied_modules.embed.word_embeddings/events.out.tfevents.1637404942.r6i5n3.394761.1 ADDED
@@ -0,0 +1,3 @@
+version https://git-lfs.github.com/spec/v1
+oid sha256:5ee4b3832f8f9f63c63992c655b713021be0ed01eee1c90aedbbea68d8f8b388
+size 3397720
emb-norm/000-module.tied_modules.embed.word_embeddings/events.out.tfevents.1638234920.r6i3n2.1319219.1 ADDED
@@ -0,0 +1,3 @@
+version https://git-lfs.github.com/spec/v1
+oid sha256:e601206a038661448edb9e34bd19af0b50de35fe3c43e20234e98fabae2da7d1
+size 48350056
emb-norm/000-module.tied_modules.embed/events.out.tfevents.1637404942.r6i5n3.394761.4 ADDED
@@ -0,0 +1,3 @@
+version https://git-lfs.github.com/spec/v1
+oid sha256:6288ffb086f96dad3ccb36f8925ce4408db44757abcf60a77bba217c84d0b0d0
+size 1465240
emb-norm/000-module.tied_modules.embed/events.out.tfevents.1638234920.r6i3n2.1319219.4 ADDED
@@ -0,0 +1,3 @@
+version https://git-lfs.github.com/spec/v1
+oid sha256:fe403bb83b2bb36eaa3b81f5761ae9748bca7bedaeef717ef836dc352c9102cc
+size 20850280
emb-norm/000-module/events.out.tfevents.1637404944.r6i5n3.394761.27 ADDED
@@ -0,0 +1,3 @@
+version https://git-lfs.github.com/spec/v1
+oid sha256:b83731ea4e6514cdcb57ec24d215fa30965877c282907a914dbaaf08512a489b
+size 1465240
emb-norm/028-module.17.input_layernorm/events.out.tfevents.1637404955.r7i5n1.454973.0 ADDED
@@ -0,0 +1,3 @@
+version https://git-lfs.github.com/spec/v1
+oid sha256:d86eec5aaa1c9c7c5deb551205d6811b7ff7b55ad74ecc4bd0f6dd71d851a67a
+size 6708280
emb-norm/028-module.17.mlp.dense_4h_to_h/events.out.tfevents.1637404956.r7i5n1.454973.8 ADDED
@@ -0,0 +1,3 @@
+version https://git-lfs.github.com/spec/v1
+oid sha256:e8dbfecb2858507f22f898ed5b0e7f486e83efe01b7399c5723b8ccdd4e306b8
+size 6771640
emb-norm/028-module.17.mlp.dense_h_to_4h/events.out.tfevents.1637404956.r7i5n1.454973.7 ADDED
@@ -0,0 +1,3 @@
+version https://git-lfs.github.com/spec/v1
+oid sha256:5ce7916943913798012e6c56c2ac83db6fdd45d64d7e227a7068d2c6434ed50b
+size 6771640
emb-norm/028-module.17.mlp/events.out.tfevents.1637404956.r7i5n1.454973.9 ADDED
@@ -0,0 +1,3 @@
+version https://git-lfs.github.com/spec/v1
+oid sha256:b9f1ca54deb60c474972c46cbb811e2da928da6e3fce7349605e20d936d18772
+size 3967960
emb-norm/028-module.17.post_attention_layernorm/events.out.tfevents.1637404956.r7i5n1.454973.6 ADDED
@@ -0,0 +1,3 @@
+version https://git-lfs.github.com/spec/v1
+oid sha256:bc479097a796e1426e19c8003454bcc0a694f15a12d96fd0b31cf804ccfc95bc
+size 6708280
emb-norm/028-module.17.self_attention.attention_dropout/events.out.tfevents.1637404955.r7i5n1.454973.3 ADDED
@@ -0,0 +1,3 @@
+version https://git-lfs.github.com/spec/v1
+oid sha256:a38648994ac80c4e3bf36f912f9450be745b952b821647a9e3e7bb2f0db5d02a
+size 2922520
emb-norm/028-module.17.self_attention.dense/events.out.tfevents.1637404955.r7i5n1.454973.4 ADDED
@@ -0,0 +1,3 @@
+version https://git-lfs.github.com/spec/v1
+oid sha256:450a31858f0e013fc3dde9ce0517e5ef0a4253daaebbd8c669bd9409e56a95d3
+size 6771640
emb-norm/028-module.17.self_attention.query_key_value/events.out.tfevents.1637404955.r7i5n1.454973.1 ADDED
@@ -0,0 +1,3 @@
+version https://git-lfs.github.com/spec/v1
+oid sha256:8f01c5a413495084791b333bc6da12fc6db22fc86f79c2c8f5978f4991eb4c3a
+size 6264760
emb-norm/028-module.17.self_attention.scale_mask_softmax/events.out.tfevents.1637404955.r7i5n1.454973.2 ADDED
@@ -0,0 +1,3 @@
+version https://git-lfs.github.com/spec/v1
+oid sha256:78e213d3365dd972eb301eb6c455a082242702855f7c18b6d8cf431ef9f1f6e5
+size 2922520
emb-norm/028-module.17.self_attention/events.out.tfevents.1637404955.r7i5n1.454973.5 ADDED
@@ -0,0 +1,3 @@
+version https://git-lfs.github.com/spec/v1
+oid sha256:f76fec3d96067952c0a18f1d590e0be74a860f04151ec935ac945e136b1f357c
+size 3967960
emb-norm/028-module.17/events.out.tfevents.1637404956.r7i5n1.454973.10 ADDED
@@ -0,0 +1,3 @@
+version https://git-lfs.github.com/spec/v1
+oid sha256:5d0ae072e4188cc97dcb4f12302b8d37a14ec3b7b268e33dd469b52d9f2edeca
+size 3904600