harishvs committed
Commit 5dc32fa
Parent: d6d6b2a

Upload folder using huggingface_hub
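For reference, a folder upload like this one is typically done with huggingface_hub's `upload_folder`. The snippet below is a minimal sketch; the local path and repo id are hypothetical placeholders, not the exact call behind this commit.

```python
from huggingface_hub import HfApi

api = HfApi()  # picks up the token from `huggingface-cli login` by default

# Placeholder folder path and repo id for illustration only.
api.upload_folder(
    folder_path="./dolly_llama_export",     # local folder containing checkpoint/ and compiled/
    repo_id="harishvs/dolly-llama-neuron",  # hypothetical repo id
    repo_type="model",
    commit_message="Upload folder using huggingface_hub",
)
```

`upload_folder` routes large files through the repository's LFS rules, which is consistent with the .gitattributes additions further down.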

This view is limited to 50 files because the commit contains too many changes; see the raw diff for the complete list.
Files changed (50)
  1. .gitattributes +10 -0
  2. checkpoint/config.json +29 -0
  3. checkpoint/generation_config.json +7 -0
  4. checkpoint/pytorch_model.bin/key_to_filename.json +3 -0
  5. checkpoint/pytorch_model.bin/p0.model.embed_tokens.weight +3 -0
  6. checkpoint/pytorch_model.bin/p1.model.layers.0.self_attn.q_proj.weight +3 -0
  7. checkpoint/pytorch_model.bin/p10.model.layers.1.self_attn.q_proj.weight +3 -0
  8. checkpoint/pytorch_model.bin/p100.model.layers.11.self_attn.q_proj.weight +3 -0
  9. checkpoint/pytorch_model.bin/p101.model.layers.11.self_attn.k_proj.weight +3 -0
  10. checkpoint/pytorch_model.bin/p102.model.layers.11.self_attn.v_proj.weight +3 -0
  11. checkpoint/pytorch_model.bin/p103.model.layers.11.self_attn.o_proj.weight +3 -0
  12. checkpoint/pytorch_model.bin/p104.model.layers.11.mlp.gate_proj.weight +3 -0
  13. checkpoint/pytorch_model.bin/p105.model.layers.11.mlp.up_proj.weight +3 -0
  14. checkpoint/pytorch_model.bin/p106.model.layers.11.mlp.down_proj.weight +3 -0
  15. checkpoint/pytorch_model.bin/p107.model.layers.11.input_layernorm.weight +3 -0
  16. checkpoint/pytorch_model.bin/p108.model.layers.11.post_attention_layernorm.weight +3 -0
  17. checkpoint/pytorch_model.bin/p109.model.layers.12.self_attn.q_proj.weight +3 -0
  18. checkpoint/pytorch_model.bin/p11.model.layers.1.self_attn.k_proj.weight +3 -0
  19. checkpoint/pytorch_model.bin/p110.model.layers.12.self_attn.k_proj.weight +3 -0
  20. checkpoint/pytorch_model.bin/p111.model.layers.12.self_attn.v_proj.weight +3 -0
  21. checkpoint/pytorch_model.bin/p112.model.layers.12.self_attn.o_proj.weight +3 -0
  22. checkpoint/pytorch_model.bin/p113.model.layers.12.mlp.gate_proj.weight +3 -0
  23. checkpoint/pytorch_model.bin/p114.model.layers.12.mlp.up_proj.weight +3 -0
  24. checkpoint/pytorch_model.bin/p115.model.layers.12.mlp.down_proj.weight +3 -0
  25. checkpoint/pytorch_model.bin/p116.model.layers.12.input_layernorm.weight +3 -0
  26. checkpoint/pytorch_model.bin/p117.model.layers.12.post_attention_layernorm.weight +3 -0
  27. checkpoint/pytorch_model.bin/p118.model.layers.13.self_attn.q_proj.weight +3 -0
  28. checkpoint/pytorch_model.bin/p119.model.layers.13.self_attn.k_proj.weight +3 -0
  29. checkpoint/pytorch_model.bin/p12.model.layers.1.self_attn.v_proj.weight +3 -0
  30. checkpoint/pytorch_model.bin/p120.model.layers.13.self_attn.v_proj.weight +3 -0
  31. checkpoint/pytorch_model.bin/p121.model.layers.13.self_attn.o_proj.weight +3 -0
  32. checkpoint/pytorch_model.bin/p122.model.layers.13.mlp.gate_proj.weight +3 -0
  33. checkpoint/pytorch_model.bin/p123.model.layers.13.mlp.up_proj.weight +3 -0
  34. checkpoint/pytorch_model.bin/p124.model.layers.13.mlp.down_proj.weight +3 -0
  35. checkpoint/pytorch_model.bin/p125.model.layers.13.input_layernorm.weight +3 -0
  36. checkpoint/pytorch_model.bin/p126.model.layers.13.post_attention_layernorm.weight +3 -0
  37. checkpoint/pytorch_model.bin/p127.model.layers.14.self_attn.q_proj.weight +3 -0
  38. checkpoint/pytorch_model.bin/p128.model.layers.14.self_attn.k_proj.weight +3 -0
  39. checkpoint/pytorch_model.bin/p129.model.layers.14.self_attn.v_proj.weight +3 -0
  40. checkpoint/pytorch_model.bin/p13.model.layers.1.self_attn.o_proj.weight +3 -0
  41. checkpoint/pytorch_model.bin/p130.model.layers.14.self_attn.o_proj.weight +3 -0
  42. checkpoint/pytorch_model.bin/p131.model.layers.14.mlp.gate_proj.weight +3 -0
  43. checkpoint/pytorch_model.bin/p132.model.layers.14.mlp.up_proj.weight +3 -0
  44. checkpoint/pytorch_model.bin/p133.model.layers.14.mlp.down_proj.weight +3 -0
  45. checkpoint/pytorch_model.bin/p134.model.layers.14.input_layernorm.weight +3 -0
  46. checkpoint/pytorch_model.bin/p135.model.layers.14.post_attention_layernorm.weight +3 -0
  47. checkpoint/pytorch_model.bin/p136.model.layers.15.self_attn.q_proj.weight +3 -0
  48. checkpoint/pytorch_model.bin/p137.model.layers.15.self_attn.k_proj.weight +3 -0
  49. checkpoint/pytorch_model.bin/p138.model.layers.15.self_attn.v_proj.weight +3 -0
  50. checkpoint/pytorch_model.bin/p139.model.layers.15.self_attn.o_proj.weight +3 -0
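Since the listing above is capped at 50 entries, the full file list can be retrieved programmatically. A small sketch using huggingface_hub's `list_repo_files`; the repo id is again a placeholder.

```python
from huggingface_hub import HfApi

api = HfApi()
# Placeholder repo id; substitute the repository this commit belongs to.
files = api.list_repo_files(repo_id="harishvs/dolly-llama-neuron", repo_type="model")

# For example, count the per-tensor shards in the split checkpoint directory.
shards = [f for f in files if f.startswith("checkpoint/pytorch_model.bin/p")]
print(len(files), "files total,", len(shards), "checkpoint shards")
```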
.gitattributes CHANGED
@@ -33,3 +33,13 @@ saved_model/**/* filter=lfs diff=lfs merge=lfs -text
  *.zip filter=lfs diff=lfs merge=lfs -text
  *.zst filter=lfs diff=lfs merge=lfs -text
  *tfevents* filter=lfs diff=lfs merge=lfs -text
+ compiled/1744545db8406da6398e.neff filter=lfs diff=lfs merge=lfs -text
+ compiled/1bb198a13076d1853641.neff filter=lfs diff=lfs merge=lfs -text
+ compiled/2e107447c6e0a7f19573.neff filter=lfs diff=lfs merge=lfs -text
+ compiled/51f8a515591560780d48.neff filter=lfs diff=lfs merge=lfs -text
+ compiled/6cab3b785bfcdd58ee6a.neff filter=lfs diff=lfs merge=lfs -text
+ compiled/a37ede77a746b866b69d.neff filter=lfs diff=lfs merge=lfs -text
+ compiled/b938f58ffd873c7146a7.neff filter=lfs diff=lfs merge=lfs -text
+ compiled/c3741198db69eb019273.neff filter=lfs diff=lfs merge=lfs -text
+ compiled/d632e5ec42990d4ee01b.neff filter=lfs diff=lfs merge=lfs -text
+ compiled/d95062416ef36beb6494.neff filter=lfs diff=lfs merge=lfs -text
checkpoint/config.json ADDED
@@ -0,0 +1,29 @@
+ {
+   "_name_or_path": "dolly_llama",
+   "architectures": [
+     "LlamaForCausalLM"
+   ],
+   "attention_bias": false,
+   "attention_dropout": 0.0,
+   "bos_token_id": 1,
+   "eos_token_id": 2,
+   "hidden_act": "silu",
+   "hidden_size": 4096,
+   "initializer_range": 0.02,
+   "intermediate_size": 11008,
+   "layerdrop": 0,
+   "max_position_embeddings": 4096,
+   "model_type": "llama",
+   "num_attention_heads": 32,
+   "num_hidden_layers": 32,
+   "num_key_value_heads": 32,
+   "pretraining_tp": 1,
+   "rms_norm_eps": 1e-05,
+   "rope_scaling": null,
+   "rope_theta": 10000.0,
+   "tie_word_embeddings": false,
+   "torch_dtype": "float32",
+   "transformers_version": "4.36.2",
+   "use_cache": false,
+   "vocab_size": 32000
+ }
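The config above describes a 32-layer LLaMA-style causal LM (hidden size 4096, vocabulary 32000). As a rough sketch, assuming the `checkpoint/` directory of a local clone is used as a model directory, it can be read back with transformers:

```python
from transformers import AutoConfig, AutoModelForCausalLM

# "checkpoint" refers to the checkpoint/ directory in this repo (local clone assumed).
config = AutoConfig.from_pretrained("checkpoint")
print(config.model_type, config.num_hidden_layers, config.hidden_size)

# Instantiate the architecture from the config alone (random weights; the actual
# weights live in the split pytorch_model.bin/ directory and need their own loader).
model = AutoModelForCausalLM.from_config(config)
```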
checkpoint/generation_config.json ADDED
@@ -0,0 +1,7 @@
+ {
+   "_from_model_config": true,
+   "bos_token_id": 1,
+   "eos_token_id": 2,
+   "transformers_version": "4.36.2",
+   "use_cache": false
+ }
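Similarly, the generation defaults above can be loaded on their own; a minimal sketch, again assuming a local clone:

```python
from transformers import GenerationConfig

gen_config = GenerationConfig.from_pretrained("checkpoint")  # local checkpoint/ directory
print(gen_config.bos_token_id, gen_config.eos_token_id, gen_config.use_cache)
```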
checkpoint/pytorch_model.bin/key_to_filename.json ADDED
@@ -0,0 +1,3 @@
+ version https://git-lfs.github.com/spec/v1
+ oid sha256:825d20f4a18183eff3963e805edd13ef7eb35b0aff7a850e8153ca1eeeb37970
+ size 26397
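key_to_filename.json indicates a split checkpoint layout, with pytorch_model.bin stored as a directory holding one file per tensor. The sketch below reassembles a state dict under the assumption that the JSON maps each parameter name to its per-tensor file (e.g. "p0.model.embed_tokens.weight") and that each file is a torch-serialized tensor; the layout details are inferred, not documented in this commit.

```python
import json
import os

import torch

ckpt_dir = "checkpoint/pytorch_model.bin"  # split-checkpoint directory from this repo

# Assumed mapping: state-dict key -> per-tensor file name stored alongside the JSON.
with open(os.path.join(ckpt_dir, "key_to_filename.json")) as f:
    key_to_filename = json.load(f)

state_dict = {
    key: torch.load(os.path.join(ckpt_dir, fname), map_location="cpu")
    for key, fname in key_to_filename.items()
}
print(f"loaded {len(state_dict)} tensors")
```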
checkpoint/pytorch_model.bin/p0.model.embed_tokens.weight ADDED
@@ -0,0 +1,3 @@
+ version https://git-lfs.github.com/spec/v1
+ oid sha256:5028c748da088a303686a5793a3647c65de7b910d1dd7ec7b7151af156c87d97
+ size 524288789
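Each per-tensor entry here is a Git LFS pointer (version/oid/size) rather than the tensor data itself. As a small sketch, a locally downloaded file can be checked against its pointer by recomputing the SHA-256; the path assumes a local clone with LFS files pulled.

```python
import hashlib


def sha256_of(path: str, chunk_size: int = 1 << 20) -> str:
    """Stream the file and return its hex SHA-256 digest."""
    h = hashlib.sha256()
    with open(path, "rb") as f:
        for chunk in iter(lambda: f.read(chunk_size), b""):
            h.update(chunk)
    return h.hexdigest()


# Compare against the oid recorded in the pointer above.
digest = sha256_of("checkpoint/pytorch_model.bin/p0.model.embed_tokens.weight")
print(digest == "5028c748da088a303686a5793a3647c65de7b910d1dd7ec7b7151af156c87d97")
```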
checkpoint/pytorch_model.bin/p1.model.layers.0.self_attn.q_proj.weight ADDED
@@ -0,0 +1,3 @@
+ version https://git-lfs.github.com/spec/v1
+ oid sha256:514d6367f8e1161d040be54c946e72a9d6182659b5909a9422b470a1bbbb7fc1
+ size 67109756
checkpoint/pytorch_model.bin/p10.model.layers.1.self_attn.q_proj.weight ADDED
@@ -0,0 +1,3 @@
+ version https://git-lfs.github.com/spec/v1
+ oid sha256:407e025f19c80f4d55309bbe16e2c86732b5fbe75e5d0a1ee94f5f76cb836f4e
+ size 67109759
checkpoint/pytorch_model.bin/p100.model.layers.11.self_attn.q_proj.weight ADDED
@@ -0,0 +1,3 @@
+ version https://git-lfs.github.com/spec/v1
+ oid sha256:69748cdd78a0520baf692507744bf6e12e6d3b1282ea87bb353517089f87eb91
+ size 67109765
checkpoint/pytorch_model.bin/p101.model.layers.11.self_attn.k_proj.weight ADDED
@@ -0,0 +1,3 @@
+ version https://git-lfs.github.com/spec/v1
+ oid sha256:728dc1421c04855eba7edeaaa8cbc6519a8acaa5d89cc122abbc4dc30af4e816
+ size 67109765
checkpoint/pytorch_model.bin/p102.model.layers.11.self_attn.v_proj.weight ADDED
@@ -0,0 +1,3 @@
+ version https://git-lfs.github.com/spec/v1
+ oid sha256:e2416a881e8e31db6e24cf51f094db04a838984780f6dd0b0df1933852d04230
+ size 67109765
checkpoint/pytorch_model.bin/p103.model.layers.11.self_attn.o_proj.weight ADDED
@@ -0,0 +1,3 @@
+ version https://git-lfs.github.com/spec/v1
+ oid sha256:d955fa899081faf18c6ddfb72827c70359c2eb757c611592f7231bd9a7924b4c
+ size 67109765
checkpoint/pytorch_model.bin/p104.model.layers.11.mlp.gate_proj.weight ADDED
@@ -0,0 +1,3 @@
+ version https://git-lfs.github.com/spec/v1
+ oid sha256:d6d147bd4a815abb3504c599d1b9858b7c42c3d8dcf5493501730e96393c028a
+ size 180355964
checkpoint/pytorch_model.bin/p105.model.layers.11.mlp.up_proj.weight ADDED
@@ -0,0 +1,3 @@
+ version https://git-lfs.github.com/spec/v1
+ oid sha256:844a8bf782647f7857536839e770abc649fcc7918daf5f710ab2cb63b6aca262
+ size 180355958
checkpoint/pytorch_model.bin/p106.model.layers.11.mlp.down_proj.weight ADDED
@@ -0,0 +1,3 @@
+ version https://git-lfs.github.com/spec/v1
+ oid sha256:2241d5dbf1ebbe5abd826d3c81c21e52cc9bca84a55581880e9932339a026244
+ size 180355964
checkpoint/pytorch_model.bin/p107.model.layers.11.input_layernorm.weight ADDED
@@ -0,0 +1,3 @@
+ version https://git-lfs.github.com/spec/v1
+ oid sha256:5306d15a655d708c223785e9d9ebfc7dc250089ab684e4b8c1042fdec737174b
+ size 17282
checkpoint/pytorch_model.bin/p108.model.layers.11.post_attention_layernorm.weight ADDED
@@ -0,0 +1,3 @@
+ version https://git-lfs.github.com/spec/v1
+ oid sha256:440dd212bb2e67c2ff83a2d42135f13589a04f622f41d523c241941e008ec1b8
+ size 17309
checkpoint/pytorch_model.bin/p109.model.layers.12.self_attn.q_proj.weight ADDED
@@ -0,0 +1,3 @@
+ version https://git-lfs.github.com/spec/v1
+ oid sha256:bb6678302d8f4208b0fa6e8607eb4c8e8b840c1c8d12e0964fb53b527cdebba9
+ size 67109765
checkpoint/pytorch_model.bin/p11.model.layers.1.self_attn.k_proj.weight ADDED
@@ -0,0 +1,3 @@
+ version https://git-lfs.github.com/spec/v1
+ oid sha256:171d7bbf31cd0008dc33d426993de322861e783ee43950e69b1a6751472883bd
+ size 67109759
checkpoint/pytorch_model.bin/p110.model.layers.12.self_attn.k_proj.weight ADDED
@@ -0,0 +1,3 @@
+ version https://git-lfs.github.com/spec/v1
+ oid sha256:1672d3be6db8c943c1e855544e2c27571359ec47a7ece659edd8daf507895ad0
+ size 67109765
checkpoint/pytorch_model.bin/p111.model.layers.12.self_attn.v_proj.weight ADDED
@@ -0,0 +1,3 @@
+ version https://git-lfs.github.com/spec/v1
+ oid sha256:1920ce84763c8ae8e60312bfddcb0f8926ccdb660d39be5a92b01507e453422d
+ size 67109765
checkpoint/pytorch_model.bin/p112.model.layers.12.self_attn.o_proj.weight ADDED
@@ -0,0 +1,3 @@
+ version https://git-lfs.github.com/spec/v1
+ oid sha256:b22da7ae3539500e9ab6d071d02bcf9bb4c94f17a3c9fc58b2e9730442d0194e
+ size 67109765
checkpoint/pytorch_model.bin/p113.model.layers.12.mlp.gate_proj.weight ADDED
@@ -0,0 +1,3 @@
+ version https://git-lfs.github.com/spec/v1
+ oid sha256:9245fede6470efa6a8e7958b06f360dea1289048397b71e8d6c9be72b9f8cdd8
+ size 180355964
checkpoint/pytorch_model.bin/p114.model.layers.12.mlp.up_proj.weight ADDED
@@ -0,0 +1,3 @@
+ version https://git-lfs.github.com/spec/v1
+ oid sha256:2444e1623566638db5f3147f1faca8346379416ff0e73fbe4280386dd17b1a98
+ size 180355958
checkpoint/pytorch_model.bin/p115.model.layers.12.mlp.down_proj.weight ADDED
@@ -0,0 +1,3 @@
+ version https://git-lfs.github.com/spec/v1
+ oid sha256:e49197c90e9ba30579eac7f2e4a3bc227f4707926e4ca50844d406f916bb1618
+ size 180355964
checkpoint/pytorch_model.bin/p116.model.layers.12.input_layernorm.weight ADDED
@@ -0,0 +1,3 @@
+ version https://git-lfs.github.com/spec/v1
+ oid sha256:e2ea77617949c00cbc425d7c95179e59ea795ffc58a8908cd99051b8fdb8ce59
+ size 17282
checkpoint/pytorch_model.bin/p117.model.layers.12.post_attention_layernorm.weight ADDED
@@ -0,0 +1,3 @@
+ version https://git-lfs.github.com/spec/v1
+ oid sha256:f42abe22e1afc5696515dfb5e91d0541aaf789bb8777d5f47c48726f7425c326
+ size 17309
checkpoint/pytorch_model.bin/p118.model.layers.13.self_attn.q_proj.weight ADDED
@@ -0,0 +1,3 @@
+ version https://git-lfs.github.com/spec/v1
+ oid sha256:b5868904964e463129762f406d2d51e6a784d2173c46e05a8219a70ca012555e
+ size 67109765
checkpoint/pytorch_model.bin/p119.model.layers.13.self_attn.k_proj.weight ADDED
@@ -0,0 +1,3 @@
+ version https://git-lfs.github.com/spec/v1
+ oid sha256:d322bf0f6b7a2615d28d3b138100ab078e551500de593bf5ca938a341f1c379f
+ size 67109765
checkpoint/pytorch_model.bin/p12.model.layers.1.self_attn.v_proj.weight ADDED
@@ -0,0 +1,3 @@
+ version https://git-lfs.github.com/spec/v1
+ oid sha256:ffc09a63610e579837a70c4ae1ad9fdd8641a2baf576b7f08c712301cc97a43b
+ size 67109759
checkpoint/pytorch_model.bin/p120.model.layers.13.self_attn.v_proj.weight ADDED
@@ -0,0 +1,3 @@
+ version https://git-lfs.github.com/spec/v1
+ oid sha256:0476b54465f16472deb392fd0d93c977d68cfe7b1cc3fee5f6ec670230b7299b
+ size 67109765
checkpoint/pytorch_model.bin/p121.model.layers.13.self_attn.o_proj.weight ADDED
@@ -0,0 +1,3 @@
+ version https://git-lfs.github.com/spec/v1
+ oid sha256:099c8359163377c6db247bb30b965194de7dadaaa48d707f17ade8a8c14a5d6e
+ size 67109765
checkpoint/pytorch_model.bin/p122.model.layers.13.mlp.gate_proj.weight ADDED
@@ -0,0 +1,3 @@
+ version https://git-lfs.github.com/spec/v1
+ oid sha256:7542b5f89ef6ab9b33ae9d7b087dd9c342096f3ca4a128d9faeb34eb8590283d
+ size 180355964
checkpoint/pytorch_model.bin/p123.model.layers.13.mlp.up_proj.weight ADDED
@@ -0,0 +1,3 @@
+ version https://git-lfs.github.com/spec/v1
+ oid sha256:d5c28d414a09a7a24f3239b49ff3570163ad6b25825e9540b744794111e51345
+ size 180355958
checkpoint/pytorch_model.bin/p124.model.layers.13.mlp.down_proj.weight ADDED
@@ -0,0 +1,3 @@
+ version https://git-lfs.github.com/spec/v1
+ oid sha256:7a57e0eb4c4293354123ba648bfcd94d6ffe6f26ceaea0f9e5394354190d2f79
+ size 180355964
checkpoint/pytorch_model.bin/p125.model.layers.13.input_layernorm.weight ADDED
@@ -0,0 +1,3 @@
+ version https://git-lfs.github.com/spec/v1
+ oid sha256:a20982118438409462766f4ac7c7cd4b1502cf9883dfd2d504c10f7293d1b358
+ size 17282
checkpoint/pytorch_model.bin/p126.model.layers.13.post_attention_layernorm.weight ADDED
@@ -0,0 +1,3 @@
+ version https://git-lfs.github.com/spec/v1
+ oid sha256:14703c6f0d6c692a048a16ec17d3bd5bb8e925dc6024558931041df48fa6d46c
+ size 17309
checkpoint/pytorch_model.bin/p127.model.layers.14.self_attn.q_proj.weight ADDED
@@ -0,0 +1,3 @@
+ version https://git-lfs.github.com/spec/v1
+ oid sha256:0c33e18c2a7f08d78bbca905dd25760c4786018f5cbcb2af61458849dab0db78
+ size 67109765
checkpoint/pytorch_model.bin/p128.model.layers.14.self_attn.k_proj.weight ADDED
@@ -0,0 +1,3 @@
+ version https://git-lfs.github.com/spec/v1
+ oid sha256:ac9c7b7c1923bf7170e027799ffec98431d2410f3e21cf0ac0ee71ccad1cfcac
+ size 67109765
checkpoint/pytorch_model.bin/p129.model.layers.14.self_attn.v_proj.weight ADDED
@@ -0,0 +1,3 @@
+ version https://git-lfs.github.com/spec/v1
+ oid sha256:0a8705b1be9308648a773262592e2f485e342c14b0dc6fe00f469d5013452f37
+ size 67109765
checkpoint/pytorch_model.bin/p13.model.layers.1.self_attn.o_proj.weight ADDED
@@ -0,0 +1,3 @@
+ version https://git-lfs.github.com/spec/v1
+ oid sha256:5459e021628682d6fda08bd19517f42c6e2bbb82ba0311b23dd7fa4e464088a4
+ size 67109759
checkpoint/pytorch_model.bin/p130.model.layers.14.self_attn.o_proj.weight ADDED
@@ -0,0 +1,3 @@
+ version https://git-lfs.github.com/spec/v1
+ oid sha256:200d5687c9002ea851154068c9510f32762ca656b60371aecfec5e9ea2baf1de
+ size 67109765
checkpoint/pytorch_model.bin/p131.model.layers.14.mlp.gate_proj.weight ADDED
@@ -0,0 +1,3 @@
+ version https://git-lfs.github.com/spec/v1
+ oid sha256:738e61e61a4272d959fcac765e7f49486f1d1884a2e8bf39c3eb6abfa593fb55
+ size 180355964
checkpoint/pytorch_model.bin/p132.model.layers.14.mlp.up_proj.weight ADDED
@@ -0,0 +1,3 @@
+ version https://git-lfs.github.com/spec/v1
+ oid sha256:606c8663437f5b7bcfb26a787f1a02d8cac12e53afda7364380fe8855c41ed54
+ size 180355958
checkpoint/pytorch_model.bin/p133.model.layers.14.mlp.down_proj.weight ADDED
@@ -0,0 +1,3 @@
+ version https://git-lfs.github.com/spec/v1
+ oid sha256:07a1d9df07af3ca4ec132478d8eb1ace4d022328cde08f6991236db94c808ce3
+ size 180355964
checkpoint/pytorch_model.bin/p134.model.layers.14.input_layernorm.weight ADDED
@@ -0,0 +1,3 @@
+ version https://git-lfs.github.com/spec/v1
+ oid sha256:994d9105c9ca1564d496007338f9079bc49b99cbad10c472b91fba5921ef3e3a
+ size 17282
checkpoint/pytorch_model.bin/p135.model.layers.14.post_attention_layernorm.weight ADDED
@@ -0,0 +1,3 @@
+ version https://git-lfs.github.com/spec/v1
+ oid sha256:4b7ed46e4eafb9182394036bdc4980359ca7770a8fd7d739eedf232ecd89de23
+ size 17309
checkpoint/pytorch_model.bin/p136.model.layers.15.self_attn.q_proj.weight ADDED
@@ -0,0 +1,3 @@
+ version https://git-lfs.github.com/spec/v1
+ oid sha256:f6249d2f4e88878b06a8068d33b09669b7aa051eadf435e6131470c0aeaad2d6
+ size 67109765
checkpoint/pytorch_model.bin/p137.model.layers.15.self_attn.k_proj.weight ADDED
@@ -0,0 +1,3 @@
+ version https://git-lfs.github.com/spec/v1
+ oid sha256:668134646677618e11423541bca44289eb7ebddeda6ce629fb1cd59626b8b130
+ size 67109765
checkpoint/pytorch_model.bin/p138.model.layers.15.self_attn.v_proj.weight ADDED
@@ -0,0 +1,3 @@
+ version https://git-lfs.github.com/spec/v1
+ oid sha256:808073f5df28689094358ea813a9f4efc7b2365f7fcc8ba22c31cb704da3a5d7
+ size 67109765
checkpoint/pytorch_model.bin/p139.model.layers.15.self_attn.o_proj.weight ADDED
@@ -0,0 +1,3 @@
+ version https://git-lfs.github.com/spec/v1
+ oid sha256:81a6f5169f98e702934deed0bb102d1fc95bbefc338e68dd5421264cb842ab28
+ size 67109765