nglain committed
Commit f0ac316 (1 parent: a563178)

Upload folder using huggingface_hub (#1)


- 39b0efcb50baf668a0cb3371efdd7de25e50d4b496d48f0eefafe28efd438f6f (3e4978b1bde92cbba7720090301db4ab458aac88)
- b0a67705c6b22aac3208be327cf5305ea0a9abb18c064565d6d48a032d7873b0 (57162cdc92edcfb0c5c92f822ee4f01005a5e930)
- 9c8e4c17cf7b993c807a99be2317e04a1420237a79a9eb614d3693734272c3bc (5f8a8fc205141635db140aa1e6d318a42e4d1065)
- 8382e96c9f44901bbffed491d97d44b1e1469db21beb81865d87645ed93bf05a (7126ee88e714c7b31c9f18e49b2a301b644744ec)
- aab5fca377fb7302aae490982ba2baa68f24af888b8aef4edd5558b828ebbeb6 (f2deac70e268d4bc0e5a20ad61f0875bf9d24514)
- 691588536f9417003f4028fa232ec7a789903af62842d5489cb4aaadfbb0f43a (de82def9df099072df0078f8e8c969ba75ad235a)
- 825f4e154a183fed1f8406ca80da247e62f5b42b4d1e6da89a9b591bb64f9d48 (29dc3e8a996dfa7beed9630a229a42a9e94bfce9)
- a9b234bd9306a181228e46f180d90425118ba49130931f1da19d940f0ede5393 (b2a4b621ed09ca8663f54c97296809582e9966af)
- 702cbe82ea98902d59a6c6c49adf1665df8540d639436aa1d5d3a79ad04e20f9 (9aa054204d8df94ff98dcc49cb2618b6170513cc)
- 401ae63ef3c71a8dc18e7d3846ab987f24b61305dd9833657eaaa2c7b06f489e (347ab02f86f1ca37d4018235f7680ec345160a18)
- 1909173fbdd8b7d686b857fb12008023f37d8076c0c33b22ad9e465be0c8e2c0 (dcc9b724e1d774bc7ae5e809f33b79714c0d40b7)
- 4c7b4790510a7363619bf736ae30f2bd2f0a70a11e468b519ddfc1dca6cde680 (801847209f9ab4945ce90d0e8b32bae94958b654)
- 1e6685ec7d19fa0a8d0acb0a2d101735909abeaa55c597ad7141c078515ecf59 (590addb2768e4dca687fa1e5f41d98e7c34f5292)
- e749c2286dde8d2b7322d7ab037c00441eacb889e33e1c2ba86fe7af9a52963f (1710b600b9b8db84f2fb7bcb13bce80c8dedad82)
- 2adefe61fc557c2488bcbd42f918df278fa6d9b62a1a107e679375c6ae40e9c3 (f15e70ceab2e7aadd419f5c552551706a9327cab)
- 35dbc37480563e67eebdd6d411d33b6d545c553a4598cf796ca56599f6b3c18a (b52e3b465b9ddc85f617c90c5f41c3fbebf5fe80)
- b2ebe7ea46b615b153e703e4ca40024d9805e6eb66469b3ac3b61bc2b37c9c31 (b4b10074bcf7c28edd5132fef3cabf4a880d3c2b)
- fc67adf5c64b7d6fe43bbc7b49b42fb4a945d03fdd88654ca97f236a2187c66b (4f39d2f704723849ade4b2987a97e2274fc7b38c)
- 01aca72483c17a8b6ba79f446587ecb73e8b60f3aa8b1592a1555e3eed6d2a51 (51492794c257301331afe54e9afb2c2a737d99a3)
- 6015e5980ad463a5d6b093dcf83c4a29020926d7a268232aa3c8348fa2964472 (91dfab34fb5e9fcbd4a400961468429bd7ba4e3f)
- 137178b078f0566d0e64e3c9647416137927daf997f2f078421c42423b31287d (e64094f990bfab0ef81a2db169b34bfd1f86bd7c)
- 547cbf466fdd1e98f5e0668f68c672a7b43cbedefbfd7b62faec585fc2945adb (1f203a74b35a95dbf304f682ca518e74e657c481)
- 5b8224a871903ce4b7719834e539f48ad0c34721e3c56bf7490d73a81ce456bd (3c7ed3458c8706cf1f08ba8f9db999d1f5cf45ab)
- b1b912b7440f4ec92b9bfd87fc9a713d1618dad95d4235210deb18fae9c49738 (63e5a73de957036dd0cae4fea6ac0d47f1c67cf1)
- 56b813290f2a68025626d4b4a0c3a9128acf484d9f007529a7c770ea63944e3b (e1ba526e8a3dba5397f2ec571ef1316de1c98625)
- b5babb781848059e2301d5e3fce50264129ece895c32c33990cc90b3436e80f7 (0764a6a9296a27366d48a30d07daaf2c503ab5c4)
- 717dac0467e5dc6be817cbb39f783e7ca2cc6a267065420633962ff2a0324f0b (d6f109fe6446c1897e07661c8853f54708350af3)

.ipynb_checkpoints/config-checkpoint.json ADDED
@@ -0,0 +1,34 @@
+ {
+   "_name_or_path": "models/mistral_large_ft",
+   "architectures": [
+     "MistralForCausalLM"
+   ],
+   "attention_dropout": 0.0,
+   "bos_token_id": 1,
+   "eos_token_id": 2,
+   "head_dim": 128,
+   "hidden_act": "silu",
+   "hidden_size": 12288,
+   "initializer_range": 0.02,
+   "intermediate_size": 28672,
+   "max_position_embeddings": 131072,
+   "model_type": "mistral",
+   "num_attention_heads": 96,
+   "num_hidden_layers": 88,
+   "num_key_value_heads": 8,
+   "quantization_config": {
+     "activation_scheme": "dynamic",
+     "ignored_layers": [
+       "lm_head"
+     ],
+     "quant_method": "fp8"
+   },
+   "rms_norm_eps": 1e-05,
+   "rope_theta": 1000000.0,
+   "sliding_window": null,
+   "tie_word_embeddings": false,
+   "torch_dtype": "float32",
+   "transformers_version": "4.44.2",
+   "use_cache": true,
+   "vocab_size": 32768
+ }
config.json ADDED
@@ -0,0 +1,34 @@
+ {
+   "_name_or_path": "models/mistral_large_ft",
+   "architectures": [
+     "MistralForCausalLM"
+   ],
+   "attention_dropout": 0.0,
+   "bos_token_id": 1,
+   "eos_token_id": 2,
+   "head_dim": 128,
+   "hidden_act": "silu",
+   "hidden_size": 12288,
+   "initializer_range": 0.02,
+   "intermediate_size": 28672,
+   "max_position_embeddings": 131072,
+   "model_type": "mistral",
+   "num_attention_heads": 96,
+   "num_hidden_layers": 88,
+   "num_key_value_heads": 8,
+   "quantization_config": {
+     "activation_scheme": "dynamic",
+     "ignored_layers": [
+       "lm_head"
+     ],
+     "quant_method": "fp8"
+   },
+   "rms_norm_eps": 1e-05,
+   "rope_theta": 1000000.0,
+   "sliding_window": null,
+   "tie_word_embeddings": false,
+   "torch_dtype": "float32",
+   "transformers_version": "4.44.2",
+   "use_cache": true,
+   "vocab_size": 32768
+ }
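
The config above (duplicated verbatim in the .ipynb_checkpoints copy) describes a Mistral-architecture causal LM with 88 layers, hidden size 12288, grouped-query attention (96 query heads, 8 KV heads) and FP8 weight quantization that leaves lm_head unquantized. A minimal sketch of inspecting these fields with transformers' AutoConfig; the repo id below is a placeholder, since the actual repository name is not shown in this diff:

```python
# Minimal sketch, assuming a placeholder repo id; substitute the actual
# repository name or a local clone path.
from transformers import AutoConfig

config = AutoConfig.from_pretrained("your-namespace/mistral-large-fp8")

# Values taken from the config.json added in this commit.
print(config.model_type)           # "mistral"
print(config.hidden_size)          # 12288
print(config.num_hidden_layers)    # 88
print(config.num_key_value_heads)  # 8 -> grouped-query attention
print(config.quantization_config)  # {"quant_method": "fp8", "activation_scheme": "dynamic", ...}
```

Because quant_method is fp8 with dynamic activation scaling, the shards are typically served with an FP8-capable runtime such as vLLM rather than loaded as plain float32 weights.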
generation_config.json ADDED
@@ -0,0 +1,6 @@
+ {
+   "_from_model_config": true,
+   "bos_token_id": 1,
+   "eos_token_id": 2,
+   "transformers_version": "4.44.2"
+ }
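
generation_config.json only pins the default BOS/EOS token ids used at generation time. A small sketch of how it surfaces through the transformers API; the path is a placeholder:

```python
# Sketch only; "path/to/model" stands in for the repository or a local clone.
from transformers import GenerationConfig

gen_config = GenerationConfig.from_pretrained("path/to/model")
print(gen_config.bos_token_id)  # 1
print(gen_config.eos_token_id)  # 2
```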
model-00001-of-00026.safetensors ADDED
@@ -0,0 +1,3 @@
+ version https://git-lfs.github.com/spec/v1
+ oid sha256:136a5c304a0f126a0c90e936c194fa395c20902c9b67f5818d51e2e92f4f59dd
+ size 4706210552
model-00002-of-00026.safetensors ADDED
@@ -0,0 +1,3 @@
+ version https://git-lfs.github.com/spec/v1
+ oid sha256:1bad331c0e38b661057a3db995a35a4acff2d2e47ba2fd0a7a9dcfa123230878
+ size 4857304836
model-00003-of-00026.safetensors ADDED
@@ -0,0 +1,3 @@
+ version https://git-lfs.github.com/spec/v1
+ oid sha256:2ed1e79aafc2cab751351ef12aa7beb5e895f090b9fe3030097539646df7ad87
+ size 4832238232
model-00004-of-00026.safetensors ADDED
@@ -0,0 +1,3 @@
+ version https://git-lfs.github.com/spec/v1
+ oid sha256:ba0366d06e1495ee4875181ac66dd8beab10d6e32be544a650a9ef17faea6c38
+ size 4857304868
model-00005-of-00026.safetensors ADDED
@@ -0,0 +1,3 @@
+ version https://git-lfs.github.com/spec/v1
+ oid sha256:b523fdab23a4f63c6813978a39fa452e40d0a0dcc2cc9aa53f3f329a0a12eea6
+ size 4832238296
model-00006-of-00026.safetensors ADDED
@@ -0,0 +1,3 @@
+ version https://git-lfs.github.com/spec/v1
+ oid sha256:eeee838b5e2436d906bb9dcb9adcee79fed8d7661484c0bf15e9fd29987f6b50
+ size 4857304884
model-00007-of-00026.safetensors ADDED
@@ -0,0 +1,3 @@
+ version https://git-lfs.github.com/spec/v1
+ oid sha256:7a6f223694e474dddbe352f34b759a14beb9879608407628f59e2bfa1ff39222
+ size 4832238296
model-00008-of-00026.safetensors ADDED
@@ -0,0 +1,3 @@
+ version https://git-lfs.github.com/spec/v1
+ oid sha256:17e74ecac02011f93942b3481c27feaafc4b1649e5b3f446317cd2de1145f545
+ size 4857304884
model-00009-of-00026.safetensors ADDED
@@ -0,0 +1,3 @@
+ version https://git-lfs.github.com/spec/v1
+ oid sha256:70b4f918eaeab46fcc81d6bd5c201f96e889f82ace5dcd9d2319e895aa91a12c
+ size 4832238296
model-00010-of-00026.safetensors ADDED
@@ -0,0 +1,3 @@
+ version https://git-lfs.github.com/spec/v1
+ oid sha256:f9bce7ec16c02dde92bc384874974e638b5311ea1ecb049cb72447a5c05cfcc5
+ size 4857304884
model-00011-of-00026.safetensors ADDED
@@ -0,0 +1,3 @@
+ version https://git-lfs.github.com/spec/v1
+ oid sha256:f7e7900b8a451e9042f1c1e35dfa0f148eb3440b79bae0cbd2274d926a4e6ac1
+ size 4832238296
model-00012-of-00026.safetensors ADDED
@@ -0,0 +1,3 @@
+ version https://git-lfs.github.com/spec/v1
+ oid sha256:fee79fa0e6ebc8ce4a3d61df3cd47079d4e14e7bc9c09a9abee256911171c8e5
+ size 4857304884
model-00013-of-00026.safetensors ADDED
@@ -0,0 +1,3 @@
+ version https://git-lfs.github.com/spec/v1
+ oid sha256:ecee10ac82396b88c4af6b8f15e84cfe9036c8dad00d09c0b8ccb69eafe56daf
+ size 4832238296
model-00014-of-00026.safetensors ADDED
@@ -0,0 +1,3 @@
+ version https://git-lfs.github.com/spec/v1
+ oid sha256:b113e50cb9fc009ef1424d262f3a4b2b146ff569197e7135e1cf4cd648fc1190
+ size 4857304884
model-00015-of-00026.safetensors ADDED
@@ -0,0 +1,3 @@
+ version https://git-lfs.github.com/spec/v1
+ oid sha256:39be04484eeaffe8c8c6c212d07f8d0b939acedae7b759bda4e07d3381900312
+ size 4832238296
model-00016-of-00026.safetensors ADDED
@@ -0,0 +1,3 @@
+ version https://git-lfs.github.com/spec/v1
+ oid sha256:dfb7758ab0b06e6c62aa3a6a1cc4f7cfd2dfac76a84bf27445073bc531fb3853
+ size 4857304884
model-00017-of-00026.safetensors ADDED
@@ -0,0 +1,3 @@
+ version https://git-lfs.github.com/spec/v1
+ oid sha256:97ac8adeea5bf427ca58bd935d1eab08a919748390f45401a13f12fde9ca4674
+ size 4832238296
model-00018-of-00026.safetensors ADDED
@@ -0,0 +1,3 @@
+ version https://git-lfs.github.com/spec/v1
+ oid sha256:991f10b9b8850f9899befa44bd8f01ed49e67beaaabf052b20cee8487b79c769
+ size 4857304884
model-00019-of-00026.safetensors ADDED
@@ -0,0 +1,3 @@
+ version https://git-lfs.github.com/spec/v1
+ oid sha256:98346b57dbd1505e713a8dd3c711a94dc594bba5d403d310da863078d95d5974
+ size 4832238296
model-00020-of-00026.safetensors ADDED
@@ -0,0 +1,3 @@
+ version https://git-lfs.github.com/spec/v1
+ oid sha256:d92bb71d73a94090ee55455a07e453eae825dad0916d25dc9f2af1f37aba5bb3
+ size 4857304884
model-00021-of-00026.safetensors ADDED
@@ -0,0 +1,3 @@
+ version https://git-lfs.github.com/spec/v1
+ oid sha256:af14185dc803823728e36e286871527407bb78ba7d32b8c7cb9443ee25528f62
+ size 4832238296
model-00022-of-00026.safetensors ADDED
@@ -0,0 +1,3 @@
+ version https://git-lfs.github.com/spec/v1
+ oid sha256:2388f5bb820bb992532d4792c9bcb6a3a8bc6835ce8c4c8fb216b2392edc6007
+ size 4857304884
model-00023-of-00026.safetensors ADDED
@@ -0,0 +1,3 @@
+ version https://git-lfs.github.com/spec/v1
+ oid sha256:5fb857195622689c8dc98f573c9c4633372d8311fd9c13077ef448d496929208
+ size 4832238296
model-00024-of-00026.safetensors ADDED
@@ -0,0 +1,3 @@
+ version https://git-lfs.github.com/spec/v1
+ oid sha256:f375aba0537974ad83a4e56f0e5aa4f8be917f516844d2b874918109e78eab5b
+ size 4857304884
model-00025-of-00026.safetensors ADDED
@@ -0,0 +1,3 @@
+ version https://git-lfs.github.com/spec/v1
+ oid sha256:f806f76dbe2d8a90744384c9b9a72990e4a7b5dd35e5b8abccd9a34564a51851
+ size 4832238296
model-00026-of-00026.safetensors ADDED
@@ -0,0 +1,3 @@
+ version https://git-lfs.github.com/spec/v1
+ oid sha256:934722be60dc796d3a78f2780de59577c97d1ac90a12c381d547396e22792f25
+ size 4051946472
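
Each model-000XX-of-00026.safetensors entry above is a Git LFS pointer rather than the weights themselves: the three lines record the pointer spec version, the SHA-256 of the real file, and its size in bytes. The 26 shards sum to roughly 125 GB, consistent with a ~123B-parameter model stored at about one byte per weight in FP8 plus the unquantized lm_head. A sketch of verifying a downloaded shard against its pointer; the local filename is assumed to match the path above:

```python
# Sketch: check a downloaded shard against the oid recorded in its LFS pointer.
import hashlib

def sha256_of(path: str, chunk_size: int = 1 << 20) -> str:
    """Stream the file in 1 MiB chunks so multi-GB shards never sit in memory."""
    digest = hashlib.sha256()
    with open(path, "rb") as f:
        while chunk := f.read(chunk_size):
            digest.update(chunk)
    return digest.hexdigest()

# oid copied from the pointer for model-00001-of-00026.safetensors above.
expected = "136a5c304a0f126a0c90e936c194fa395c20902c9b67f5818d51e2e92f4f59dd"
actual = sha256_of("model-00001-of-00026.safetensors")
print("OK" if actual == expected else f"MISMATCH: {actual}")
```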
model.safetensors.index.json ADDED
The diff for this file is too large to render.
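
model.safetensors.index.json is the standard sharded-checkpoint index: a metadata block with the total byte size and a weight_map from each tensor name to the shard file that stores it. A sketch of reading it; the lm_head key is an assumption about the tensor naming, not taken from this diff, since the file's contents are not rendered here:

```python
# Sketch: reading the shard index that accompanies the 26 safetensors files.
import json

with open("model.safetensors.index.json") as f:
    index = json.load(f)

print(index["metadata"]["total_size"])        # total bytes across all shards
print(index["weight_map"]["lm_head.weight"])  # assumed tensor name; maps to one shard file
```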
 
special_tokens_map.json ADDED
@@ -0,0 +1,23 @@
+ {
+   "bos_token": {
+     "content": "<s>",
+     "lstrip": false,
+     "normalized": false,
+     "rstrip": false,
+     "single_word": false
+   },
+   "eos_token": {
+     "content": "</s>",
+     "lstrip": false,
+     "normalized": false,
+     "rstrip": false,
+     "single_word": false
+   },
+   "unk_token": {
+     "content": "<unk>",
+     "lstrip": false,
+     "normalized": false,
+     "rstrip": false,
+     "single_word": false
+   }
+ }
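
special_tokens_map.json declares <s>, </s>, and <unk> as the BOS, EOS, and UNK tokens, matching bos_token_id 1 and eos_token_id 2 in config.json. A sketch of how these surface through the tokenizer; the path is again a placeholder:

```python
# Sketch only; "path/to/model" stands in for the repository or a local clone.
from transformers import AutoTokenizer

tokenizer = AutoTokenizer.from_pretrained("path/to/model")
print(tokenizer.bos_token, tokenizer.bos_token_id)  # "<s>", 1
print(tokenizer.eos_token, tokenizer.eos_token_id)  # "</s>", 2
print(tokenizer.unk_token)                          # "<unk>"
```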
tokenizer.json ADDED
The diff for this file is too large to render.
 
tokenizer.model ADDED
@@ -0,0 +1,3 @@
+ version https://git-lfs.github.com/spec/v1
+ oid sha256:59f95e28944c062244741268596badc900df86c7f5ded05088d2da22a7379e06
+ size 587583
tokenizer_config.json ADDED
The diff for this file is too large to render.