Lyte committed on
Commit d7ce18a · verified · 1 Parent(s): 8af4a30

Upload model and tokenizer

added_tokens.json ADDED
@@ -0,0 +1,5 @@
+ {
+   "\n\n### Instruction:\n": 60512,
+   "\n\n### Response:\n": 60513,
+   "<pad>": 60514
+ }
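added_tokens.json pins three extra vocabulary entries to fixed ids. A minimal sketch of how they surface once the tokenizer is loaded with transformers (the repo id below is a placeholder assumption; substitute the actual model repository):

```python
from transformers import AutoTokenizer

# Hypothetical repo id; replace with the real one.
tokenizer = AutoTokenizer.from_pretrained("user/model")

# The entries in added_tokens.json map token strings to fixed ids,
# so these lookups should return 60512, 60513, and 60514.
print(tokenizer.convert_tokens_to_ids("\n\n### Instruction:\n"))
print(tokenizer.convert_tokens_to_ids("\n\n### Response:\n"))
print(tokenizer.convert_tokens_to_ids("<pad>"))
```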
best_model.safetensors ADDED
@@ -0,0 +1,3 @@
+ version https://git-lfs.github.com/spec/v1
+ oid sha256:5852a3ec118bab67b1630652295c9943ae18afeb0638cb18942c17d0dd3118b9
+ size 613600416
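The .safetensors files in this commit are stored as Git LFS pointers: the three lines above are the pointer itself (spec version, SHA-256 of the object, size in bytes), not the weights. A minimal sketch for checking a downloaded copy against the pointer, where the local filename is the only assumption:

```python
import hashlib

# SHA-256 taken from the LFS pointer above.
expected = "5852a3ec118bab67b1630652295c9943ae18afeb0638cb18942c17d0dd3118b9"

h = hashlib.sha256()
with open("best_model.safetensors", "rb") as f:  # assumed local path
    for chunk in iter(lambda: f.read(1 << 20), b""):
        h.update(chunk)

assert h.hexdigest() == expected, "checksum mismatch"
```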
checkpoint_4250.safetensors ADDED
@@ -0,0 +1,3 @@
+ version https://git-lfs.github.com/spec/v1
+ oid sha256:d5070adc5ce70b851c8f06fdac3600dde016fce8858675034c35fa0fcab1c274
+ size 613600416
checkpoint_4500.safetensors ADDED
@@ -0,0 +1,3 @@
+ version https://git-lfs.github.com/spec/v1
+ oid sha256:1a44e80b9f9e369f18596141ee80b1260c94bf29bacb95c69bd0cfd10bc331f3
+ size 613600416
checkpoint_4750.safetensors ADDED
@@ -0,0 +1,3 @@
+ version https://git-lfs.github.com/spec/v1
+ oid sha256:5d43088ae2ad0df8a9268841ed596112337163e65b1b0193c8c4be15c8ea6b2a
+ size 613600416
model_4908_100000_4.34908.safetensors ADDED
@@ -0,0 +1,3 @@
+ version https://git-lfs.github.com/spec/v1
+ oid sha256:df5bee9c4c2a565ff89fa3ec992262a01a7802296be52c87ec156fe3a0da6506
+ size 613600416
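All five weight files share the same 613,600,416-byte size, consistent with successive checkpoints of one model. A hedged sketch of inspecting one of them with the safetensors library (the filename and CPU device choice are assumptions):

```python
from safetensors.torch import load_file

# Load the tensors from one uploaded checkpoint onto CPU.
state_dict = load_file("best_model.safetensors", device="cpu")

# Report tensor and total parameter counts.
total = sum(t.numel() for t in state_dict.values())
print(f"{len(state_dict)} tensors, {total:,} parameters")
```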
special_tokens_map.json ADDED
@@ -0,0 +1,34 @@
+ {
+   "additional_special_tokens": [
+     "\n\n### Instruction:\n",
+     "\n\n### Response:\n"
+   ],
+   "bos_token": {
+     "content": "<s>",
+     "lstrip": false,
+     "normalized": true,
+     "rstrip": false,
+     "single_word": false
+   },
+   "eos_token": {
+     "content": "</s>",
+     "lstrip": false,
+     "normalized": true,
+     "rstrip": false,
+     "single_word": false
+   },
+   "pad_token": {
+     "content": "<pad>",
+     "lstrip": false,
+     "normalized": false,
+     "rstrip": false,
+     "single_word": false
+   },
+   "unk_token": {
+     "content": "<unk>",
+     "lstrip": false,
+     "normalized": true,
+     "rstrip": false,
+     "single_word": false
+   }
+ }
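special_tokens_map.json registers the instruction/response markers as additional special tokens. A hedged sketch of prompt assembly in the format those markers imply (the repo id and the exact training template are assumptions, not confirmed by this commit):

```python
from transformers import AutoTokenizer

tok = AutoTokenizer.from_pretrained("user/model")  # hypothetical repo id

INSTRUCTION = "\n\n### Instruction:\n"  # token id 60512
RESPONSE = "\n\n### Response:\n"        # token id 60513

# Each marker is a single special token, so it encodes to one id
# rather than being split into subword pieces.
prompt = f"{INSTRUCTION}Summarize the passage below.{RESPONSE}"
ids = tok(prompt)["input_ids"]
```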
tokenizer.json ADDED
The diff for this file is too large to render. See raw diff
 
tokenizer.model ADDED
@@ -0,0 +1,3 @@
+ version https://git-lfs.github.com/spec/v1
+ oid sha256:3197ff342f6cf47c17d8c71bebdd2463aad9a145b78c6310b9841ac690a848b1
+ size 940840
tokenizer_config.json ADDED
@@ -0,0 +1,71 @@
+ {
+   "add_bos_token": false,
+   "add_eos_token": false,
+   "add_prefix_space": null,
+   "added_tokens_decoder": {
+     "0": {
+       "content": "<unk>",
+       "lstrip": false,
+       "normalized": true,
+       "rstrip": false,
+       "single_word": false,
+       "special": true
+     },
+     "1": {
+       "content": "<s>",
+       "lstrip": false,
+       "normalized": true,
+       "rstrip": false,
+       "single_word": false,
+       "special": true
+     },
+     "2": {
+       "content": "</s>",
+       "lstrip": false,
+       "normalized": true,
+       "rstrip": false,
+       "single_word": false,
+       "special": true
+     },
+     "60512": {
+       "content": "\n\n### Instruction:\n",
+       "lstrip": false,
+       "normalized": false,
+       "rstrip": false,
+       "single_word": false,
+       "special": true
+     },
+     "60513": {
+       "content": "\n\n### Response:\n",
+       "lstrip": false,
+       "normalized": false,
+       "rstrip": false,
+       "single_word": false,
+       "special": true
+     },
+     "60514": {
+       "content": "<pad>",
+       "lstrip": false,
+       "normalized": false,
+       "rstrip": false,
+       "single_word": false,
+       "special": true
+     }
+   },
+   "additional_special_tokens": [
+     "\n\n### Instruction:\n",
+     "\n\n### Response:\n"
+   ],
+   "bos_token": "<s>",
+   "clean_up_tokenization_spaces": false,
+   "eos_token": "</s>",
+   "extra_special_tokens": {},
+   "legacy": true,
+   "model_max_length": 1000000000000000019884624838656,
+   "pad_token": "<pad>",
+   "sp_model_kwargs": {},
+   "tokenizer_class": "LlamaTokenizer",
+   "unk_token": "<unk>",
+   "use_default_system_prompt": false,
+   "use_fast": true
+ }
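tokenizer_config.json pins the class to LlamaTokenizer and disables automatic BOS/EOS insertion (add_bos_token and add_eos_token are both false). A sketch of loading it and adding EOS manually where the training setup expects one (the repo id is a placeholder):

```python
from transformers import AutoTokenizer

# Hypothetical repo id; replace with the real one.
tok = AutoTokenizer.from_pretrained("user/model")

# With add_bos_token/add_eos_token false, encoding inserts no <s>/</s>
# automatically, so append the EOS id yourself when needed.
ids = tok("Hello")["input_ids"]
ids_with_eos = ids + [tok.eos_token_id]
```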