Upload 24 files
- .gitattributes +1 -0
- CodeNinja-1.0-OpenChat-7B-q8f16_1-metal_x86_64.dylib +3 -0
- params_shard_111.bin +3 -0
- params_shard_112.bin +3 -0
- params_shard_113.bin +3 -0
- params_shard_114.bin +3 -0
- params_shard_115.bin +3 -0
- params_shard_116.bin +3 -0
- params_shard_117.bin +3 -0
- params_shard_118.bin +3 -0
- params_shard_119.bin +3 -0
- params_shard_120.bin +3 -0
- params_shard_121.bin +3 -0
- params_shard_122.bin +3 -0
- params_shard_123.bin +3 -0
- params_shard_124.bin +3 -0
- params_shard_125.bin +3 -0
- params_shard_126.bin +3 -0
- params_shard_127.bin +3 -0
- params_shard_128.bin +3 -0
- params_shard_129.bin +3 -0
- params_shard_130.bin +3 -0
- tokenizer.json +0 -0
- tokenizer.model +3 -0
- tokenizer_config.json +65 -0
.gitattributes
CHANGED
@@ -33,3 +33,4 @@ saved_model/**/* filter=lfs diff=lfs merge=lfs -text
 *.zip filter=lfs diff=lfs merge=lfs -text
 *.zst filter=lfs diff=lfs merge=lfs -text
 *tfevents* filter=lfs diff=lfs merge=lfs -text
+CodeNinja-1.0-OpenChat-7B-q8f16_1-metal_x86_64.dylib filter=lfs diff=lfs merge=lfs -text
CodeNinja-1.0-OpenChat-7B-q8f16_1-metal_x86_64.dylib
ADDED
@@ -0,0 +1,3 @@
+version https://git-lfs.github.com/spec/v1
+oid sha256:0db213a0b91a4db9ed97ba4c2dda99fd15a6e0607670c6e58279251d3c7b3ec4
+size 9814856
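Note: every binary added in this commit is stored as a Git LFS pointer (a three-line stub with version, oid, and size) rather than the raw bytes. As a minimal sketch (not part of the commit), a locally downloaded file can be checked against its pointer like this; the path and call site are placeholders:

```python
# Minimal sketch: verify a downloaded file against the Git LFS pointer values
# (sha256 oid and byte size) shown in the diffs above. The local path is hypothetical.
import hashlib
import os

def verify_lfs_object(path: str, expected_oid: str, expected_size: int) -> bool:
    if os.path.getsize(path) != expected_size:
        return False
    h = hashlib.sha256()
    with open(path, "rb") as f:
        for chunk in iter(lambda: f.read(1 << 20), b""):
            h.update(chunk)
    return h.hexdigest() == expected_oid

# Example using the dylib added above (assumes it was downloaded to the working directory).
print(verify_lfs_object(
    "CodeNinja-1.0-OpenChat-7B-q8f16_1-metal_x86_64.dylib",
    "0db213a0b91a4db9ed97ba4c2dda99fd15a6e0607670c6e58279251d3c7b3ec4",
    9814856,
))
```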
params_shard_111.bin
ADDED
@@ -0,0 +1,3 @@
+version https://git-lfs.github.com/spec/v1
+oid sha256:9ce3cc888a51267e15ea3eab5ac610683e877a49c068f222b2a1a50f8a70bc58
+size 30425088
params_shard_112.bin
ADDED
@@ -0,0 +1,3 @@
+version https://git-lfs.github.com/spec/v1
+oid sha256:b315bf93477b2e40b9f4f933d62b762108fcabbfca322972288b6b35e6c4e224
+size 58720256
params_shard_113.bin
ADDED
@@ -0,0 +1,3 @@
+version https://git-lfs.github.com/spec/v1
+oid sha256:8e6a86962d73e0a4f398178edfca5748a9dcbad7c44cfd12db3746c63ee97559
+size 25165824
params_shard_114.bin
ADDED
@@ -0,0 +1,3 @@
+version https://git-lfs.github.com/spec/v1
+oid sha256:7e481a4264df51cd92fda40aeefed7b9a9bc8891b5e3b806ca9923af0baed54b
+size 117440512
params_shard_115.bin
ADDED
@@ -0,0 +1,3 @@
+version https://git-lfs.github.com/spec/v1
+oid sha256:7b589d3588ddf3f4c1f8bc7bb074f136334f48d42270ef1bf36728bf9abd1084
+size 30425088
params_shard_116.bin
ADDED
@@ -0,0 +1,3 @@
+version https://git-lfs.github.com/spec/v1
+oid sha256:614df013cb440c83131af204edd595820a730b1729d04b5db397e50c8c6c4c2e
+size 58720256
params_shard_117.bin
ADDED
@@ -0,0 +1,3 @@
+version https://git-lfs.github.com/spec/v1
+oid sha256:550d25683bd014bc813a96a76e3fa3d75cfb8cda76680b188313ad6072acb68b
+size 25165824
params_shard_118.bin
ADDED
@@ -0,0 +1,3 @@
+version https://git-lfs.github.com/spec/v1
+oid sha256:14f8f9b671b5f7d656cad5cd9db5c9b45a082971189f6185cd8362c0250b6ee6
+size 117440512
params_shard_119.bin
ADDED
@@ -0,0 +1,3 @@
+version https://git-lfs.github.com/spec/v1
+oid sha256:06a5dd3c7d734deaf96ba2106f86641b907b94f7ff9a89141ef181fa40836be9
+size 30425088
params_shard_120.bin
ADDED
@@ -0,0 +1,3 @@
+version https://git-lfs.github.com/spec/v1
+oid sha256:335b97d4ddc5e198614339155b280319d22d8ada4860b60b8551f4e16b954b01
+size 58720256
params_shard_121.bin
ADDED
@@ -0,0 +1,3 @@
+version https://git-lfs.github.com/spec/v1
+oid sha256:b0914c940611a31171c1f935821e44a1d781569de5b6da5d7bc84604cbbf1efa
+size 25165824
params_shard_122.bin
ADDED
@@ -0,0 +1,3 @@
+version https://git-lfs.github.com/spec/v1
+oid sha256:da52aeb5d15c7259487ba32c68932579083575cd0b46b3b167e8ec77fb55c084
+size 117440512
params_shard_123.bin
ADDED
@@ -0,0 +1,3 @@
+version https://git-lfs.github.com/spec/v1
+oid sha256:c219feaff02218dce486f0e1ec62ea93ae7eb00af3fe76385cca1a373cfc6e44
+size 30425088
params_shard_124.bin
ADDED
@@ -0,0 +1,3 @@
+version https://git-lfs.github.com/spec/v1
+oid sha256:4a84972900430396f83881055c3bad98e874bb3e7396199a75c0c57e5edc95a4
+size 58720256
params_shard_125.bin
ADDED
@@ -0,0 +1,3 @@
+version https://git-lfs.github.com/spec/v1
+oid sha256:9725695e7f2c617a81e633524499085c1f83585edd22d0210d8fb0527fca6797
+size 25165824
params_shard_126.bin
ADDED
@@ -0,0 +1,3 @@
+version https://git-lfs.github.com/spec/v1
+oid sha256:1e31071792eccb27192fa05a1ac347a57d05c8e68584aff500904aa5997d8ff0
+size 117440512
params_shard_127.bin
ADDED
@@ -0,0 +1,3 @@
+version https://git-lfs.github.com/spec/v1
+oid sha256:a3c88ba8a0cdec6f769fad02dcb1fdd84b4f5f8505ebd5610276babdf38c701b
+size 30425088
params_shard_128.bin
ADDED
@@ -0,0 +1,3 @@
+version https://git-lfs.github.com/spec/v1
+oid sha256:e50b9e9d296ed61deee455f6a5e2bb10d562c95d97c51bcb77bab351cbbe459b
+size 58720256
params_shard_129.bin
ADDED
@@ -0,0 +1,3 @@
+version https://git-lfs.github.com/spec/v1
+oid sha256:ff095bb926aad32d8f5da8928241aca6429cd0a826990114957753680e3d8e83
+size 131080192
params_shard_130.bin
ADDED
@@ -0,0 +1,3 @@
+version https://git-lfs.github.com/spec/v1
+oid sha256:2e84de0de78eb6be57b213bd38af1aeff49d5b3d8ad271c8c92d2ea2dd1788b0
+size 20275712
tokenizer.json
ADDED
The diff for this file is too large to render.
tokenizer.model
ADDED
@@ -0,0 +1,3 @@
+version https://git-lfs.github.com/spec/v1
+oid sha256:dadfd56d766715c61d2ef780a525ab43b8e6da4de6865bda3d95fdef5e134055
+size 493443
tokenizer_config.json
ADDED
@@ -0,0 +1,65 @@
+{
+  "add_bos_token": true,
+  "add_eos_token": false,
+  "added_tokens_decoder": {
+    "0": {
+      "content": "<unk>",
+      "lstrip": false,
+      "normalized": false,
+      "rstrip": false,
+      "single_word": false,
+      "special": true
+    },
+    "1": {
+      "content": "<s>",
+      "lstrip": false,
+      "normalized": false,
+      "rstrip": false,
+      "single_word": false,
+      "special": true
+    },
+    "2": {
+      "content": "</s>",
+      "lstrip": false,
+      "normalized": false,
+      "rstrip": false,
+      "single_word": false,
+      "special": true
+    },
+    "32000": {
+      "content": "<|end_of_turn|>",
+      "lstrip": false,
+      "normalized": false,
+      "rstrip": false,
+      "single_word": false,
+      "special": false
+    },
+    "32001": {
+      "content": "<|pad_0|>",
+      "lstrip": false,
+      "normalized": false,
+      "rstrip": false,
+      "single_word": false,
+      "special": false
+    }
+  },
+  "additional_special_tokens": [
+    "<|end_of_turn|>",
+    "<|pad_0|>"
+  ],
+  "bos_token": "<s>",
+  "chat_template": "{{ bos_token }}{% for message in messages %}{{ 'GPT4 Correct ' + message['role'].title() + ': ' + message['content'] + '<|end_of_turn|>'}}{% endfor %}{% if add_generation_prompt %}{{ 'GPT4 Correct Assistant:' }}{% endif %}",
+  "clean_up_tokenization_spaces": false,
+  "eos_token": "</s>",
+  "legacy": true,
+  "sep_embed": true,
+  "model_max_length": 1000000000000000019884624838656,
+  "pad_token": "</s>",
+  "sp_model_kwargs": {},
+  "spaces_between_special_tokens": false,
+  "tokenizer_class": "LlamaTokenizer",
+  "trust_remote_code": false,
+  "unk_token": "<unk>",
+  "use_default_system_prompt": true,
+  "use_fast": true
+}
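The chat_template added above encodes the OpenChat-style "GPT4 Correct" prompt format. As a minimal sketch (not part of the upload), the template can be rendered with jinja2 to preview the prompt layout; the example messages below are hypothetical:

```python
# Minimal sketch: render the chat_template from tokenizer_config.json with jinja2
# to see the prompt it produces. The messages list is a made-up example.
from jinja2 import Template

chat_template = (
    "{{ bos_token }}{% for message in messages %}"
    "{{ 'GPT4 Correct ' + message['role'].title() + ': ' + message['content'] + '<|end_of_turn|>'}}"
    "{% endfor %}"
    "{% if add_generation_prompt %}{{ 'GPT4 Correct Assistant:' }}{% endif %}"
)

prompt = Template(chat_template).render(
    bos_token="<s>",
    messages=[{"role": "user", "content": "Write a hello-world in Rust."}],
    add_generation_prompt=True,
)
print(prompt)
# <s>GPT4 Correct User: Write a hello-world in Rust.<|end_of_turn|>GPT4 Correct Assistant:
```

In practice the same result comes from `tokenizer.apply_chat_template(...)` in transformers, which reads this template from the config; the standalone rendering is only shown here to make the format visible in the diff.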