ruihanglai committed
Commit cbecde0
1 Parent(s): f546be0
This view is limited to 50 files because the commit contains too many changes; see the raw diff for the full change set.
added_tokens.json ADDED
@@ -0,0 +1,4 @@
+ {
+   "<image>": 32000,
+   "<pad>": 32001
+ }
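
added_tokens.json registers the two special tokens LLaVA adds on top of the base Llama vocabulary: the image placeholder <image> at id 32000 (the image_token_index referenced in mlc-chat-config.json below) and <pad> at id 32001 (the pad_token_id). As a quick sanity check, a tokenizer loaded from a local copy of this repository should report exactly these ids; the sketch below assumes the transformers package is installed and uses a placeholder path.

# Sketch: verify the added special tokens. "./llava-q0f16-MLC" is a placeholder
# for a local clone of this repository (which contains the tokenizer files).
from transformers import AutoTokenizer

tok = AutoTokenizer.from_pretrained("./llava-q0f16-MLC")
print(tok.convert_tokens_to_ids("<image>"))  # expected: 32000
print(tok.convert_tokens_to_ids("<pad>"))    # expected: 32001
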
mlc-chat-config.json ADDED
@@ -0,0 +1,176 @@
+ {
+   "version": "0.1.0",
+   "model_type": "llava",
+   "quantization": "q0f16",
+   "model_config": {
+     "image_token_index": 32000,
+     "text_config": {
+       "hidden_size": 4096,
+       "intermediate_size": 11008,
+       "num_attention_heads": 32,
+       "num_hidden_layers": 32,
+       "rms_norm_eps": 1e-05,
+       "vocab_size": 32000,
+       "position_embedding_base": 10000.0,
+       "context_window_size": 4096,
+       "prefill_chunk_size": 2048,
+       "num_key_value_heads": 32,
+       "head_dim": 128,
+       "tensor_parallel_shards": 1,
+       "max_batch_size": 1,
+       "kwargs": {
+         "_name_or_path": "lmsys/vicuna-7b-v1.5",
+         "architectures": [
+           "LlamaForCausalLM"
+         ],
+         "model_type": "llama",
+         "torch_dtype": "float16",
+         "hidden_act": "silu",
+         "initializer_range": 0.02,
+         "pretraining_tp": 1,
+         "use_cache": true,
+         "rope_scaling": null,
+         "attention_bias": false,
+         "attention_dropout": 0.0,
+         "mlp_bias": false,
+         "return_dict": true,
+         "output_hidden_states": false,
+         "output_attentions": false,
+         "torchscript": false,
+         "use_bfloat16": false,
+         "tf_legacy_loss": false,
+         "pruned_heads": {},
+         "tie_word_embeddings": false,
+         "chunk_size_feed_forward": 0,
+         "is_encoder_decoder": false,
+         "is_decoder": false,
+         "cross_attention_hidden_size": null,
+         "add_cross_attention": false,
+         "tie_encoder_decoder": false,
+         "max_length": 20,
+         "min_length": 0,
+         "do_sample": false,
+         "early_stopping": false,
+         "num_beams": 1,
+         "num_beam_groups": 1,
+         "diversity_penalty": 0.0,
+         "temperature": 1.0,
+         "top_k": 50,
+         "top_p": 1.0,
+         "typical_p": 1.0,
+         "repetition_penalty": 1.0,
+         "length_penalty": 1.0,
+         "no_repeat_ngram_size": 0,
+         "encoder_no_repeat_ngram_size": 0,
+         "bad_words_ids": null,
+         "num_return_sequences": 1,
+         "output_scores": false,
+         "return_dict_in_generate": false,
+         "forced_bos_token_id": null,
+         "forced_eos_token_id": null,
+         "remove_invalid_values": false,
+         "exponential_decay_length_penalty": null,
+         "suppress_tokens": null,
+         "begin_suppress_tokens": null,
+         "finetuning_task": null,
+         "id2label": {
+           "0": "LABEL_0",
+           "1": "LABEL_1"
+         },
+         "label2id": {
+           "LABEL_0": 0,
+           "LABEL_1": 1
+         },
+         "tokenizer_class": null,
+         "prefix": null,
+         "bos_token_id": 1,
+         "pad_token_id": 0,
+         "eos_token_id": 2,
+         "sep_token_id": null,
+         "decoder_start_token_id": null,
+         "task_specific_params": null,
+         "problem_type": null,
+         "transformers_version": "4.41.2"
+       }
+     },
+     "vision_config": {
+       "hidden_size": 1024,
+       "image_size": 336,
+       "intermediate_size": 4096,
+       "num_attention_heads": 16,
+       "num_hidden_layers": 24,
+       "patch_size": 14,
+       "projection_dim": 768,
+       "vocab_size": 32000,
+       "num_channels": 3,
+       "layer_norm_eps": 1e-06,
+       "kwargs": {
+         "model_type": "clip_vision_model"
+       }
+     },
+     "vocab_size": 32064,
+     "context_window_size": 4096,
+     "sliding_window_size": -1,
+     "prefill_chunk_size": 2048,
+     "tensor_parallel_shards": 1,
+     "max_batch_size": 80,
+     "text_architecture": "LlamaForCausalLM"
+   },
+   "vocab_size": 32064,
+   "context_window_size": 4096,
+   "sliding_window_size": -1,
+   "prefill_chunk_size": 2048,
+   "attention_sink_size": -1,
+   "tensor_parallel_shards": 1,
+   "temperature": 1.0,
+   "presence_penalty": 0.0,
+   "frequency_penalty": 0.0,
+   "repetition_penalty": 1.0,
+   "top_p": 1.0,
+   "tokenizer_files": [
+     "tokenizer.model",
+     "tokenizer.json",
+     "added_tokens.json",
+     "tokenizer_config.json"
+   ],
+   "tokenizer_info": {
+     "token_postproc_method": "byte_fallback",
+     "prepend_space_in_encode": true,
+     "strip_space_in_decode": true
+   },
+   "conv_template": {
+     "name": "llava",
+     "system_template": "{system_message}",
+     "system_message": "\n",
+     "system_prefix_token_ids": [
+       1
+     ],
+     "add_role_after_system_message": false,
+     "roles": {
+       "user": "USER",
+       "assistant": "ASSISTANT"
+     },
+     "role_templates": {
+       "user": "{user_message}",
+       "assistant": "{assistant_message}",
+       "tool": "{tool_message}"
+     },
+     "messages": [],
+     "seps": [
+       " "
+     ],
+     "role_content_sep": ": ",
+     "role_empty_sep": ":",
+     "stop_str": [
+       "</s>"
+     ],
+     "stop_token_ids": [
+       2
+     ],
+     "function_string": "",
+     "use_function_calling": false
+   },
+   "pad_token_id": 32001,
+   "bos_token_id": 1,
+   "eos_token_id": 2
+ }
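
mlc-chat-config.json above is what the MLC LLM runtime reads when serving this compiled LLaVA build: an unquantized float16 ("q0f16") Vicuna-7B text backbone, a CLIP vision tower (patch size 14, image size 336), a 4096-token context window, and a USER/ASSISTANT conversation template that stops on "</s>". A minimal, version-dependent sketch of loading a directory like this with the mlc_llm Python package follows; the local path is a placeholder, and a model library compiled for the target device may also be required.

# Minimal sketch, assuming the mlc_llm Python package is installed and a model
# library has been compiled for the target device. The path is a placeholder for
# a local copy of this repository; exact arguments vary across mlc_llm versions.
from mlc_llm import MLCEngine

model_path = "./llava-q0f16-MLC"  # contains mlc-chat-config.json, ndarray-cache.json, params_shard_*.bin
engine = MLCEngine(model=model_path)

# OpenAI-style chat completion; the USER/ASSISTANT roles and "</s>" stop string
# come from the conv_template block in the config above.
response = engine.chat.completions.create(
    messages=[{"role": "user", "content": "Summarize what this model can do."}],
    model=model_path,
)
print(response.choices[0].message.content)

engine.terminate()
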
ndarray-cache.json ADDED
The diff for this file is too large to render; see the raw diff.
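
ndarray-cache.json records how the model parameters are laid out across the params_shard_*.bin files that follow. The shard entries below are Git LFS pointer files, not the weights themselves: each pointer carries only the LFS spec version, the sha256 of the real binary, and its size in bytes. To obtain the actual shards, clone with Git LFS enabled or download through huggingface_hub; the repository id below is a placeholder.

# Sketch: fetching the real shard binaries instead of the LFS pointer stubs.
# "<namespace>/<repo-name>" is a placeholder for wherever this model is hosted.
from huggingface_hub import snapshot_download

local_dir = snapshot_download(repo_id="<namespace>/<repo-name>")
print(local_dir)  # directory now containing the resolved params_shard_*.bin files

# Equivalent with git:
#   git lfs install
#   git clone <repo-url>   # Git LFS replaces the pointers with the real binaries
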
params_shard_0.bin ADDED
@@ -0,0 +1,3 @@
+ version https://git-lfs.github.com/spec/v1
+ oid sha256:acd6451d074678786bccf732416fbe34a09ac1ea513b1fd3a0ed82bc2555aa5e
+ size 262668288
params_shard_1.bin ADDED
@@ -0,0 +1,3 @@
+ version https://git-lfs.github.com/spec/v1
+ oid sha256:0f6fe8496a9694e3f5c5980dad3db12d3d9c69002da8e5d8bf9479a2a22fe16d
+ size 90177536
params_shard_10.bin ADDED
@@ -0,0 +1,3 @@
+ version https://git-lfs.github.com/spec/v1
+ oid sha256:92346ababd8dcae68865aef13b1ca4cfb5e4ffa6dc334b6e41e7a2eb6735b7c1
+ size 33554432
params_shard_100.bin ADDED
@@ -0,0 +1,3 @@
+ version https://git-lfs.github.com/spec/v1
+ oid sha256:48e501110a21c2ab0c09c56642e3f74e13925b2f817928c38bc5de0073f6d074
+ size 25192448
params_shard_101.bin ADDED
@@ -0,0 +1,3 @@
+ version https://git-lfs.github.com/spec/v1
+ oid sha256:4159df2fb278168142ccc1150a274d5f4665a6b8b96ec5317c84406a1d86c5fe
+ size 25192448
params_shard_102.bin ADDED
@@ -0,0 +1,3 @@
+ version https://git-lfs.github.com/spec/v1
+ oid sha256:bcccb6304426170e44504cde4109c06b17c8da7333f3d562d7ef53457b9ab954
+ size 25192448
params_shard_103.bin ADDED
@@ -0,0 +1,3 @@
+ version https://git-lfs.github.com/spec/v1
+ oid sha256:056396d97d032c279d0cb02c00b697e62e1be8bdb5b3823f4b9f872ead33cdba
+ size 25192448
params_shard_104.bin ADDED
@@ -0,0 +1,3 @@
+ version https://git-lfs.github.com/spec/v1
+ oid sha256:6fccc4e53d7b1598c630aa736219f252c1e77294f6bb7a7715a511f46bcba7f2
+ size 25192448
params_shard_105.bin ADDED
@@ -0,0 +1,3 @@
+ version https://git-lfs.github.com/spec/v1
+ oid sha256:e0b3bac81d0a4bf55973e9a92fdd422e4c83a7ae9f2972e1f394c01ce2749263
+ size 90177536
params_shard_106.bin ADDED
@@ -0,0 +1,3 @@
+ version https://git-lfs.github.com/spec/v1
+ oid sha256:a2c36c6ea03581cc65449ff85ba3fd09b81c01191f77c557c49476ab409e4c91
+ size 180355072
params_shard_107.bin ADDED
@@ -0,0 +1,3 @@
+ version https://git-lfs.github.com/spec/v1
+ oid sha256:cfed5895615e1cf87d068050deb1190bc0a2bc5c32e1d100a06aedcb98c6d4d0
+ size 33554432
params_shard_108.bin ADDED
@@ -0,0 +1,3 @@
+ version https://git-lfs.github.com/spec/v1
+ oid sha256:0807c48c9b1de01edc8d44d89425550b5016bf40696d5251d134fd30fb3a94bf
+ size 90177536
params_shard_109.bin ADDED
@@ -0,0 +1,3 @@
+ version https://git-lfs.github.com/spec/v1
+ oid sha256:93943ef870f6145a8ae366bef5cff8378e5bced9c0353cceedb5394cb7afab3e
+ size 180355072
params_shard_11.bin ADDED
@@ -0,0 +1,3 @@
+ version https://git-lfs.github.com/spec/v1
+ oid sha256:a69ef612266727ff5c4c6111f81ef7ee8d9cc62d061b19189aed7c641921426a
+ size 90177536
params_shard_110.bin ADDED
@@ -0,0 +1,3 @@
+ version https://git-lfs.github.com/spec/v1
+ oid sha256:d2a6a12f4c8cb9769c511f00ef0ac856cde0ef5268ba535f910a46a3793bbf2b
+ size 100663296
params_shard_111.bin ADDED
@@ -0,0 +1,3 @@
+ version https://git-lfs.github.com/spec/v1
+ oid sha256:ab0933fca48e739132d28af747f5ea38f2bab59607ca80479ed528fca3a8192c
+ size 33554432
params_shard_112.bin ADDED
@@ -0,0 +1,3 @@
+ version https://git-lfs.github.com/spec/v1
+ oid sha256:149bea587698632317b0af107116c573ee5029fa2faec4435bc1832d76cca69d
+ size 90177536
params_shard_113.bin ADDED
@@ -0,0 +1,3 @@
+ version https://git-lfs.github.com/spec/v1
+ oid sha256:07946d26cc096d46771ccd43b8119cb88e433a8ceab55ad4fc9d7cd80b112923
+ size 180355072
params_shard_114.bin ADDED
@@ -0,0 +1,3 @@
+ version https://git-lfs.github.com/spec/v1
+ oid sha256:44a813143ed19f059ce7026d50e0862de52f54079f936e502bf48c5fdca0d357
+ size 100663296
params_shard_115.bin ADDED
@@ -0,0 +1,3 @@
+ version https://git-lfs.github.com/spec/v1
+ oid sha256:57d6ec37600d29ed5b05aa99583f464353afe303969e973747d6cacd13a127cf
+ size 33554432
params_shard_116.bin ADDED
@@ -0,0 +1,3 @@
+ version https://git-lfs.github.com/spec/v1
+ oid sha256:8e0c339ba96b3393f50a73e095e8e32608c8a31e0d2d6ad9fe3ed9b01ef01066
+ size 90177536
params_shard_117.bin ADDED
@@ -0,0 +1,3 @@
+ version https://git-lfs.github.com/spec/v1
+ oid sha256:5ef1fb3917960dfadd888d329a4cfd23802619b49f1ec68642dec6b69d5ac018
+ size 180355072
params_shard_118.bin ADDED
@@ -0,0 +1,3 @@
+ version https://git-lfs.github.com/spec/v1
+ oid sha256:3141a3af6d0e5e6219aa1973be7d1d1a53a0b3e361e385205a62b9f4c397dc94
+ size 100663296
params_shard_119.bin ADDED
@@ -0,0 +1,3 @@
+ version https://git-lfs.github.com/spec/v1
+ oid sha256:bee72e071e54dbb3518d8a9ccb84f7f74811ea2f1ac5b1b9016d79378e00a0b3
+ size 33554432
params_shard_12.bin ADDED
@@ -0,0 +1,3 @@
+ version https://git-lfs.github.com/spec/v1
+ oid sha256:12625b2db0677dc315f236164136f3dc15779fb560028a0488453b80f6017c7e
+ size 180355072
params_shard_120.bin ADDED
@@ -0,0 +1,3 @@
+ version https://git-lfs.github.com/spec/v1
+ oid sha256:9e499f198a26e6ffc73849110c4462b5df14b16be0477c74c9ec8a19731e3505
+ size 90177536
params_shard_121.bin ADDED
@@ -0,0 +1,3 @@
+ version https://git-lfs.github.com/spec/v1
+ oid sha256:d123f94fc44cb41b96b3de8366ead3755d79133543717b3668e59e3c400c4358
+ size 180355072
params_shard_122.bin ADDED
@@ -0,0 +1,3 @@
+ version https://git-lfs.github.com/spec/v1
+ oid sha256:207d38677f59a11b485ad6499c131b06f3ac08b854468be64828cf2df572da04
+ size 100663296
params_shard_123.bin ADDED
@@ -0,0 +1,3 @@
+ version https://git-lfs.github.com/spec/v1
+ oid sha256:a0ed6970ec0483180c383d8e294e499807b7199a97aa88b8967c79abc4925218
+ size 33554432
params_shard_124.bin ADDED
@@ -0,0 +1,3 @@
+ version https://git-lfs.github.com/spec/v1
+ oid sha256:f81209f0ada5cd0390c44093df9b4402458ec2216384b2918ccfd69a27de8c2f
+ size 90177536
params_shard_125.bin ADDED
@@ -0,0 +1,3 @@
+ version https://git-lfs.github.com/spec/v1
+ oid sha256:6434a5fda2d4202a49138135cb6ec9af2760645f7bc0062f79eb4683a2490a55
+ size 180355072
params_shard_126.bin ADDED
@@ -0,0 +1,3 @@
+ version https://git-lfs.github.com/spec/v1
+ oid sha256:c2faa78623536bf155531626b3c8fe6bbc44b171fdc4d729cd9b8f12237855c3
+ size 100663296
params_shard_127.bin ADDED
@@ -0,0 +1,3 @@
+ version https://git-lfs.github.com/spec/v1
+ oid sha256:9b0fc40a3b9aefe1a1c18a2d548efe76000153d268cb11ee440623b1da4efe21
+ size 33554432
params_shard_128.bin ADDED
@@ -0,0 +1,3 @@
+ version https://git-lfs.github.com/spec/v1
+ oid sha256:e7de8fc6fc88afd6ac81d9b1e0d7221678263b719c00d04d162ec6e437fd0ce4
+ size 90177536
params_shard_129.bin ADDED
@@ -0,0 +1,3 @@
+ version https://git-lfs.github.com/spec/v1
+ oid sha256:59a367c60d17f28333b21431a3a630d9de255cdf689d1ec9381ce260649441d1
+ size 180355072
params_shard_13.bin ADDED
@@ -0,0 +1,3 @@
+ version https://git-lfs.github.com/spec/v1
+ oid sha256:57b2024059b156fc4a6a2e72d336b76f224ab0f0dd4cfcdde9ccf98f275b49f1
+ size 100663296
params_shard_130.bin ADDED
@@ -0,0 +1,3 @@
+ version https://git-lfs.github.com/spec/v1
+ oid sha256:e53dcec19781c080ca90946c1e28ef80512a36f332d8a003c3e911441d7c055b
+ size 100663296
params_shard_131.bin ADDED
@@ -0,0 +1,3 @@
+ version https://git-lfs.github.com/spec/v1
+ oid sha256:6b384bac5d2aadfa1d0b89f9afb723308b4d41bc17bdbe220937edceff78a8c3
+ size 33554432
params_shard_132.bin ADDED
@@ -0,0 +1,3 @@
+ version https://git-lfs.github.com/spec/v1
+ oid sha256:47aba78b10a4ddcf178216c75769b0f7644fc4ec0c86d5d6eb508b3d4a2a451d
+ size 90177536
params_shard_133.bin ADDED
@@ -0,0 +1,3 @@
+ version https://git-lfs.github.com/spec/v1
+ oid sha256:c0b1c53e6d10080f81077ce960b33328af566a3f9024d95d115949adca3e24a8
+ size 180355072
params_shard_134.bin ADDED
@@ -0,0 +1,3 @@
+ version https://git-lfs.github.com/spec/v1
+ oid sha256:383739efc84d7a46fb57375e26ff037ce498e14824560c973fd1171b84eda6b4
+ size 100663296
params_shard_135.bin ADDED
@@ -0,0 +1,3 @@
+ version https://git-lfs.github.com/spec/v1
+ oid sha256:3a64c1d5af2d8b3a1f8b106b2412416003734b95637c87fce66f901bb92ecbda
+ size 33554432
params_shard_136.bin ADDED
@@ -0,0 +1,3 @@
+ version https://git-lfs.github.com/spec/v1
+ oid sha256:cd73ab4a09010144df0ecae0f85ae0e6ad3a8ac8376c0a2e46ce4342441df61c
+ size 90177536
params_shard_137.bin ADDED
@@ -0,0 +1,3 @@
+ version https://git-lfs.github.com/spec/v1
+ oid sha256:dd8d69bc1789408e3153bef69e199485f82b8b69a1c1ec2d8e77562ec380a89b
+ size 180355072
params_shard_138.bin ADDED
@@ -0,0 +1,3 @@
+ version https://git-lfs.github.com/spec/v1
+ oid sha256:5229ecb358f79ea05ac6921173503c1ee11c0747473dd9b1f0a28bdbb933e349
+ size 100663296
params_shard_139.bin ADDED
@@ -0,0 +1,3 @@
+ version https://git-lfs.github.com/spec/v1
+ oid sha256:7dc072d40d812809ba59ff2343e5305f4c5ee4f815c58a5d2278cff922323711
+ size 33554432
params_shard_14.bin ADDED
@@ -0,0 +1,3 @@
+ version https://git-lfs.github.com/spec/v1
+ oid sha256:e7d0d772e72671aac83f40457a94b399f904815036c4c91e01a7a58e4832ce60
+ size 33554432