swapnil7777 committed
Commit b68a41b · verified · 1 parent: 6aa536e

Upload 4 files

added_tokens.json ADDED
@@ -0,0 +1,4 @@
+ {
+   "<image>": 32000,
+   "<pad>": 32001
+ }
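These two entries extend the base Llama vocabulary (ids 0–31999), so the new tokens take the next free ids. A minimal sketch of checking this, assuming a local checkout of this repo (the repo id itself is not shown in the diff):

```python
from transformers import AutoTokenizer

# "." is a hypothetical local path to this checkpoint.
tokenizer = AutoTokenizer.from_pretrained(".")

# The added tokens land immediately after the 32000-token base vocabulary,
# matching added_tokens.json above.
print(tokenizer.convert_tokens_to_ids("<image>"))  # 32000
print(tokenizer.convert_tokens_to_ids("<pad>"))    # 32001
```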
chat_template.json ADDED
@@ -0,0 +1,3 @@
+ {
+   "chat_template": "{% for message in messages %}{% if message['role'] != 'system' %}{{ message['role'].upper() + ': '}}{% endif %}{# Render all images first #}{% for content in message['content'] | selectattr('type', 'equalto', 'image') %}{{ '<image>\n' }}{% endfor %}{# Render all text next #}{% if message['role'] != 'assistant' %}{% for content in message['content'] | selectattr('type', 'equalto', 'text') %}{{ content['text'] + ' '}}{% endfor %}{% else %}{% for content in message['content'] | selectattr('type', 'equalto', 'text') %}{% generation %}{{ content['text'] + ' '}}{% endgeneration %}{% endfor %}{% endif %}{% endfor %}{% if add_generation_prompt %}{{ 'ASSISTANT:' }}{% endif %}"
+ }
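The template prints the upper-cased role, renders every image item as an `<image>` placeholder before the text, and wraps assistant text in `{% generation %}` tags (which transformers can use to build an assistant-token mask for training). A minimal rendering sketch, assuming a local checkout and a transformers version recent enough for the processor to load chat_template.json:

```python
from transformers import AutoProcessor

processor = AutoProcessor.from_pretrained(".")  # hypothetical local path

messages = [
    {
        "role": "user",
        "content": [
            {"type": "image"},
            {"type": "text", "text": "What is shown in this image?"},
        ],
    }
]

prompt = processor.apply_chat_template(messages, add_generation_prompt=True)
print(prompt)
# USER: <image>
# What is shown in this image? ASSISTANT:
```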
config.json ADDED
@@ -0,0 +1,38 @@
+ {
+   "architectures": [
+     "LlavaForConditionalGeneration"
+   ],
+   "ignore_index": -100,
+   "image_token_index": 32000,
+   "model_type": "llava",
+   "pad_token_id": 32001,
+   "projector_hidden_act": "gelu",
+   "text_config": {
+     "_name_or_path": "lmsys/vicuna-7b-v1.5",
+     "architectures": [
+       "LlamaForCausalLM"
+     ],
+     "max_position_embeddings": 4096,
+     "model_type": "llama",
+     "rms_norm_eps": 1e-05,
+     "torch_dtype": "float16",
+     "vocab_size": 32064
+   },
+   "tie_word_embeddings": false,
+   "torch_dtype": "float16",
+   "transformers_version": "4.36.0.dev0",
+   "vision_config": {
+     "hidden_size": 1024,
+     "image_size": 336,
+     "intermediate_size": 4096,
+     "model_type": "clip_vision_model",
+     "num_attention_heads": 16,
+     "num_hidden_layers": 24,
+     "patch_size": 14,
+     "projection_dim": 768,
+     "vocab_size": 32000
+   },
+   "vision_feature_layer": -2,
+   "vision_feature_select_strategy": "default",
+   "vocab_size": 32064
+ }
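This config wires a CLIP ViT-L/14 vision tower at 336 px resolution to a Vicuna-7B-v1.5 language model through a GELU projector; vision features are taken from the second-to-last layer (`vision_feature_layer: -2`), and the LM vocabulary is enlarged to 32064 to cover the two added tokens (apparently padded up to a multiple of 64). A minimal loading sketch, assuming a local checkout and transformers >= 4.36, the version that introduced the `llava` model type:

```python
import torch
from transformers import LlavaForConditionalGeneration

model = LlavaForConditionalGeneration.from_pretrained(
    ".",                        # hypothetical local path to this repo
    torch_dtype=torch.float16,  # matches "torch_dtype": "float16" above
)

print(model.config.image_token_index)         # 32000, the <image> placeholder id
print(model.config.vision_config.image_size)  # 336
```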
special_tokens_map.json ADDED
@@ -0,0 +1,30 @@
+ {
+   "bos_token": {
+     "content": "<s>",
+     "lstrip": false,
+     "normalized": false,
+     "rstrip": false,
+     "single_word": false
+   },
+   "eos_token": {
+     "content": "</s>",
+     "lstrip": false,
+     "normalized": false,
+     "rstrip": false,
+     "single_word": false
+   },
+   "pad_token": {
+     "content": "<pad>",
+     "lstrip": false,
+     "normalized": false,
+     "rstrip": false,
+     "single_word": false
+   },
+   "unk_token": {
+     "content": "<unk>",
+     "lstrip": false,
+     "normalized": false,
+     "rstrip": false,
+     "single_word": false
+   }
+ }
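These are the standard Llama special tokens plus the `<pad>` token added in added_tokens.json (Vicuna ships without a pad token, and `pad_token_id: 32001` in config.json points at it). A quick check, again assuming a local checkout:

```python
from transformers import AutoTokenizer

tokenizer = AutoTokenizer.from_pretrained(".")  # hypothetical local path

print(tokenizer.bos_token, tokenizer.eos_token, tokenizer.unk_token)  # <s> </s> <unk>
print(tokenizer.pad_token, tokenizer.pad_token_id)                    # <pad> 32001
```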