Upload 23 files
- .gitattributes +1 -0
- added_tokens.json +33 -0
- chat_template.jinja +54 -0
- config.json +109 -0
- configuration_intern_vit.py +120 -0
- configuration_internvl_chat.py +97 -0
- generation_config.json +4 -0
- merges.txt +0 -0
- openvino_config.json +27 -0
- openvino_detokenizer.bin +3 -0
- openvino_detokenizer.xml +220 -0
- openvino_language_model.bin +3 -0
- openvino_language_model.xml +0 -0
- openvino_text_embeddings_model.bin +3 -0
- openvino_text_embeddings_model.xml +177 -0
- openvino_tokenizer.bin +3 -0
- openvino_tokenizer.xml +721 -0
- openvino_vision_embeddings_model.bin +3 -0
- openvino_vision_embeddings_model.xml +0 -0
- preprocessor_config.json +27 -0
- special_tokens_map.json +31 -0
- tokenizer.json +3 -0
- tokenizer_config.json +280 -0
- vocab.json +0 -0
.gitattributes
CHANGED
@@ -33,3 +33,4 @@ saved_model/**/* filter=lfs diff=lfs merge=lfs -text
 *.zip filter=lfs diff=lfs merge=lfs -text
 *.zst filter=lfs diff=lfs merge=lfs -text
 *tfevents* filter=lfs diff=lfs merge=lfs -text
+tokenizer.json filter=lfs diff=lfs merge=lfs -text
added_tokens.json
ADDED
@@ -0,0 +1,33 @@
{
  "</box>": 151673,
  "</img>": 151666,
  "</quad>": 151669,
  "</ref>": 151671,
  "</tool_call>": 151658,
  "<IMG_CONTEXT>": 151667,
  "<box>": 151672,
  "<img>": 151665,
  "<quad>": 151668,
  "<ref>": 151670,
  "<tool_call>": 151657,
  "<|box_end|>": 151649,
  "<|box_start|>": 151648,
  "<|endoftext|>": 151643,
  "<|file_sep|>": 151664,
  "<|fim_middle|>": 151660,
  "<|fim_pad|>": 151662,
  "<|fim_prefix|>": 151659,
  "<|fim_suffix|>": 151661,
  "<|im_end|>": 151645,
  "<|im_start|>": 151644,
  "<|image_pad|>": 151655,
  "<|object_ref_end|>": 151647,
  "<|object_ref_start|>": 151646,
  "<|quad_end|>": 151651,
  "<|quad_start|>": 151650,
  "<|repo_name|>": 151663,
  "<|video_pad|>": 151656,
  "<|vision_end|>": 151653,
  "<|vision_pad|>": 151654,
  "<|vision_start|>": 151652
}
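These entries extend the Qwen2 vocabulary with InternVL's image/grounding tags (`<img>`, `<IMG_CONTEXT>`, `<box>`, `<ref>`, ...) on top of the usual Qwen chat and control tokens. A minimal sketch for checking that the loaded tokenizer resolves them to the IDs listed above; the repo id is a placeholder, not something stated in this upload:

```python
# Minimal sketch: confirm the added tokens map to the IDs in added_tokens.json.
from transformers import AutoTokenizer

tok = AutoTokenizer.from_pretrained("path/or/repo-id-of-this-model", trust_remote_code=True)
for token in ["<img>", "</img>", "<IMG_CONTEXT>", "<|im_start|>", "<|im_end|>"]:
    print(token, tok.convert_tokens_to_ids(token))
# Expected per the file above: 151665, 151666, 151667, 151644, 151645
```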
chat_template.jinja
ADDED
@@ -0,0 +1,54 @@
{%- if tools %}
    {{- '<|im_start|>system\n' }}
    {%- if messages[0]['role'] == 'system' %}
        {{- messages[0]['content'] }}
    {%- else %}
        {{- 'You are Qwen, created by Alibaba Cloud. You are a helpful assistant.' }}
    {%- endif %}
    {{- "\n\n# Tools\n\nYou may call one or more functions to assist with the user query.\n\nYou are provided with function signatures within <tools></tools> XML tags:\n<tools>" }}
    {%- for tool in tools %}
        {{- "\n" }}
        {{- tool | tojson }}
    {%- endfor %}
    {{- "\n</tools>\n\nFor each function call, return a json object with function name and arguments within <tool_call></tool_call> XML tags:\n<tool_call>\n{\"name\": <function-name>, \"arguments\": <args-json-object>}\n</tool_call><|im_end|>\n" }}
{%- else %}
    {%- if messages[0]['role'] == 'system' %}
        {{- '<|im_start|>system\n' + messages[0]['content'] + '<|im_end|>\n' }}
    {%- else %}
        {{- '<|im_start|>system\nYou are Qwen, created by Alibaba Cloud. You are a helpful assistant.<|im_end|>\n' }}
    {%- endif %}
{%- endif %}
{%- for message in messages %}
    {%- if (message.role == "user") or (message.role == "system" and not loop.first) or (message.role == "assistant" and not message.tool_calls) %}
        {{- '<|im_start|>' + message.role + '\n' + message.content + '<|im_end|>' + '\n' }}
    {%- elif message.role == "assistant" %}
        {{- '<|im_start|>' + message.role }}
        {%- if message.content %}
            {{- '\n' + message.content }}
        {%- endif %}
        {%- for tool_call in message.tool_calls %}
            {%- if tool_call.function is defined %}
                {%- set tool_call = tool_call.function %}
            {%- endif %}
            {{- '\n<tool_call>\n{"name": "' }}
            {{- tool_call.name }}
            {{- '", "arguments": ' }}
            {{- tool_call.arguments | tojson }}
            {{- '}\n</tool_call>' }}
        {%- endfor %}
        {{- '<|im_end|>\n' }}
    {%- elif message.role == "tool" %}
        {%- if (loop.index0 == 0) or (messages[loop.index0 - 1].role != "tool") %}
            {{- '<|im_start|>user' }}
        {%- endif %}
        {{- '\n<tool_response>\n' }}
        {{- message.content }}
        {{- '\n</tool_response>' }}
        {%- if loop.last or (messages[loop.index0 + 1].role != "tool") %}
            {{- '<|im_end|>\n' }}
        {%- endif %}
    {%- endif %}
{%- endfor %}
{%- if add_generation_prompt %}
    {{- '<|im_start|>assistant\n' }}
{%- endif %}
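This is the Qwen2.5-style ChatML template: a system turn (default or user-supplied), an optional `<tools>` block with JSON function signatures, and `<tool_call>`/`<tool_response>` wrappers for assistant tool calls and tool results. A sketch of rendering it through the tokenizer; the repo id is a placeholder:

```python
# Sketch: render the template above into the ChatML prompt string.
from transformers import AutoTokenizer

tok = AutoTokenizer.from_pretrained("path/or/repo-id-of-this-model", trust_remote_code=True)
messages = [
    {"role": "system", "content": "You are a helpful assistant."},
    {"role": "user", "content": "Describe the image.\n<image>"},
]
prompt = tok.apply_chat_template(messages, tokenize=False, add_generation_prompt=True)
print(prompt)
# -> "<|im_start|>system\n...<|im_end|>\n<|im_start|>user\n...<|im_end|>\n<|im_start|>assistant\n"
```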
config.json
ADDED
@@ -0,0 +1,109 @@
{
  "architectures": [
    "InternVLChatModel"
  ],
  "auto_map": {
    "AutoConfig": "configuration_internvl_chat.InternVLChatConfig",
    "AutoModel": "OpenGVLab/InternVL3-2B--modeling_internvl_chat.InternVLChatModel",
    "AutoModelForCausalLM": "OpenGVLab/InternVL3-2B--modeling_internvl_chat.InternVLChatModel"
  },
  "downsample_ratio": 0.5,
  "dynamic_image_size": true,
  "force_image_size": 448,
  "hidden_size": 1536,
  "image_fold": null,
  "img_context_token_id": 151667,
  "llm_config": {
    "_name_or_path": "./pretrained/Qwen2.5-32B-Instruct",
    "architectures": [
      "Qwen2ForCausalLM"
    ],
    "attention_dropout": 0.0,
    "bos_token_id": 151643,
    "eos_token_id": 151643,
    "hidden_act": "silu",
    "hidden_size": 1536,
    "initializer_range": 0.02,
    "intermediate_size": 8960,
    "max_position_embeddings": 32768,
    "max_window_layers": 70,
    "model_type": "qwen2",
    "moe_config": null,
    "num_attention_heads": 12,
    "num_hidden_layers": 28,
    "num_key_value_heads": 2,
    "rms_norm_eps": 1e-06,
    "rope_scaling": {
      "factor": 2.0,
      "rope_type": "dynamic",
      "type": "dynamic"
    },
    "rope_theta": 1000000.0,
    "sliding_window": null,
    "torch_dtype": "bfloat16",
    "use_bfloat16": true,
    "use_cache": false,
    "use_sliding_window": false,
    "vocab_size": 151674
  },
  "max_dynamic_patch": 12,
  "min_dynamic_patch": 1,
  "model_type": "internvl_chat",
  "pad2square": false,
  "ps_version": "v2",
  "select_layer": -1,
  "system_message": null,
  "template": "internvl2_5",
  "tie_word_embeddings": false,
  "torch_dtype": "bfloat16",
  "transformers_version": null,
  "use_backbone_lora": 0,
  "use_llm_lora": 0,
  "use_thumbnail": true,
  "vision_config": {
    "_name_or_path": "OpenGVLab/InternViT-6B-448px-V1-5",
    "architectures": [
      "InternVisionModel"
    ],
    "attention_dropout": 0.0,
    "auto_map": {
      "AutoConfig": "configuration_intern_vit.InternVisionConfig",
      "AutoModel": "modeling_intern_vit.InternVisionModel"
    },
    "capacity_factor": 1.2,
    "drop_path_rate": 0.1,
    "dropout": 0.0,
    "eval_capacity_factor": 1.4,
    "hidden_act": "gelu",
    "hidden_size": 1024,
    "image_size": 448,
    "initializer_factor": 0.1,
    "initializer_range": 1e-10,
    "intermediate_size": 4096,
    "laux_allreduce": "all_nodes",
    "layer_norm_eps": 1e-06,
    "model_type": "intern_vit_6b",
    "moe_coeff_ratio": 0.5,
    "moe_intermediate_size": 768,
    "moe_output_scale": 4.0,
    "noisy_gate_policy": "RSample_before",
    "norm_type": "layer_norm",
    "num_attention_heads": 16,
    "num_channels": 3,
    "num_experts": 8,
    "num_hidden_layers": 24,
    "num_routed_experts": 4,
    "num_shared_experts": 4,
    "patch_size": 14,
    "qk_normalization": false,
    "qkv_bias": true,
    "shared_expert_intermediate_size": 3072,
    "torch_dtype": "bfloat16",
    "use_bfloat16": true,
    "use_flash_attn": true,
    "use_moe": false,
    "use_residual": true,
    "use_rts": false,
    "use_weighted_residual": false
  }
}
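The config wires an InternViT vision tower (24 layers, patch size 14, 448-px inputs, dynamic tiling up to 12 patches plus thumbnail) to a Qwen2 LLM (hidden size 1536, 28 layers, GQA with 2 KV heads), with `img_context_token_id` matching `<IMG_CONTEXT>` from added_tokens.json. A sketch for inspecting the composite config, assuming the remote-code classes in this repo load as usual; the repo id is a placeholder:

```python
# Sketch: load and inspect the composite InternVLChatConfig.
from transformers import AutoConfig

cfg = AutoConfig.from_pretrained("path/or/repo-id-of-this-model", trust_remote_code=True)
print(cfg.model_type)                # internvl_chat
print(cfg.llm_config.hidden_size)    # 1536
print(cfg.vision_config.image_size)  # 448
print(cfg.img_context_token_id)      # 151667 == <IMG_CONTEXT>
```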
configuration_intern_vit.py
ADDED
@@ -0,0 +1,120 @@
# --------------------------------------------------------
# InternVL
# Copyright (c) 2024 OpenGVLab
# Licensed under The MIT License [see LICENSE for details]
# --------------------------------------------------------

import os
from typing import Union

from transformers.configuration_utils import PretrainedConfig
from transformers.utils import logging

logger = logging.get_logger(__name__)


class InternVisionConfig(PretrainedConfig):
    r"""
    This is the configuration class to store the configuration of a [`InternVisionModel`]. It is used to
    instantiate a vision encoder according to the specified arguments, defining the model architecture.

    Configuration objects inherit from [`PretrainedConfig`] and can be used to control the model outputs. Read the
    documentation from [`PretrainedConfig`] for more information.

    Args:
        num_channels (`int`, *optional*, defaults to 3):
            Number of color channels in the input images (e.g., 3 for RGB).
        patch_size (`int`, *optional*, defaults to 14):
            The size (resolution) of each patch.
        image_size (`int`, *optional*, defaults to 224):
            The size (resolution) of each image.
        qkv_bias (`bool`, *optional*, defaults to `False`):
            Whether to add a bias to the queries and values in the self-attention layers.
        hidden_size (`int`, *optional*, defaults to 3200):
            Dimensionality of the encoder layers and the pooler layer.
        num_attention_heads (`int`, *optional*, defaults to 25):
            Number of attention heads for each attention layer in the Transformer encoder.
        intermediate_size (`int`, *optional*, defaults to 12800):
            Dimensionality of the "intermediate" (i.e., feed-forward) layer in the Transformer encoder.
        qk_normalization (`bool`, *optional*, defaults to `True`):
            Whether to normalize the queries and keys in the self-attention layers.
        num_hidden_layers (`int`, *optional*, defaults to 48):
            Number of hidden layers in the Transformer encoder.
        use_flash_attn (`bool`, *optional*, defaults to `True`):
            Whether to use flash attention mechanism.
        hidden_act (`str` or `function`, *optional*, defaults to `"gelu"`):
            The non-linear activation function (function or string) in the encoder and pooler. If string, `"gelu"`,
            `"relu"`, `"selu"` and `"gelu_new"` are supported.
        layer_norm_eps (`float`, *optional*, defaults to 1e-6):
            The epsilon used by the layer normalization layers.
        dropout (`float`, *optional*, defaults to 0.0):
            The dropout probability for all fully connected layers in the embeddings, encoder, and pooler.
        drop_path_rate (`float`, *optional*, defaults to 0.0):
            Dropout rate for stochastic depth.
        attention_dropout (`float`, *optional*, defaults to 0.0):
            The dropout ratio for the attention probabilities.
        initializer_range (`float`, *optional*, defaults to 0.02):
            The standard deviation of the truncated_normal_initializer for initializing all weight matrices.
        initializer_factor (`float`, *optional*, defaults to 0.1):
            A factor for layer scale.
    """

    model_type = 'intern_vit_6b'

    def __init__(
            self,
            num_channels=3,
            patch_size=14,
            image_size=224,
            qkv_bias=False,
            hidden_size=3200,
            num_attention_heads=25,
            intermediate_size=12800,
            qk_normalization=True,
            num_hidden_layers=48,
            use_flash_attn=True,
            hidden_act='gelu',
            norm_type='rms_norm',
            layer_norm_eps=1e-6,
            dropout=0.0,
            drop_path_rate=0.0,
            attention_dropout=0.0,
            initializer_range=0.02,
            initializer_factor=0.1,
            **kwargs,
    ):
        super().__init__(**kwargs)

        self.hidden_size = hidden_size
        self.intermediate_size = intermediate_size
        self.dropout = dropout
        self.drop_path_rate = drop_path_rate
        self.num_hidden_layers = num_hidden_layers
        self.num_attention_heads = num_attention_heads
        self.num_channels = num_channels
        self.patch_size = patch_size
        self.image_size = image_size
        self.initializer_range = initializer_range
        self.initializer_factor = initializer_factor
        self.attention_dropout = attention_dropout
        self.layer_norm_eps = layer_norm_eps
        self.hidden_act = hidden_act
        self.norm_type = norm_type
        self.qkv_bias = qkv_bias
        self.qk_normalization = qk_normalization
        self.use_flash_attn = use_flash_attn

    @classmethod
    def from_pretrained(cls, pretrained_model_name_or_path: Union[str, os.PathLike], **kwargs) -> 'PretrainedConfig':
        config_dict, kwargs = cls.get_config_dict(pretrained_model_name_or_path, **kwargs)

        if 'vision_config' in config_dict:
            config_dict = config_dict['vision_config']

        if 'model_type' in config_dict and hasattr(cls, 'model_type') and config_dict['model_type'] != cls.model_type:
            logger.warning(
                f"You are using a model of type {config_dict['model_type']} to instantiate a model of type "
                f'{cls.model_type}. This is not supported for all configurations of models and can yield errors.'
            )

        return cls.from_dict(config_dict, **kwargs)
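The class only records vision-encoder hyperparameters; `from_pretrained` additionally pulls the nested `vision_config` out of a composite checkpoint config. A small usage sketch, instantiating it with the values this upload uses in config.json (assumes the class above is importable):

```python
# Usage sketch: vision config with the values from config.json above.
config = InternVisionConfig(
    image_size=448,
    patch_size=14,
    hidden_size=1024,
    intermediate_size=4096,
    num_hidden_layers=24,
    num_attention_heads=16,
    qkv_bias=True,
    qk_normalization=False,
    norm_type='layer_norm',
)
print(config.model_type, config.image_size)  # intern_vit_6b 448
```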
configuration_internvl_chat.py
ADDED
@@ -0,0 +1,97 @@
# --------------------------------------------------------
# InternVL
# Copyright (c) 2024 OpenGVLab
# Licensed under The MIT License [see LICENSE for details]
# --------------------------------------------------------

import copy

from transformers import AutoConfig, LlamaConfig, Qwen2Config
from transformers.configuration_utils import PretrainedConfig
from transformers.utils import logging

from .configuration_intern_vit import InternVisionConfig

logger = logging.get_logger(__name__)


class InternVLChatConfig(PretrainedConfig):
    model_type = 'internvl_chat'
    is_composition = True

    def __init__(
            self,
            vision_config=None,
            llm_config=None,
            use_backbone_lora=0,
            use_llm_lora=0,
            select_layer=-1,
            force_image_size=None,
            downsample_ratio=0.5,
            template=None,
            dynamic_image_size=False,
            use_thumbnail=False,
            ps_version='v1',
            min_dynamic_patch=1,
            max_dynamic_patch=6,
            **kwargs):
        super().__init__(**kwargs)

        if vision_config is None:
            vision_config = {'architectures': ['InternVisionModel']}
            logger.info('vision_config is None. Initializing the InternVisionConfig with default values.')

        if llm_config is None:
            llm_config = {'architectures': ['Qwen2ForCausalLM']}
            logger.info('llm_config is None. Initializing the LlamaConfig config with default values (`LlamaConfig`).')

        self.vision_config = InternVisionConfig(**vision_config)
        if llm_config.get('architectures')[0] == 'LlamaForCausalLM':
            self.llm_config = LlamaConfig(**llm_config)
        elif llm_config.get('architectures')[0] == 'Qwen2ForCausalLM':
            self.llm_config = Qwen2Config(**llm_config)
        else:
            raise ValueError('Unsupported architecture: {}'.format(llm_config.get('architectures')[0]))
        self.use_backbone_lora = use_backbone_lora
        self.use_llm_lora = use_llm_lora
        self.select_layer = select_layer
        self.force_image_size = force_image_size
        self.downsample_ratio = downsample_ratio
        self.template = template
        self.dynamic_image_size = dynamic_image_size
        self.use_thumbnail = use_thumbnail
        self.ps_version = ps_version  # pixel shuffle version
        self.min_dynamic_patch = min_dynamic_patch
        self.max_dynamic_patch = max_dynamic_patch
        # By default, we use tie_word_embeddings=False for models of all sizes.
        self.tie_word_embeddings = self.llm_config.tie_word_embeddings

        logger.info(f'vision_select_layer: {self.select_layer}')
        logger.info(f'ps_version: {self.ps_version}')
        logger.info(f'min_dynamic_patch: {self.min_dynamic_patch}')
        logger.info(f'max_dynamic_patch: {self.max_dynamic_patch}')

    def to_dict(self):
        """
        Serializes this instance to a Python dictionary. Override the default [`~PretrainedConfig.to_dict`].

        Returns:
            `Dict[str, any]`: Dictionary of all the attributes that make up this configuration instance,
        """
        output = copy.deepcopy(self.__dict__)
        output['vision_config'] = self.vision_config.to_dict()
        output['llm_config'] = self.llm_config.to_dict()
        output['model_type'] = self.__class__.model_type
        output['use_backbone_lora'] = self.use_backbone_lora
        output['use_llm_lora'] = self.use_llm_lora
        output['select_layer'] = self.select_layer
        output['force_image_size'] = self.force_image_size
        output['downsample_ratio'] = self.downsample_ratio
        output['template'] = self.template
        output['dynamic_image_size'] = self.dynamic_image_size
        output['use_thumbnail'] = self.use_thumbnail
        output['ps_version'] = self.ps_version
        output['min_dynamic_patch'] = self.min_dynamic_patch
        output['max_dynamic_patch'] = self.max_dynamic_patch

        return output
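The composite config nests the vision and LLM configs and dispatches on the LLM's `architectures` entry (Llama or Qwen2). A sketch of building it from plain dicts, the same way `from_pretrained` would, and round-tripping through `to_dict()` (assumes the module above is importable):

```python
# Usage sketch: construct the composite config from plain dicts.
chat_config = InternVLChatConfig(
    vision_config={'architectures': ['InternVisionModel'], 'image_size': 448, 'patch_size': 14},
    llm_config={'architectures': ['Qwen2ForCausalLM'], 'hidden_size': 1536, 'num_hidden_layers': 28},
    template='internvl2_5',
    dynamic_image_size=True,
    use_thumbnail=True,
    max_dynamic_patch=12,
)
d = chat_config.to_dict()
print(d['model_type'], d['llm_config']['model_type'])  # internvl_chat qwen2
```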
generation_config.json
ADDED
@@ -0,0 +1,4 @@
{
  "_from_model_config": true,
  "transformers_version": "4.52.3"
}
merges.txt
ADDED
The diff for this file is too large to render.
See raw diff
openvino_config.json
ADDED
@@ -0,0 +1,27 @@
{
  "dtype": "int4",
  "input_info": null,
  "optimum_version": "1.25.3",
  "quantization_config": {
    "all_layers": null,
    "backup_precision": null,
    "bits": 4,
    "dataset": null,
    "dtype": "int4",
    "gptq": null,
    "group_size": 128,
    "ignored_scope": null,
    "lora_correction": null,
    "num_samples": null,
    "processor": null,
    "quant_method": "default",
    "ratio": 1.0,
    "scale_estimation": null,
    "sensitivity_metric": null,
    "sym": false,
    "tokenizer": null,
    "trust_remote_code": false
  },
  "save_onnx_model": false,
  "transformers_version": "4.52.3"
}
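This file records the weight-compression settings used when the model was exported to OpenVINO: data-free INT4, asymmetric, group size 128, ratio 1.0. A hedged sketch of reproducing an export with these settings via optimum-intel; the class and argument names reflect optimum-intel's documented API as I understand it, and the source checkpoint id is an assumption, not something stated in this upload:

```python
# Hedged sketch: INT4 weight-only export along the lines of openvino_config.json.
from optimum.intel import OVModelForVisualCausalLM, OVWeightQuantizationConfig

q_config = OVWeightQuantizationConfig(bits=4, sym=False, group_size=128, ratio=1.0)
model = OVModelForVisualCausalLM.from_pretrained(
    "OpenGVLab/InternVL3-2B",        # assumed source checkpoint
    export=True,
    quantization_config=q_config,
    trust_remote_code=True,
)
model.save_pretrained("internvl3-2b-int4-ov")
```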
openvino_detokenizer.bin
ADDED
@@ -0,0 +1,3 @@
version https://git-lfs.github.com/spec/v1
oid sha256:a0a0d75ca233db27305df00cf870e8cf0e06c6b391d5f22ea0cc5f7afeab38fe
size 2189806
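The `.bin` weights are stored as Git LFS pointers (version, sha256 oid, size); the diff shows the pointer, not the binary. A small sketch for verifying a downloaded blob against the pointer fields above:

```python
# Sketch: verify a downloaded LFS blob against its pointer's oid.
import hashlib

def sha256_of(path: str) -> str:
    h = hashlib.sha256()
    with open(path, "rb") as f:
        for chunk in iter(lambda: f.read(1 << 20), b""):
            h.update(chunk)
    return h.hexdigest()

# Expected value comes straight from the pointer file above.
assert sha256_of("openvino_detokenizer.bin") == "a0a0d75ca233db27305df00cf870e8cf0e06c6b391d5f22ea0cc5f7afeab38fe"
```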
openvino_detokenizer.xml
ADDED
@@ -0,0 +1,220 @@
<?xml version="1.0"?>
<net name="detokenizer" version="11">
<layers>
<layer id="0" name="Parameter_1631039" type="Parameter" version="opset1">
<data shape="?,?" element_type="i64" />
<output>
<port id="0" precision="I64" names="Parameter_1631039">
<dim>-1</dim>
<dim>-1</dim>
</port>
</output>
</layer>
<layer id="1" name="Convert_1631209" type="Convert" version="opset1">
<data destination_type="i32" />
<input>
<port id="0" precision="I64">
<dim>-1</dim>
<dim>-1</dim>
</port>
</input>
<output>
<port id="1" precision="I32">
<dim>-1</dim>
<dim>-1</dim>
</port>
</output>
</layer>
<layer id="2" name="Constant_1631041" type="Const" version="opset1">
<data element_type="i32" shape="151674" offset="0" size="606696" />
<output>
<port id="0" precision="I32">
<dim>151674</dim>
</port>
</output>
</layer>
<layer id="3" name="Constant_1631043" type="Const" version="opset1">
<data element_type="i32" shape="151674" offset="606696" size="606696" />
<output>
<port id="0" precision="I32">
<dim>151674</dim>
</port>
</output>
</layer>
<layer id="4" name="Constant_1631045" type="Const" version="opset1">
<data element_type="u8" shape="976322" offset="1213392" size="976322" />
<output>
<port id="0" precision="U8">
<dim>976322</dim>
</port>
</output>
</layer>
<layer id="5" name="Slice_1631050" type="Const" version="opset1">
<data element_type="i32" shape="23" offset="2189714" size="92" />
<output>
<port id="0" precision="I32">
<dim>23</dim>
</port>
</output>
</layer>
<layer id="6" name="VocabDecoder_1631052" type="VocabDecoder" version="extension">
<data skip_tokens="" />
<input>
<port id="0" precision="I32">
<dim>-1</dim>
<dim>-1</dim>
</port>
<port id="1" precision="I32">
<dim>151674</dim>
</port>
<port id="2" precision="I32">
<dim>151674</dim>
</port>
<port id="3" precision="U8">
<dim>976322</dim>
</port>
<port id="4" precision="I32">
<dim>23</dim>
</port>
</input>
<output>
<port id="5" precision="I32">
<dim>-1</dim>
</port>
<port id="6" precision="I32">
<dim>-1</dim>
</port>
<port id="7" precision="I32">
<dim>-1</dim>
</port>
<port id="8" precision="I32">
<dim>-1</dim>
</port>
<port id="9" precision="U8">
<dim>-1</dim>
</port>
</output>
</layer>
<layer id="7" name="FuzeRagged_1631053" type="FuzeRagged" version="extension">
<input>
<port id="0" precision="I32">
<dim>-1</dim>
</port>
<port id="1" precision="I32">
<dim>-1</dim>
</port>
<port id="2" precision="I32">
<dim>-1</dim>
</port>
<port id="3" precision="I32">
<dim>-1</dim>
</port>
</input>
<output>
<port id="4" precision="I32">
<dim>-1</dim>
</port>
<port id="5" precision="I32">
<dim>-1</dim>
</port>
</output>
</layer>
<layer id="8" name="UTF8Validate_1631054" type="UTF8Validate" version="extension">
<data replace_mode="true" />
<input>
<port id="0" precision="I32">
<dim>-1</dim>
</port>
<port id="1" precision="I32">
<dim>-1</dim>
</port>
<port id="2" precision="U8">
<dim>-1</dim>
</port>
</input>
<output>
<port id="3" precision="I32">
<dim>-1</dim>
</port>
<port id="4" precision="I32">
<dim>-1</dim>
</port>
<port id="5" precision="U8">
<dim>-1</dim>
</port>
</output>
</layer>
<layer id="9" name="StringTensorPack_1631055" type="StringTensorPack" version="opset15">
<input>
<port id="0" precision="I32">
<dim>-1</dim>
</port>
<port id="1" precision="I32">
<dim>-1</dim>
</port>
<port id="2" precision="U8">
<dim>-1</dim>
</port>
</input>
<output>
<port id="3" precision="STRING" names="Result_1631056,string_output">
<dim>-1</dim>
</port>
</output>
</layer>
<layer id="10" name="Result_1631056" type="Result" version="opset1" output_names="Result_1631056,string_output">
<input>
<port id="0" precision="STRING">
<dim>-1</dim>
</port>
</input>
</layer>
</layers>
<edges>
<edge from-layer="0" from-port="0" to-layer="1" to-port="0" />
<edge from-layer="1" from-port="1" to-layer="6" to-port="0" />
<edge from-layer="2" from-port="0" to-layer="6" to-port="1" />
<edge from-layer="3" from-port="0" to-layer="6" to-port="2" />
<edge from-layer="4" from-port="0" to-layer="6" to-port="3" />
<edge from-layer="5" from-port="0" to-layer="6" to-port="4" />
<edge from-layer="6" from-port="7" to-layer="7" to-port="2" />
<edge from-layer="6" from-port="9" to-layer="8" to-port="2" />
<edge from-layer="6" from-port="8" to-layer="7" to-port="3" />
<edge from-layer="6" from-port="6" to-layer="7" to-port="1" />
<edge from-layer="6" from-port="5" to-layer="7" to-port="0" />
<edge from-layer="7" from-port="4" to-layer="8" to-port="0" />
<edge from-layer="7" from-port="5" to-layer="8" to-port="1" />
<edge from-layer="8" from-port="3" to-layer="9" to-port="0" />
<edge from-layer="8" from-port="4" to-layer="9" to-port="1" />
<edge from-layer="8" from-port="5" to-layer="9" to-port="2" />
<edge from-layer="9" from-port="3" to-layer="10" to-port="0" />
</edges>
<rt_info>
<add_attention_mask value="True" />
<add_prefix_space />
<add_special_tokens value="True" />
<chat_template value="{%- if tools %} {{- '<|im_start|>system\n' }} {%- if messages[0]['role'] == 'system' %} {{- messages[0]['content'] }} {%- else %} {{- 'You are Qwen, created by Alibaba Cloud. You are a helpful assistant.' }} {%- endif %} {{- "\n\n# Tools\n\nYou may call one or more functions to assist with the user query.\n\nYou are provided with function signatures within <tools></tools> XML tags:\n<tools>" }} {%- for tool in tools %} {{- "\n" }} {{- tool | tojson }} {%- endfor %} {{- "\n</tools>\n\nFor each function call, return a json object with function name and arguments within <tool_call></tool_call> XML tags:\n<tool_call>\n{\"name\": <function-name>, \"arguments\": <args-json-object>}\n</tool_call><|im_end|>\n" }} {%- else %} {%- if messages[0]['role'] == 'system' %} {{- '<|im_start|>system\n' + messages[0]['content'] + '<|im_end|>\n' }} {%- else %} {{- '<|im_start|>system\nYou are Qwen, created by Alibaba Cloud. You are a helpful assistant.<|im_end|>\n' }} {%- endif %} {%- endif %} {%- for message in messages %} {%- if (message.role == "user") or (message.role == "system" and not loop.first) or (message.role == "assistant" and not message.tool_calls) %} {{- '<|im_start|>' + message.role + '\n' + message.content + '<|im_end|>' + '\n' }} {%- elif message.role == "assistant" %} {{- '<|im_start|>' + message.role }} {%- if message.content %} {{- '\n' + message.content }} {%- endif %} {%- for tool_call in message.tool_calls %} {%- if tool_call.function is defined %} {%- set tool_call = tool_call.function %} {%- endif %} {{- '\n<tool_call>\n{"name": "' }} {{- tool_call.name }} {{- '", "arguments": ' }} {{- tool_call.arguments | tojson }} {{- '}\n</tool_call>' }} {%- endfor %} {{- '<|im_end|>\n' }} {%- elif message.role == "tool" %} {%- if (loop.index0 == 0) or (messages[loop.index0 - 1].role != "tool") %} {{- '<|im_start|>user' }} {%- endif %} {{- '\n<tool_response>\n' }} {{- message.content }} {{- '\n</tool_response>' }} {%- if loop.last or (messages[loop.index0 + 1].role != "tool") %} {{- '<|im_end|>\n' }} {%- endif %} {%- endif %} {%- endfor %} {%- if add_generation_prompt %} {{- '<|im_start|>assistant\n' }} {%- endif %} " />
<clean_up_tokenization_spaces />
<detokenizer_input_type value="i64" />
<eos_token_id value="151645" />
<handle_special_tokens_with_re />
<max_length />
<number_of_inputs value="1" />
<openvino_tokenizers_version value="2025.1.0.0-523-710ddf14de8" />
<openvino_version value="2025.1.0-18503-6fec06580ab-releases/2025/1" />
<original_post_processor_template value="{"type": "ByteLevel", "add_prefix_space": false, "trim_offsets": false, "use_regex": false}" />
<original_tokenizer_class value="<class 'transformers.models.qwen2.tokenization_qwen2_fast.Qwen2TokenizerFast'>" />
<pad_token_id value="151643" />
<sentencepiece_version value="0.2.0" />
<skip_special_tokens value="True" />
<streaming_detokenizer value="False" />
<tiktoken_version value="0.9.0" />
<tokenizer_output_type value="i64" />
<tokenizers_version value="0.21.1" />
<transformers_version value="4.52.3" />
<use_max_padding value="False" />
<use_sentencepiece_backend value="False" />
<utf8_replace_mode value="replace" />
<with_detokenizer value="True" />
</rt_info>
</net>
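The detokenizer graph maps batches of token ids back to strings using the VocabDecoder, FuzeRagged, UTF8Validate and StringTensorPack ops provided by the openvino_tokenizers extension. A hedged sketch of running the converted tokenizer/detokenizer pair; the file names match this upload, while the call pattern and output names are assumptions based on how openvino_tokenizers models are typically used:

```python
# Hedged sketch: run the converted tokenizer and detokenizer with OpenVINO.
import numpy as np
import openvino as ov
import openvino_tokenizers  # noqa: F401  (importing registers the custom ops)

core = ov.Core()
tokenizer = core.compile_model("openvino_tokenizer.xml", "CPU")
detokenizer = core.compile_model("openvino_detokenizer.xml", "CPU")

encoded = tokenizer(np.array(["Hello, world!"]))   # string input tensor (assumption)
ids = encoded["input_ids"]                          # output names per openvino_tokenizers convention
decoded = detokenizer(ids)["string_output"]         # "string_output" is named in the XML above
print(ids, decoded)
```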
openvino_language_model.bin
ADDED
@@ -0,0 +1,3 @@
version https://git-lfs.github.com/spec/v1
oid sha256:5a63f29f2125e45a9b6c816773067d417496884176e0460766020d1644d30d9b
size 914579394
openvino_language_model.xml
ADDED
The diff for this file is too large to render.
See raw diff
openvino_text_embeddings_model.bin
ADDED
@@ -0,0 +1,3 @@
version https://git-lfs.github.com/spec/v1
oid sha256:2c018fddd902ef9763b660558c104cfed02596a34e3f5301673b62e889fc5048
size 233274616
openvino_text_embeddings_model.xml
ADDED
@@ -0,0 +1,177 @@
<?xml version="1.0"?>
<net name="Model3" version="11">
<layers>
<layer id="0" name="input" type="Parameter" version="opset1">
<data shape="?,?" element_type="i64" />
<output>
<port id="0" precision="I64" names="input">
<dim>-1</dim>
<dim>-1</dim>
</port>
</output>
</layer>
<layer id="1" name="self.weight" type="Const" version="opset1">
<data element_type="i8" shape="151674, 1536" offset="0" size="232971264" />
<output>
<port id="0" precision="I8">
<dim>151674</dim>
<dim>1536</dim>
</port>
</output>
</layer>
<layer id="2" name="Convert_1145818" type="Convert" version="opset1">
<data destination_type="f16" />
<input>
<port id="0" precision="I8">
<dim>151674</dim>
<dim>1536</dim>
</port>
</input>
<output>
<port id="1" precision="FP16">
<dim>151674</dim>
<dim>1536</dim>
</port>
</output>
</layer>
<layer id="3" name="self.weight/scale" type="Const" version="opset1">
<data element_type="f16" shape="151674, 1" offset="232971264" size="303348" />
<output>
<port id="0" precision="FP16">
<dim>151674</dim>
<dim>1</dim>
</port>
</output>
</layer>
<layer id="4" name="self.weight/fq_weights_0" type="Multiply" version="opset1">
<data auto_broadcast="numpy" />
<input>
<port id="0" precision="FP16">
<dim>151674</dim>
<dim>1536</dim>
</port>
<port id="1" precision="FP16">
<dim>151674</dim>
<dim>1</dim>
</port>
</input>
<output>
<port id="2" precision="FP16">
<dim>151674</dim>
<dim>1536</dim>
</port>
</output>
</layer>
<layer id="5" name="ov_ext::embedding/Convert" type="Convert" version="opset1">
<data destination_type="f32" />
<rt_info>
<attribute name="decompression" version="0" />
</rt_info>
<input>
<port id="0" precision="FP16">
<dim>151674</dim>
<dim>1536</dim>
</port>
</input>
<output>
<port id="1" precision="FP32">
<dim>151674</dim>
<dim>1536</dim>
</port>
</output>
</layer>
<layer id="6" name="ov_ext::embedding/Convert_1" type="Convert" version="opset1">
<data destination_type="i32" />
<input>
<port id="0" precision="I64">
<dim>-1</dim>
<dim>-1</dim>
</port>
</input>
<output>
<port id="1" precision="I32">
<dim>-1</dim>
<dim>-1</dim>
</port>
</output>
</layer>
<layer id="7" name="ov_ext::embedding/Constant" type="Const" version="opset1">
<data element_type="i32" shape="" offset="233274612" size="4" />
<output>
<port id="0" precision="I32" />
</output>
</layer>
<layer id="8" name="ov_ext::embedding/Gather" type="Gather" version="opset8">
<data batch_dims="0" />
<input>
<port id="0" precision="FP32">
<dim>151674</dim>
<dim>1536</dim>
</port>
<port id="1" precision="I32">
<dim>-1</dim>
<dim>-1</dim>
</port>
<port id="2" precision="I32" />
</input>
<output>
<port id="3" precision="FP32" names="inputs_embeds">
<dim>-1</dim>
<dim>-1</dim>
<dim>1536</dim>
</port>
</output>
</layer>
<layer id="9" name="Result_21649" type="Result" version="opset1" output_names="inputs_embeds">
<input>
<port id="0" precision="FP32">
<dim>-1</dim>
<dim>-1</dim>
<dim>1536</dim>
</port>
</input>
</layer>
</layers>
<edges>
<edge from-layer="0" from-port="0" to-layer="6" to-port="0" />
<edge from-layer="1" from-port="0" to-layer="2" to-port="0" />
<edge from-layer="2" from-port="1" to-layer="4" to-port="0" />
<edge from-layer="3" from-port="0" to-layer="4" to-port="1" />
<edge from-layer="4" from-port="2" to-layer="5" to-port="0" />
<edge from-layer="5" from-port="1" to-layer="8" to-port="0" />
<edge from-layer="6" from-port="1" to-layer="8" to-port="1" />
<edge from-layer="7" from-port="0" to-layer="8" to-port="2" />
<edge from-layer="8" from-port="3" to-layer="9" to-port="0" />
</edges>
<rt_info>
<Runtime_version value="2025.1.0-18503-6fec06580ab-releases/2025/1" />
<conversion_parameters>
<framework value="pytorch" />
<is_python_object value="True" />
</conversion_parameters>
<nncf>
<friendly_names_were_updated value="True" />
<weight_compression>
<advanced_parameters value="{'statistics_path': None, 'awq_params': {'subset_size': 32, 'percent_to_apply': 0.002, 'alpha_min': 0.0, 'alpha_max': 1.0, 'steps': 100}, 'scale_estimation_params': {'subset_size': 64, 'initial_steps': 5, 'scale_steps': 5, 'weight_penalty': -1.0}, 'gptq_params': {'damp_percent': 0.1, 'block_size': 128, 'subset_size': 128}, 'lora_correction_params': {'adapter_rank': 8, 'num_iterations': 3, 'apply_regularization': True, 'subset_size': 128, 'use_int8_adapters': True}}" />
<all_layers value="False" />
<awq value="False" />
<backup_mode value="int8_asym" />
<gptq value="False" />
<group_size value="-1" />
<ignored_scope value="[]" />
<lora_correction value="False" />
<mode value="int8_sym" />
<ratio value="1.0" />
<scale_estimation value="False" />
<sensitivity_metric value="weight_quantization_error" />
</weight_compression>
</nncf>
<optimum>
<nncf_version value="2.15.0" />
<optimum_intel_version value="1.24.0.dev0+dba7dce" />
<optimum_version value="1.25.3" />
<pytorch_version value="2.6.0" />
<transformers_version value="4.52.3" />
</optimum>
</rt_info>
</net>
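This graph is just an INT8-symmetric-compressed embedding table: the i8 weights (151674 x 1536) are multiplied by a per-row f16 scale, converted to f32, and then a Gather over the input ids produces `inputs_embeds`. A numpy sketch of the same decompress-and-gather computation; a tiny toy table stands in for the real one so the example stays light:

```python
# Numpy sketch of the graph above: per-row INT8 decompression, then Gather on token ids.
import numpy as np

weight_i8 = np.array([[10, -20], [30, 40], [-50, 60]], dtype=np.int8)  # "self.weight" (real: 151674 x 1536)
scale_f16 = np.array([[0.01], [0.02], [0.03]], dtype=np.float16)       # "self.weight/scale" (real: 151674 x 1)

table_f32 = (weight_i8.astype(np.float16) * scale_f16).astype(np.float32)  # decompression Multiply + Convert
input_ids = np.array([[2, 0, 1]])                                          # (batch, seq), i64 in the graph
inputs_embeds = table_f32[input_ids]                                       # Gather over axis 0
print(inputs_embeds.shape)  # (1, 3, 2); the real model yields (batch, seq, 1536)
```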
openvino_tokenizer.bin
ADDED
@@ -0,0 +1,3 @@
version https://git-lfs.github.com/spec/v1
oid sha256:5365a0a3c9f496090c293249d6b76fac76f52910e7d2dbbccc3393aea97c1467
size 5589034
openvino_tokenizer.xml
ADDED
@@ -0,0 +1,721 @@
<?xml version="1.0"?>
<net name="tokenizer" version="11">
<layers>
<layer id="0" name="Parameter_1630920" type="Parameter" version="opset1">
<data shape="?" element_type="string" />
<output>
<port id="0" precision="STRING" names="Parameter_1630920">
<dim>-1</dim>
</port>
</output>
</layer>
<layer id="1" name="Constant_1630926" type="Const" version="opset1">
<data element_type="i64" shape="" offset="0" size="8" />
<output>
<port id="0" precision="I64" />
</output>
</layer>
<layer id="2" name="StringTensorUnpack_1630921" type="StringTensorUnpack" version="opset15">
<input>
<port id="0" precision="STRING">
<dim>-1</dim>
</port>
</input>
<output>
<port id="1" precision="I32">
<dim>-1</dim>
</port>
<port id="2" precision="I32">
<dim>-1</dim>
</port>
<port id="3" precision="U8">
<dim>-1</dim>
</port>
</output>
</layer>
<layer id="3" name="ShapeOf_1630922" type="ShapeOf" version="opset3">
<data output_type="i64" />
<input>
<port id="0" precision="I32">
<dim>-1</dim>
</port>
</input>
<output>
<port id="1" precision="I64">
<dim>1</dim>
</port>
</output>
</layer>
<layer id="4" name="Constant_1630923" type="Const" version="opset1">
<data element_type="i64" shape="" offset="0" size="8" />
<output>
<port id="0" precision="I64" />
</output>
</layer>
<layer id="5" name="Constant_1630924" type="Const" version="opset1">
<data element_type="i64" shape="" offset="0" size="8" />
<output>
<port id="0" precision="I64" />
</output>
</layer>
<layer id="6" name="Gather_1630925" type="Gather" version="opset8">
<data batch_dims="0" />
<input>
<port id="0" precision="I64">
<dim>1</dim>
</port>
<port id="1" precision="I64" />
<port id="2" precision="I64" />
</input>
<output>
<port id="3" precision="I64" />
</output>
</layer>
<layer id="7" name="Constant_1630927" type="Const" version="opset1">
<data element_type="i64" shape="" offset="8" size="8" />
<output>
<port id="0" precision="I64" />
</output>
</layer>
<layer id="8" name="Range_1630928" type="Range" version="opset4">
<data output_type="i32" />
<input>
<port id="0" precision="I64" />
<port id="1" precision="I64" />
<port id="2" precision="I64" />
</input>
<output>
<port id="3" precision="I32">
<dim>-1</dim>
</port>
</output>
</layer>
<layer id="9" name="Constant_1630929" type="Const" version="opset1">
<data element_type="i64" shape="" offset="8" size="8" />
<output>
<port id="0" precision="I64" />
</output>
</layer>
<layer id="10" name="Constant_1630930" type="Const" version="opset1">
<data element_type="i64" shape="" offset="8" size="8" />
<output>
<port id="0" precision="I64" />
</output>
</layer>
<layer id="11" name="Add_1630931" type="Add" version="opset1">
<data auto_broadcast="numpy" />
<input>
<port id="0" precision="I64" />
<port id="1" precision="I64" />
</input>
<output>
<port id="2" precision="I64" />
</output>
</layer>
<layer id="12" name="Constant_1630932" type="Const" version="opset1">
<data element_type="i64" shape="" offset="8" size="8" />
<output>
<port id="0" precision="I64" />
</output>
</layer>
<layer id="13" name="Range_1630933" type="Range" version="opset4">
<data output_type="i32" />
<input>
<port id="0" precision="I64" />
<port id="1" precision="I64" />
<port id="2" precision="I64" />
</input>
<output>
<port id="3" precision="I32">
<dim>-1</dim>
</port>
</output>
</layer>
<layer id="14" name="Constant_1630995" type="Const" version="opset1">
<data element_type="u8" shape="552" offset="16" size="552" />
<output>
<port id="0" precision="U8">
<dim>552</dim>
</port>
</output>
</layer>
<layer id="15" name="SpecialTokensSplit_1630996" type="SpecialTokensSplit" version="extension">
<input>
<port id="0" precision="I32">
<dim>-1</dim>
</port>
<port id="1" precision="I32">
<dim>-1</dim>
</port>
<port id="2" precision="I32">
<dim>-1</dim>
</port>
<port id="3" precision="I32">
<dim>-1</dim>
</port>
<port id="4" precision="U8">
<dim>-1</dim>
</port>
<port id="5" precision="U8">
<dim>552</dim>
</port>
</input>
<output>
<port id="6" precision="I32">
<dim>-1</dim>
</port>
<port id="7" precision="I32">
<dim>-1</dim>
</port>
<port id="8" precision="I32">
<dim>-1</dim>
</port>
<port id="9" precision="I32">
<dim>-1</dim>
</port>
<port id="10" precision="U8">
<dim>-1</dim>
</port>
<port id="11" precision="BOOL">
<dim>-1</dim>
</port>
</output>
</layer>
<layer id="16" name="CharsMapNormalization_1630997" type="CharsMapNormalization" version="extension">
<data add_dummy_prefix="false" remove_extra_whitespaces="false" escape_whitespaces="false" normalization_form="nfc" case_fold="false" nmt="false" />
<input>
<port id="0" precision="I32">
<dim>-1</dim>
</port>
<port id="1" precision="I32">
<dim>-1</dim>
</port>
<port id="2" precision="U8">
<dim>-1</dim>
</port>
<port id="3" precision="BOOL">
<dim>-1</dim>
</port>
</input>
<output>
<port id="4" precision="I32">
<dim>-1</dim>
</port>
<port id="5" precision="I32">
<dim>-1</dim>
</port>
<port id="6" precision="U8">
<dim>-1</dim>
</port>
<port id="7" precision="BOOL">
<dim>-1</dim>
</port>
</output>
</layer>
<layer id="17" name="Constant_1630999" type="Const" version="opset1">
<data element_type="u8" shape="110" offset="568" size="110" />
<output>
<port id="0" precision="U8">
<dim>110</dim>
</port>
</output>
</layer>
<layer id="18" name="RegexSplit_1631000" type="RegexSplit" version="extension">
<data behaviour="isolate" invert="false" max_splits="-1" />
<input>
<port id="0" precision="I32">
<dim>-1</dim>
</port>
<port id="1" precision="I32">
<dim>-1</dim>
</port>
<port id="2" precision="I32">
<dim>-1</dim>
</port>
<port id="3" precision="I32">
<dim>-1</dim>
</port>
<port id="4" precision="U8">
<dim>-1</dim>
</port>
<port id="5" precision="BOOL">
<dim>-1</dim>
</port>
<port id="6" precision="U8">
<dim>110</dim>
</port>
</input>
<output>
<port id="7" precision="I32">
<dim>-1</dim>
</port>
<port id="8" precision="I32">
<dim>-1</dim>
</port>
<port id="9" precision="I32">
<dim>-1</dim>
</port>
<port id="10" precision="I32">
<dim>-1</dim>
</port>
<port id="11" precision="U8">
<dim>-1</dim>
</port>
<port id="12" precision="BOOL">
<dim>-1</dim>
</port>
</output>
</layer>
<layer id="19" name="Constant_1631002" type="Const" version="opset1">
<data element_type="i32" shape="151674" offset="678" size="606696" />
<output>
<port id="0" precision="I32">
<dim>151674</dim>
</port>
</output>
</layer>
<layer id="20" name="Constant_1631004" type="Const" version="opset1">
<data element_type="i32" shape="151674" offset="607374" size="606696" />
<output>
<port id="0" precision="I32">
<dim>151674</dim>
</port>
</output>
</layer>
<layer id="21" name="Constant_1631006" type="Const" version="opset1">
<data element_type="u8" shape="976322" offset="1214070" size="976322" />
<output>
<port id="0" precision="U8">
<dim>976322</dim>
</port>
</output>
</layer>
<layer id="22" name="Constant_1631014" type="Const" version="opset1">
<data element_type="i32" shape="151387" offset="2190392" size="605548" />
<output>
<port id="0" precision="I32">
<dim>151387</dim>
</port>
</output>
</layer>
<layer id="23" name="Constant_1631016" type="Const" version="opset1">
<data element_type="i32" shape="151387" offset="2795940" size="605548" />
<output>
<port id="0" precision="I32">
<dim>151387</dim>
</port>
</output>
</layer>
<layer id="24" name="Constant_1631018" type="Const" version="opset1">
<data element_type="u8" shape="491359" offset="3401488" size="491359" />
<output>
<port id="0" precision="U8">
<dim>491359</dim>
</port>
</output>
</layer>
<layer id="25" name="Constant_1631020" type="Const" version="opset1">
<data element_type="i32" shape="151387" offset="3892847" size="605548" />
<output>
<port id="0" precision="I32">
<dim>151387</dim>
</port>
</output>
</layer>
<layer id="26" name="Constant_1631022" type="Const" version="opset1">
<data element_type="i32" shape="151387" offset="4498395" size="605548" />
<output>
<port id="0" precision="I32">
<dim>151387</dim>
</port>
</output>
</layer>
<layer id="27" name="Constant_1631024" type="Const" version="opset1">
<data element_type="u8" shape="484354" offset="5103943" size="484354" />
<output>
<port id="0" precision="U8">
<dim>484354</dim>
</port>
</output>
</layer>
<layer id="28" name="Constant_1631008" type="Const" version="opset1">
<data element_type="i32" shape="31" offset="5588297" size="124" />
<output>
<port id="0" precision="I32">
<dim>31</dim>
</port>
</output>
</layer>
<layer id="29" name="Constant_1631010" type="Const" version="opset1">
<data element_type="i32" shape="31" offset="5588421" size="124" />
<output>
<port id="0" precision="I32">
<dim>31</dim>
</port>
</output>
</layer>
<layer id="30" name="Constant_1631012" type="Const" version="opset1">
<data element_type="u8" shape="353" offset="5588545" size="353" />
<output>
<port id="0" precision="U8">
<dim>353</dim>
</port>
</output>
</layer>
<layer id="31" name="Constant_1631025" type="Const" version="opset1">
<data element_type="i32" shape="31" offset="5588898" size="124" />
<output>
<port id="0" precision="I32">
<dim>31</dim>
</port>
</output>
</layer>
<layer id="32" name="BPETokenizer_1631026" type="BPETokenizer" version="extension">
<data unk_token="" fuse_unk="false" suffix_indicator="" end_suffix="" byte_fallback="false" cache_capacity="30328" />
<input>
<port id="0" precision="I32">
<dim>-1</dim>
</port>
<port id="1" precision="I32">
<dim>-1</dim>
</port>
<port id="2" precision="I32">
<dim>-1</dim>
</port>
<port id="3" precision="I32">
<dim>-1</dim>
</port>
<port id="4" precision="U8">
<dim>-1</dim>
</port>
<port id="5" precision="I32">
<dim>151674</dim>
</port>
<port id="6" precision="I32">
<dim>151674</dim>
</port>
<port id="7" precision="U8">
<dim>976322</dim>
</port>
<port id="8" precision="I32">
<dim>151387</dim>
</port>
<port id="9" precision="I32">
<dim>151387</dim>
</port>
<port id="10" precision="U8">
<dim>491359</dim>
</port>
<port id="11" precision="I32">
<dim>151387</dim>
</port>
<port id="12" precision="I32">
<dim>151387</dim>
</port>
<port id="13" precision="U8">
<dim>484354</dim>
</port>
<port id="14" precision="I32">
<dim>31</dim>
</port>
<port id="15" precision="I32">
<dim>31</dim>
</port>
<port id="16" precision="U8">
<dim>353</dim>
</port>
<port id="17" precision="I32">
<dim>31</dim>
</port>
</input>
<output>
<port id="18" precision="I32">
<dim>-1</dim>
</port>
<port id="19" precision="I32">
<dim>-1</dim>
</port>
<port id="20" precision="I32">
<dim>-1</dim>
</port>
</output>
</layer>
<layer id="33" name="Subtract_1631027" type="Subtract" version="opset1">
<data auto_broadcast="numpy" />
<input>
<port id="0" precision="I32">
<dim>-1</dim>
</port>
<port id="1" precision="I32">
<dim>-1</dim>
</port>
</input>
<output>
<port id="2" precision="I32">
<dim>-1</dim>
</port>
</output>
</layer>
<layer id="34" name="Constant_1631028" type="Const" version="opset1">
<data element_type="i32" shape="" offset="5589022" size="4" />
<output>
<port id="0" precision="I32" />
</output>
</layer>
<layer id="35" name="Minimum_1631029" type="Minimum" version="opset1">
<data auto_broadcast="numpy" />
<input>
<port id="0" precision="I32">
<dim>-1</dim>
</port>
<port id="1" precision="I32" />
</input>
<output>
<port id="2" precision="I32">
<dim>-1</dim>
</port>
</output>
</layer>
<layer id="36" name="Subtract_1631030" type="Subtract" version="opset1">
<data auto_broadcast="numpy" />
<input>
<port id="0" precision="I32">
<dim>-1</dim>
</port>
<port id="1" precision="I32">
<dim>-1</dim>
</port>
</input>
<output>
<port id="2" precision="I32">
<dim>-1</dim>
</port>
</output>
</layer>
<layer id="37" name="Subtract_1631031" type="Subtract" version="opset1">
<data auto_broadcast="numpy" />
<input>
<port id="0" precision="I32">
<dim>-1</dim>
</port>
+
<port id="1" precision="I32">
|
502 |
+
<dim>-1</dim>
|
503 |
+
</port>
|
504 |
+
</input>
|
505 |
+
<output>
|
506 |
+
<port id="2" precision="I32">
|
507 |
+
<dim>-1</dim>
|
508 |
+
</port>
|
509 |
+
</output>
|
510 |
+
</layer>
|
511 |
+
<layer id="38" name="Constant_1631032" type="Const" version="opset1">
|
512 |
+
<data element_type="i32" shape="" offset="5589026" size="4" />
|
513 |
+
<output>
|
514 |
+
<port id="0" precision="I32" />
|
515 |
+
</output>
|
516 |
+
</layer>
|
517 |
+
<layer id="39" name="ReduceMax_1631033" type="ReduceMax" version="opset1">
|
518 |
+
<data keep_dims="false" />
|
519 |
+
<input>
|
520 |
+
<port id="0" precision="I32">
|
521 |
+
<dim>-1</dim>
|
522 |
+
</port>
|
523 |
+
<port id="1" precision="I32" />
|
524 |
+
</input>
|
525 |
+
<output>
|
526 |
+
<port id="2" precision="I32" />
|
527 |
+
</output>
|
528 |
+
</layer>
|
529 |
+
<layer id="40" name="Constant_1631034" type="Const" version="opset1">
|
530 |
+
<data element_type="i32" shape="" offset="5589030" size="4" />
|
531 |
+
<output>
|
532 |
+
<port id="0" precision="I32" />
|
533 |
+
</output>
|
534 |
+
</layer>
|
535 |
+
<layer id="41" name="RaggedToDense_1631035" type="RaggedToDense" version="extension">
|
536 |
+
<data pad_right="false" m_pad_max_length="false" />
|
537 |
+
<input>
|
538 |
+
<port id="0" precision="I32">
|
539 |
+
<dim>-1</dim>
|
540 |
+
</port>
|
541 |
+
<port id="1" precision="I32">
|
542 |
+
<dim>-1</dim>
|
543 |
+
</port>
|
544 |
+
<port id="2" precision="I32">
|
545 |
+
<dim>-1</dim>
|
546 |
+
</port>
|
547 |
+
<port id="3" precision="I32" />
|
548 |
+
<port id="4" precision="I32" />
|
549 |
+
</input>
|
550 |
+
<output>
|
551 |
+
<port id="5" precision="I32">
|
552 |
+
<dim>-1</dim>
|
553 |
+
<dim>-1</dim>
|
554 |
+
</port>
|
555 |
+
<port id="6" precision="BOOL">
|
556 |
+
<dim>-1</dim>
|
557 |
+
<dim>-1</dim>
|
558 |
+
</port>
|
559 |
+
</output>
|
560 |
+
</layer>
|
561 |
+
<layer id="42" name="Convert_1631036" type="Convert" version="opset1">
|
562 |
+
<data destination_type="i32" />
|
563 |
+
<input>
|
564 |
+
<port id="0" precision="BOOL">
|
565 |
+
<dim>-1</dim>
|
566 |
+
<dim>-1</dim>
|
567 |
+
</port>
|
568 |
+
</input>
|
569 |
+
<output>
|
570 |
+
<port id="1" precision="I32">
|
571 |
+
<dim>-1</dim>
|
572 |
+
<dim>-1</dim>
|
573 |
+
</port>
|
574 |
+
</output>
|
575 |
+
</layer>
|
576 |
+
<layer id="43" name="Convert_1631036.0" type="Convert" version="opset1">
|
577 |
+
<data destination_type="i64" />
|
578 |
+
<input>
|
579 |
+
<port id="0" precision="I32">
|
580 |
+
<dim>-1</dim>
|
581 |
+
<dim>-1</dim>
|
582 |
+
</port>
|
583 |
+
</input>
|
584 |
+
<output>
|
585 |
+
<port id="1" precision="I64" names="attention_mask">
|
586 |
+
<dim>-1</dim>
|
587 |
+
<dim>-1</dim>
|
588 |
+
</port>
|
589 |
+
</output>
|
590 |
+
</layer>
|
591 |
+
<layer id="45" name="RaggedToDense_1631035.0" type="Convert" version="opset1">
|
592 |
+
<data destination_type="i64" />
|
593 |
+
<input>
|
594 |
+
<port id="0" precision="I32">
|
595 |
+
<dim>-1</dim>
|
596 |
+
<dim>-1</dim>
|
597 |
+
</port>
|
598 |
+
</input>
|
599 |
+
<output>
|
600 |
+
<port id="1" precision="I64" names="input_ids">
|
601 |
+
<dim>-1</dim>
|
602 |
+
<dim>-1</dim>
|
603 |
+
</port>
|
604 |
+
</output>
|
605 |
+
</layer>
|
606 |
+
<layer id="46" name="Result_1631037" type="Result" version="opset1" output_names="input_ids">
|
607 |
+
<input>
|
608 |
+
<port id="0" precision="I64">
|
609 |
+
<dim>-1</dim>
|
610 |
+
<dim>-1</dim>
|
611 |
+
</port>
|
612 |
+
</input>
|
613 |
+
</layer>
|
614 |
+
<layer id="44" name="Result_1631038" type="Result" version="opset1" output_names="attention_mask">
|
615 |
+
<input>
|
616 |
+
<port id="0" precision="I64">
|
617 |
+
<dim>-1</dim>
|
618 |
+
<dim>-1</dim>
|
619 |
+
</port>
|
620 |
+
</input>
|
621 |
+
</layer>
|
622 |
+
</layers>
|
623 |
+
<edges>
|
624 |
+
<edge from-layer="0" from-port="0" to-layer="2" to-port="0" />
|
625 |
+
<edge from-layer="1" from-port="0" to-layer="8" to-port="0" />
|
626 |
+
<edge from-layer="2" from-port="1" to-layer="3" to-port="0" />
|
627 |
+
<edge from-layer="2" from-port="3" to-layer="15" to-port="4" />
|
628 |
+
<edge from-layer="2" from-port="2" to-layer="15" to-port="3" />
|
629 |
+
<edge from-layer="2" from-port="1" to-layer="15" to-port="2" />
|
630 |
+
<edge from-layer="3" from-port="1" to-layer="6" to-port="0" />
|
631 |
+
<edge from-layer="4" from-port="0" to-layer="6" to-port="1" />
|
632 |
+
<edge from-layer="5" from-port="0" to-layer="6" to-port="2" />
|
633 |
+
<edge from-layer="6" from-port="3" to-layer="11" to-port="0" />
|
634 |
+
<edge from-layer="6" from-port="3" to-layer="8" to-port="1" />
|
635 |
+
<edge from-layer="7" from-port="0" to-layer="8" to-port="2" />
|
636 |
+
<edge from-layer="8" from-port="3" to-layer="15" to-port="0" />
|
637 |
+
<edge from-layer="9" from-port="0" to-layer="13" to-port="0" />
|
638 |
+
<edge from-layer="10" from-port="0" to-layer="11" to-port="1" />
|
639 |
+
<edge from-layer="11" from-port="2" to-layer="13" to-port="1" />
|
640 |
+
<edge from-layer="12" from-port="0" to-layer="13" to-port="2" />
|
641 |
+
<edge from-layer="13" from-port="3" to-layer="15" to-port="1" />
|
642 |
+
<edge from-layer="14" from-port="0" to-layer="15" to-port="5" />
|
643 |
+
<edge from-layer="15" from-port="8" to-layer="16" to-port="0" />
|
644 |
+
<edge from-layer="15" from-port="7" to-layer="18" to-port="1" />
|
645 |
+
<edge from-layer="15" from-port="6" to-layer="18" to-port="0" />
|
646 |
+
<edge from-layer="15" from-port="11" to-layer="16" to-port="3" />
|
647 |
+
<edge from-layer="15" from-port="10" to-layer="16" to-port="2" />
|
648 |
+
<edge from-layer="15" from-port="9" to-layer="16" to-port="1" />
|
649 |
+
<edge from-layer="16" from-port="4" to-layer="18" to-port="2" />
|
650 |
+
<edge from-layer="16" from-port="5" to-layer="18" to-port="3" />
|
651 |
+
<edge from-layer="16" from-port="6" to-layer="18" to-port="4" />
|
652 |
+
<edge from-layer="16" from-port="7" to-layer="18" to-port="5" />
|
653 |
+
<edge from-layer="17" from-port="0" to-layer="18" to-port="6" />
|
654 |
+
<edge from-layer="18" from-port="11" to-layer="32" to-port="4" />
|
655 |
+
<edge from-layer="18" from-port="10" to-layer="32" to-port="3" />
|
656 |
+
<edge from-layer="18" from-port="8" to-layer="32" to-port="1" />
|
657 |
+
<edge from-layer="18" from-port="7" to-layer="32" to-port="0" />
|
658 |
+
<edge from-layer="18" from-port="9" to-layer="32" to-port="2" />
|
659 |
+
<edge from-layer="19" from-port="0" to-layer="32" to-port="5" />
|
660 |
+
<edge from-layer="20" from-port="0" to-layer="32" to-port="6" />
|
661 |
+
<edge from-layer="21" from-port="0" to-layer="32" to-port="7" />
|
662 |
+
<edge from-layer="22" from-port="0" to-layer="32" to-port="8" />
|
663 |
+
<edge from-layer="23" from-port="0" to-layer="32" to-port="9" />
|
664 |
+
<edge from-layer="24" from-port="0" to-layer="32" to-port="10" />
|
665 |
+
<edge from-layer="25" from-port="0" to-layer="32" to-port="11" />
|
666 |
+
<edge from-layer="26" from-port="0" to-layer="32" to-port="12" />
|
667 |
+
<edge from-layer="27" from-port="0" to-layer="32" to-port="13" />
|
668 |
+
<edge from-layer="28" from-port="0" to-layer="32" to-port="14" />
|
669 |
+
<edge from-layer="29" from-port="0" to-layer="32" to-port="15" />
|
670 |
+
<edge from-layer="30" from-port="0" to-layer="32" to-port="16" />
|
671 |
+
<edge from-layer="31" from-port="0" to-layer="32" to-port="17" />
|
672 |
+
<edge from-layer="32" from-port="19" to-layer="33" to-port="0" />
|
673 |
+
<edge from-layer="32" from-port="18" to-layer="33" to-port="1" />
|
674 |
+
<edge from-layer="32" from-port="20" to-layer="41" to-port="2" />
|
675 |
+
<edge from-layer="32" from-port="19" to-layer="36" to-port="0" />
|
676 |
+
<edge from-layer="32" from-port="19" to-layer="41" to-port="1" />
|
677 |
+
<edge from-layer="32" from-port="19" to-layer="37" to-port="0" />
|
678 |
+
<edge from-layer="33" from-port="2" to-layer="35" to-port="0" />
|
679 |
+
<edge from-layer="34" from-port="0" to-layer="35" to-port="1" />
|
680 |
+
<edge from-layer="35" from-port="2" to-layer="36" to-port="1" />
|
681 |
+
<edge from-layer="36" from-port="2" to-layer="37" to-port="1" />
|
682 |
+
<edge from-layer="36" from-port="2" to-layer="41" to-port="0" />
|
683 |
+
<edge from-layer="37" from-port="2" to-layer="39" to-port="0" />
|
684 |
+
<edge from-layer="38" from-port="0" to-layer="39" to-port="1" />
|
685 |
+
<edge from-layer="39" from-port="2" to-layer="41" to-port="3" />
|
686 |
+
<edge from-layer="40" from-port="0" to-layer="41" to-port="4" />
|
687 |
+
<edge from-layer="41" from-port="6" to-layer="42" to-port="0" />
|
688 |
+
<edge from-layer="41" from-port="5" to-layer="45" to-port="0" />
|
689 |
+
<edge from-layer="42" from-port="1" to-layer="43" to-port="0" />
|
690 |
+
<edge from-layer="43" from-port="1" to-layer="44" to-port="0" />
|
691 |
+
<edge from-layer="45" from-port="1" to-layer="46" to-port="0" />
|
692 |
+
</edges>
|
693 |
+
<rt_info>
|
694 |
+
<add_attention_mask value="True" />
|
695 |
+
<add_prefix_space />
|
696 |
+
<add_special_tokens value="True" />
|
697 |
+
<chat_template value="{%- if tools %} {{- '<|im_start|>system\n' }} {%- if messages[0]['role'] == 'system' %} {{- messages[0]['content'] }} {%- else %} {{- 'You are Qwen, created by Alibaba Cloud. You are a helpful assistant.' }} {%- endif %} {{- "\n\n# Tools\n\nYou may call one or more functions to assist with the user query.\n\nYou are provided with function signatures within <tools></tools> XML tags:\n<tools>" }} {%- for tool in tools %} {{- "\n" }} {{- tool | tojson }} {%- endfor %} {{- "\n</tools>\n\nFor each function call, return a json object with function name and arguments within <tool_call></tool_call> XML tags:\n<tool_call>\n{\"name\": <function-name>, \"arguments\": <args-json-object>}\n</tool_call><|im_end|>\n" }} {%- else %} {%- if messages[0]['role'] == 'system' %} {{- '<|im_start|>system\n' + messages[0]['content'] + '<|im_end|>\n' }} {%- else %} {{- '<|im_start|>system\nYou are Qwen, created by Alibaba Cloud. You are a helpful assistant.<|im_end|>\n' }} {%- endif %} {%- endif %} {%- for message in messages %} {%- if (message.role == "user") or (message.role == "system" and not loop.first) or (message.role == "assistant" and not message.tool_calls) %} {{- '<|im_start|>' + message.role + '\n' + message.content + '<|im_end|>' + '\n' }} {%- elif message.role == "assistant" %} {{- '<|im_start|>' + message.role }} {%- if message.content %} {{- '\n' + message.content }} {%- endif %} {%- for tool_call in message.tool_calls %} {%- if tool_call.function is defined %} {%- set tool_call = tool_call.function %} {%- endif %} {{- '\n<tool_call>\n{"name": "' }} {{- tool_call.name }} {{- '", "arguments": ' }} {{- tool_call.arguments | tojson }} {{- '}\n</tool_call>' }} {%- endfor %} {{- '<|im_end|>\n' }} {%- elif message.role == "tool" %} {%- if (loop.index0 == 0) or (messages[loop.index0 - 1].role != "tool") %} {{- '<|im_start|>user' }} {%- endif %} {{- '\n<tool_response>\n' }} {{- message.content }} {{- '\n</tool_response>' }} {%- if loop.last or (messages[loop.index0 + 1].role != "tool") %} {{- '<|im_end|>\n' }} {%- endif %} {%- endif %} {%- endfor %} {%- if add_generation_prompt %} {{- '<|im_start|>assistant\n' }} {%- endif %} " />
|
698 |
+
<clean_up_tokenization_spaces />
|
699 |
+
<detokenizer_input_type value="i64" />
|
700 |
+
<eos_token_id value="151645" />
|
701 |
+
<handle_special_tokens_with_re />
|
702 |
+
<max_length />
|
703 |
+
<number_of_inputs value="1" />
|
704 |
+
<openvino_tokenizers_version value="2025.1.0.0-523-710ddf14de8" />
|
705 |
+
<openvino_version value="2025.1.0-18503-6fec06580ab-releases/2025/1" />
|
706 |
+
<original_post_processor_template value="{"type": "ByteLevel", "add_prefix_space": false, "trim_offsets": false, "use_regex": false}" />
|
707 |
+
<original_tokenizer_class value="<class 'transformers.models.qwen2.tokenization_qwen2_fast.Qwen2TokenizerFast'>" />
|
708 |
+
<pad_token_id value="151643" />
|
709 |
+
<sentencepiece_version value="0.2.0" />
|
710 |
+
<skip_special_tokens value="True" />
|
711 |
+
<streaming_detokenizer value="False" />
|
712 |
+
<tiktoken_version value="0.9.0" />
|
713 |
+
<tokenizer_output_type value="i64" />
|
714 |
+
<tokenizers_version value="0.21.1" />
|
715 |
+
<transformers_version value="4.52.3" />
|
716 |
+
<use_max_padding value="False" />
|
717 |
+
<use_sentencepiece_backend value="False" />
|
718 |
+
<utf8_replace_mode value="replace" />
|
719 |
+
<with_detokenizer value="True" />
|
720 |
+
</rt_info>
|
721 |
+
</net>
|
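The graph above ends in the two I64 Result nodes named input_ids and attention_mask, fed by the BPETokenizer and RaggedToDense extension operations. A minimal usage sketch, assuming the openvino and openvino_tokenizers Python packages are installed and the tokenizer IR pair sits in the working directory (the file name and sample text below are illustrative, not part of this commit):

# Sketch: run the converted tokenizer graph on its own.
# Importing openvino_tokenizers registers the BPETokenizer / RaggedToDense
# extension ops that this IR relies on.
import openvino as ov
import openvino_tokenizers  # noqa: F401  (side effect: op registration)

core = ov.Core()
tokenizer = core.compile_model("openvino_tokenizer.xml", "CPU")

# One string tensor in; the two I64 outputs declared by the Result nodes come back.
outputs = tokenizer(["Describe this image."])
input_ids = outputs["input_ids"]            # shape: [batch, sequence]
attention_mask = outputs["attention_mask"]  # 1 for real tokens, 0 for padding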
openvino_vision_embeddings_model.bin
ADDED
@@ -0,0 +1,3 @@
version https://git-lfs.github.com/spec/v1
oid sha256:7c691f192dfe3d77922acdd9ef03dfae49bf7efdf9315a4fa00930d1a11b0372
size 317217188
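This .bin entry, like tokenizer.json and the other .bin files in this commit, is a Git LFS pointer: a plain clone without LFS fetches only the three-line stub above, not the ~317 MB payload. A hedged sketch of pulling the real files with huggingface_hub (the repo id is a placeholder, not something stated in this diff):

# Sketch: download the LFS-backed payloads instead of the pointer stubs.
from huggingface_hub import snapshot_download

local_dir = snapshot_download(
    repo_id="OWNER/REPO_NAME",  # placeholder: wherever this model is hosted
    allow_patterns=["*.xml", "*.bin", "*.json", "*.txt", "*.jinja"],
)
print("files downloaded to", local_dir)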
openvino_vision_embeddings_model.xml
ADDED
The diff for this file is too large to render.
See raw diff
preprocessor_config.json
ADDED
@@ -0,0 +1,27 @@
{
  "crop_size": {
    "height": 448,
    "width": 448
  },
  "do_center_crop": true,
  "do_convert_rgb": true,
  "do_normalize": true,
  "do_rescale": true,
  "do_resize": true,
  "image_mean": [
    0.485,
    0.456,
    0.406
  ],
  "image_processor_type": "CLIPImageProcessor",
  "image_std": [
    0.229,
    0.224,
    0.225
  ],
  "resample": 3,
  "rescale_factor": 0.00392156862745098,
  "size": {
    "shortest_edge": 448
  }
}
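The values above spell out standard CLIP-style preprocessing: convert to RGB, resize the shortest edge to 448 with bicubic resampling (resample 3), center-crop to 448x448, rescale by 1/255 (0.00392...), and normalize with the ImageNet mean/std. A minimal sketch of that arithmetic with PIL and numpy, for reference only; in practice CLIPImageProcessor or the OpenVINO vision pipeline performs it:

# Sketch of the transform implied by preprocessor_config.json.
import numpy as np
from PIL import Image

MEAN = np.array([0.485, 0.456, 0.406], dtype=np.float32)
STD = np.array([0.229, 0.224, 0.225], dtype=np.float32)

def preprocess(path):
    img = Image.open(path).convert("RGB")                  # do_convert_rgb
    w, h = img.size
    scale = 448 / min(w, h)                                # size.shortest_edge = 448
    img = img.resize((round(w * scale), round(h * scale)), Image.BICUBIC)  # resample = 3
    left, top = (img.width - 448) // 2, (img.height - 448) // 2
    img = img.crop((left, top, left + 448, top + 448))     # do_center_crop, crop_size 448x448
    x = np.asarray(img, dtype=np.float32) / 255.0          # do_rescale, rescale_factor
    x = (x - MEAN) / STD                                   # do_normalize
    return x.transpose(2, 0, 1)[None]                      # NCHW, shape (1, 3, 448, 448)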
special_tokens_map.json
ADDED
@@ -0,0 +1,31 @@
{
  "additional_special_tokens": [
    "<|im_start|>",
    "<|im_end|>",
    "<|object_ref_start|>",
    "<|object_ref_end|>",
    "<|box_start|>",
    "<|box_end|>",
    "<|quad_start|>",
    "<|quad_end|>",
    "<|vision_start|>",
    "<|vision_end|>",
    "<|vision_pad|>",
    "<|image_pad|>",
    "<|video_pad|>"
  ],
  "eos_token": {
    "content": "<|im_end|>",
    "lstrip": false,
    "normalized": false,
    "rstrip": false,
    "single_word": false
  },
  "pad_token": {
    "content": "<|endoftext|>",
    "lstrip": false,
    "normalized": false,
    "rstrip": false,
    "single_word": false
  }
}
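For orientation: the eos and pad tokens declared here correspond to ids 151645 (<|im_end|>) and 151643 (<|endoftext|>) in added_tokens.json, matching the eos_token_id / pad_token_id recorded in the tokenizer IR's rt_info. A quick, hedged consistency check with transformers, assuming a local checkout of these files:

# Sketch: verify the special-token ids line up with the rt_info values.
from transformers import AutoTokenizer

tok = AutoTokenizer.from_pretrained(".")  # path to a local copy of this repo
assert tok.convert_tokens_to_ids("<|im_end|>") == 151645     # eos_token_id
assert tok.convert_tokens_to_ids("<|endoftext|>") == 151643  # pad_token_id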
tokenizer.json
ADDED
@@ -0,0 +1,3 @@
version https://git-lfs.github.com/spec/v1
oid sha256:6f9ba4b4a6625b5047a1356f6081b641c3e4e6a4a198facbd4bef217747d1685
size 11423548
tokenizer_config.json
ADDED
@@ -0,0 +1,280 @@
{
  "add_bos_token": false,
  "add_eos_token": false,
  "add_prefix_space": false,
  "added_tokens_decoder": {
    "151643": {
      "content": "<|endoftext|>",
      "lstrip": false,
      "normalized": false,
      "rstrip": false,
      "single_word": false,
      "special": true
    },
    "151644": {
      "content": "<|im_start|>",
      "lstrip": false,
      "normalized": false,
      "rstrip": false,
      "single_word": false,
      "special": true
    },
    "151645": {
      "content": "<|im_end|>",
      "lstrip": false,
      "normalized": false,
      "rstrip": false,
      "single_word": false,
      "special": true
    },
    "151646": {
      "content": "<|object_ref_start|>",
      "lstrip": false,
      "normalized": false,
      "rstrip": false,
      "single_word": false,
      "special": true
    },
    "151647": {
      "content": "<|object_ref_end|>",
      "lstrip": false,
      "normalized": false,
      "rstrip": false,
      "single_word": false,
      "special": true
    },
    "151648": {
      "content": "<|box_start|>",
      "lstrip": false,
      "normalized": false,
      "rstrip": false,
      "single_word": false,
      "special": true
    },
    "151649": {
      "content": "<|box_end|>",
      "lstrip": false,
      "normalized": false,
      "rstrip": false,
      "single_word": false,
      "special": true
    },
    "151650": {
      "content": "<|quad_start|>",
      "lstrip": false,
      "normalized": false,
      "rstrip": false,
      "single_word": false,
      "special": true
    },
    "151651": {
      "content": "<|quad_end|>",
      "lstrip": false,
      "normalized": false,
      "rstrip": false,
      "single_word": false,
      "special": true
    },
    "151652": {
      "content": "<|vision_start|>",
      "lstrip": false,
      "normalized": false,
      "rstrip": false,
      "single_word": false,
      "special": true
    },
    "151653": {
      "content": "<|vision_end|>",
      "lstrip": false,
      "normalized": false,
      "rstrip": false,
      "single_word": false,
      "special": true
    },
    "151654": {
      "content": "<|vision_pad|>",
      "lstrip": false,
      "normalized": false,
      "rstrip": false,
      "single_word": false,
      "special": true
    },
    "151655": {
      "content": "<|image_pad|>",
      "lstrip": false,
      "normalized": false,
      "rstrip": false,
      "single_word": false,
      "special": true
    },
    "151656": {
      "content": "<|video_pad|>",
      "lstrip": false,
      "normalized": false,
      "rstrip": false,
      "single_word": false,
      "special": true
    },
    "151657": {
      "content": "<tool_call>",
      "lstrip": false,
      "normalized": false,
      "rstrip": false,
      "single_word": false,
      "special": false
    },
    "151658": {
      "content": "</tool_call>",
      "lstrip": false,
      "normalized": false,
      "rstrip": false,
      "single_word": false,
      "special": false
    },
    "151659": {
      "content": "<|fim_prefix|>",
      "lstrip": false,
      "normalized": false,
      "rstrip": false,
      "single_word": false,
      "special": false
    },
    "151660": {
      "content": "<|fim_middle|>",
      "lstrip": false,
      "normalized": false,
      "rstrip": false,
      "single_word": false,
      "special": false
    },
    "151661": {
      "content": "<|fim_suffix|>",
      "lstrip": false,
      "normalized": false,
      "rstrip": false,
      "single_word": false,
      "special": false
    },
    "151662": {
      "content": "<|fim_pad|>",
      "lstrip": false,
      "normalized": false,
      "rstrip": false,
      "single_word": false,
      "special": false
    },
    "151663": {
      "content": "<|repo_name|>",
      "lstrip": false,
      "normalized": false,
      "rstrip": false,
      "single_word": false,
      "special": false
    },
    "151664": {
      "content": "<|file_sep|>",
      "lstrip": false,
      "normalized": false,
      "rstrip": false,
      "single_word": false,
      "special": false
    },
    "151665": {
      "content": "<img>",
      "lstrip": false,
      "normalized": false,
      "rstrip": false,
      "single_word": false,
      "special": true
    },
    "151666": {
      "content": "</img>",
      "lstrip": false,
      "normalized": false,
      "rstrip": false,
      "single_word": false,
      "special": true
    },
    "151667": {
      "content": "<IMG_CONTEXT>",
      "lstrip": false,
      "normalized": false,
      "rstrip": false,
      "single_word": false,
      "special": true
    },
    "151668": {
      "content": "<quad>",
      "lstrip": false,
      "normalized": false,
      "rstrip": false,
      "single_word": false,
      "special": true
    },
    "151669": {
      "content": "</quad>",
      "lstrip": false,
      "normalized": false,
      "rstrip": false,
      "single_word": false,
      "special": true
    },
    "151670": {
      "content": "<ref>",
      "lstrip": false,
      "normalized": false,
      "rstrip": false,
      "single_word": false,
      "special": true
    },
    "151671": {
      "content": "</ref>",
      "lstrip": false,
      "normalized": false,
      "rstrip": false,
      "single_word": false,
      "special": true
    },
    "151672": {
      "content": "<box>",
      "lstrip": false,
      "normalized": false,
      "rstrip": false,
      "single_word": false,
      "special": true
    },
    "151673": {
      "content": "</box>",
      "lstrip": false,
      "normalized": false,
      "rstrip": false,
      "single_word": false,
      "special": true
    }
  },
  "additional_special_tokens": [
    "<|im_start|>",
    "<|im_end|>",
    "<|object_ref_start|>",
    "<|object_ref_end|>",
    "<|box_start|>",
    "<|box_end|>",
    "<|quad_start|>",
    "<|quad_end|>",
    "<|vision_start|>",
    "<|vision_end|>",
    "<|vision_pad|>",
    "<|image_pad|>",
    "<|video_pad|>"
  ],
  "bos_token": null,
  "clean_up_tokenization_spaces": false,
  "eos_token": "<|im_end|>",
  "errors": "replace",
  "extra_special_tokens": {},
  "model_max_length": 12288,
  "pad_token": "<|endoftext|>",
  "split_special_tokens": false,
  "tokenizer_class": "Qwen2Tokenizer",
  "unk_token": null
}
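This config selects the stock Qwen2Tokenizer with a 12288-token model_max_length and <|im_end|> as EOS; chat formatting comes from the bundled chat_template.jinja (the same template is embedded in the tokenizer IR's rt_info). A hedged usage sketch with transformers; the local path and message are illustrative:

# Sketch: render and tokenize a chat turn with the bundled template.
from transformers import AutoTokenizer

tok = AutoTokenizer.from_pretrained(".")  # local copy of these files
messages = [{"role": "user", "content": "Hello!"}]
prompt = tok.apply_chat_template(messages, tokenize=False, add_generation_prompt=True)
# prompt starts with "<|im_start|>system\n..." and ends with "<|im_start|>assistant\n"
ids = tok(prompt, return_tensors="np")["input_ids"]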
vocab.json
ADDED
The diff for this file is too large to render.
See raw diff