openvino-ci committed on
Commit d0f1573
1 Parent(s): a5f6cc4

Upload folder using huggingface_hub
README.md ADDED
@@ -0,0 +1,66 @@
---
license: apache-2.0
license_link: https://choosealicense.com/licenses/apache-2.0/
---
# pythia-12b-int8-ov
* Model creator: [EleutherAI](https://huggingface.co/EleutherAI)
* Original model: [pythia-12b](https://huggingface.co/EleutherAI/pythia-12b)

## Description
This is the [pythia-12b](https://huggingface.co/EleutherAI/pythia-12b) model converted to the [OpenVINO™ IR](https://docs.openvino.ai/2024/documentation/openvino-ir-format.html) (Intermediate Representation) format, with weights compressed to INT8 by [NNCF](https://github.com/openvinotoolkit/nncf).
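
The conversion itself is not part of this repository, but a comparable export can usually be reproduced with [Optimum Intel](https://huggingface.co/docs/optimum/intel/index), which calls NNCF under the hood. The snippet below is a hedged sketch rather than the exact recipe used for this upload; the output directory name is illustrative.

```
from optimum.intel.openvino import OVModelForCausalLM

# Sketch (assumed workflow, not the recorded recipe for this upload):
# convert the original Transformers checkpoint to OpenVINO IR and
# compress its weights to INT8 via NNCF in a single call.
model = OVModelForCausalLM.from_pretrained(
    "EleutherAI/pythia-12b",
    export=True,        # run the PyTorch -> OpenVINO IR conversion
    load_in_8bit=True,  # INT8 weight compression (NNCF)
)
model.save_pretrained("pythia-12b-int8-ov")  # illustrative output directory
```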

## Quantization Parameters

Weight compression was performed using `nncf.compress_weights` with the following parameters:

* mode: **int8_asym**
* ratio: **1**

For more information on quantization, check the [OpenVINO model optimization guide](https://docs.openvino.ai/2024/openvino-workflow/model-optimization-guide/weight-compression.html).
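
For reference, applying the parameters above with NNCF directly on an already-converted FP16/FP32 IR would look roughly like the sketch below; the file paths are placeholders and the exact invocation used for this upload is not recorded here.

```
import nncf
import openvino as ov

core = ov.Core()
# Placeholder path to an uncompressed OpenVINO IR of pythia-12b.
model = core.read_model("pythia-12b/openvino_model.xml")

# mode int8_asym; with 8-bit modes all weights are compressed (ratio 1).
compressed = nncf.compress_weights(model, mode=nncf.CompressWeightsMode.INT8_ASYM)

ov.save_model(compressed, "pythia-12b-int8-ov/openvino_model.xml")
```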
20
+
21
+
22
+ ## Compatibility
23
+
24
+ The provided OpenVINO™ IR model is compatible with:
25
+
26
+ * OpenVINO version 2024.1.0 and higher
27
+ * Optimum Intel 1.16.0 and higher
28
+
29
+ ## Running Model Inference
30
+
31
+ 1. Install packages required for using [Optimum Intel](https://huggingface.co/docs/optimum/intel/index) integration with the OpenVINO backend:
32
+
33
+ ```
34
+ pip install optimum[openvino]
35
+ ```
36
+
37
+ 2. Run model inference:
38
+
39
+ ```
40
+ from transformers import AutoTokenizer
41
+ from optimum.intel.openvino import OVModelForCausalLM
42
+
43
+ model_id = "OpenVINO/pythia-12b-int8-ov"
44
+ tokenizer = AutoTokenizer.from_pretrained(model_id)
45
+ model = OVModelForCausalLM.from_pretrained(model_id)
46
+
47
+ inputs = tokenizer("What is OpenVINO?", return_tensors="pt")
48
+
49
+ outputs = model.generate(**inputs, max_length=200)
50
+ text = tokenizer.batch_decode(outputs)[0]
51
+ print(text)
52
+ ```
53
+
54
+ For more examples and possible optimizations, refer to the [OpenVINO Large Language Model Inference Guide](https://docs.openvino.ai/2024/learn-openvino/llm_inference_guide.html).
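
As an alternative to Optimum Intel, the same IR files can typically be run with the OpenVINO GenAI pipeline. The sketch below assumes the `openvino-genai` and `huggingface_hub` packages are installed and is not the only supported path.

```
import openvino_genai as ov_genai
from huggingface_hub import snapshot_download

# Download the IR files locally, then run generation natively with OpenVINO GenAI.
model_dir = snapshot_download("OpenVINO/pythia-12b-int8-ov")
pipe = ov_genai.LLMPipeline(model_dir, "CPU")  # "GPU" can be used if available
print(pipe.generate("What is OpenVINO?", max_new_tokens=200))
```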

## Limitations

Check the original model card for [limitations](https://huggingface.co/EleutherAI/pythia-12b).

## Legal information

The original model is distributed under the [apache-2.0](https://choosealicense.com/licenses/apache-2.0/) license. More details can be found in the [original model card](https://huggingface.co/EleutherAI/pythia-12b).

## Disclaimer

Intel is committed to respecting human rights and avoiding causing or contributing to adverse impacts on human rights. See [Intel’s Global Human Rights Principles](https://www.intel.com/content/dam/www/central-libraries/us/en/documents/policy-human-rights.pdf). Intel’s products and software are intended only to be used in applications that do not cause or contribute to adverse impacts on human rights.
config.json ADDED
@@ -0,0 +1,45 @@
{
  "_name_or_path": "EleutherAI/pythia-12b",
  "architectures": [
    "MPTForCausalLM"
  ],
  "attn_config": {
    "model_type": ""
  },
  "auto_map": {
    "AutoConfig": "Intel/neural-chat-7b-v1-1--configuration_mpt.MPTConfig",
    "AutoModelForCausalLM": "Intel/neural-chat-7b-v1-1--modeling_mpt.MPTForCausalLM"
  },
  "d_model": 4096,
  "emb_pdrop": 0,
  "embedding_fraction": 1.0,
  "expansion_ratio": 4,
  "init_config": {
    "emb_init_std": null,
    "emb_init_uniform_lim": null,
    "fan_mode": "fan_in",
    "init_div_is_residual": true,
    "init_gain": 0,
    "init_nonlinearity": "relu",
    "init_std": 0.02,
    "name": "kaiming_normal_",
    "verbose": 0
  },
  "init_device": "cpu",
  "initializer_range": 0.02,
  "layer_norm_epsilon": 1e-05,
  "learned_pos_emb": true,
  "logit_scale": null,
  "max_seq_len": 2048,
  "model_type": "mpt",
  "n_heads": 32,
  "n_layers": 32,
  "no_bias": true,
  "norm_type": "low_precision_layernorm",
  "resid_pdrop": 0,
  "tokenizer_name": "EleutherAI/gpt-neox-20b",
  "transformers_version": "4.42.4",
  "use_cache": false,
  "verbose": 0,
  "vocab_size": 50279
}
generation_config.json ADDED
@@ -0,0 +1,6 @@
{
  "_from_model_config": true,
  "eos_token_id": 0,
  "transformers_version": "4.42.4",
  "use_cache": false
}
openvino_detokenizer.bin ADDED
@@ -0,0 +1,3 @@
version https://git-lfs.github.com/spec/v1
oid sha256:eb1aaf54e3666ecdf8dfbf33cbd01af2f1d5d43bc0b96d7ad223a5ba08b63035
size 558476
openvino_detokenizer.xml ADDED
@@ -0,0 +1,219 @@
1
+ <?xml version="1.0"?>
2
+ <net name="detokenizer" version="11">
3
+ <layers>
4
+ <layer id="0" name="Parameter_234266" type="Parameter" version="opset1">
5
+ <data shape="?,?" element_type="i64" />
6
+ <output>
7
+ <port id="0" precision="I64" names="Parameter_234266">
8
+ <dim>-1</dim>
9
+ <dim>-1</dim>
10
+ </port>
11
+ </output>
12
+ </layer>
13
+ <layer id="1" name="Convert_234282" type="Convert" version="opset1">
14
+ <data destination_type="i32" />
15
+ <input>
16
+ <port id="0" precision="I64">
17
+ <dim>-1</dim>
18
+ <dim>-1</dim>
19
+ </port>
20
+ </input>
21
+ <output>
22
+ <port id="1" precision="I32">
23
+ <dim>-1</dim>
24
+ <dim>-1</dim>
25
+ </port>
26
+ </output>
27
+ </layer>
28
+ <layer id="2" name="Constant_234164" type="Const" version="opset1">
29
+ <data element_type="u8" shape="558427" offset="0" size="558427" />
30
+ <output>
31
+ <port id="0" precision="U8">
32
+ <dim>558427</dim>
33
+ </port>
34
+ </output>
35
+ </layer>
36
+ <layer id="3" name="StringTensorUnpack_234165" type="StringTensorUnpack" version="extension">
37
+ <data mode="begins_ends" />
38
+ <input>
39
+ <port id="0" precision="U8">
40
+ <dim>558427</dim>
41
+ </port>
42
+ </input>
43
+ <output>
44
+ <port id="1" precision="I32">
45
+ <dim>-1</dim>
46
+ </port>
47
+ <port id="2" precision="I32">
48
+ <dim>-1</dim>
49
+ </port>
50
+ <port id="3" precision="U8">
51
+ <dim>-1</dim>
52
+ </port>
53
+ </output>
54
+ </layer>
55
+ <layer id="4" name="VocabDecoder_234267" type="VocabDecoder" version="extension">
56
+ <data skip_tokens="0, 1, 50277, 50278" />
57
+ <input>
58
+ <port id="0" precision="I32">
59
+ <dim>-1</dim>
60
+ <dim>-1</dim>
61
+ </port>
62
+ <port id="1" precision="I32">
63
+ <dim>-1</dim>
64
+ </port>
65
+ <port id="2" precision="I32">
66
+ <dim>-1</dim>
67
+ </port>
68
+ <port id="3" precision="U8">
69
+ <dim>-1</dim>
70
+ </port>
71
+ </input>
72
+ <output>
73
+ <port id="4" precision="I32">
74
+ <dim>-1</dim>
75
+ </port>
76
+ <port id="5" precision="I32">
77
+ <dim>-1</dim>
78
+ </port>
79
+ <port id="6" precision="I32">
80
+ <dim>-1</dim>
81
+ </port>
82
+ <port id="7" precision="I32">
83
+ <dim>-1</dim>
84
+ </port>
85
+ <port id="8" precision="U8">
86
+ <dim>-1</dim>
87
+ </port>
88
+ </output>
89
+ </layer>
90
+ <layer id="5" name="CharsToBytes_234268" type="CharsToBytes" version="extension">
91
+ <input>
92
+ <port id="0" precision="I32">
93
+ <dim>-1</dim>
94
+ </port>
95
+ <port id="1" precision="I32">
96
+ <dim>-1</dim>
97
+ </port>
98
+ <port id="2" precision="I32">
99
+ <dim>-1</dim>
100
+ </port>
101
+ <port id="3" precision="I32">
102
+ <dim>-1</dim>
103
+ </port>
104
+ <port id="4" precision="U8">
105
+ <dim>-1</dim>
106
+ </port>
107
+ </input>
108
+ <output>
109
+ <port id="5" precision="I32">
110
+ <dim>-1</dim>
111
+ </port>
112
+ <port id="6" precision="I32">
113
+ <dim>-1</dim>
114
+ </port>
115
+ <port id="7" precision="U8">
116
+ <dim>-1</dim>
117
+ </port>
118
+ </output>
119
+ </layer>
120
+ <layer id="6" name="Constant_234270" type="Const" version="opset1">
121
+ <data element_type="u8" shape="47" offset="558427" size="47" />
122
+ <output>
123
+ <port id="0" precision="U8">
124
+ <dim>47</dim>
125
+ </port>
126
+ </output>
127
+ </layer>
128
+ <layer id="7" name="Constant_234272" type="Const" version="opset1">
129
+ <data element_type="u8" shape="2" offset="558474" size="2" />
130
+ <output>
131
+ <port id="0" precision="U8">
132
+ <dim>2</dim>
133
+ </port>
134
+ </output>
135
+ </layer>
136
+ <layer id="8" name="RegexNormalization_234273" type="RegexNormalization" version="extension">
137
+ <data global_replace="true" />
138
+ <input>
139
+ <port id="0" precision="I32">
140
+ <dim>-1</dim>
141
+ </port>
142
+ <port id="1" precision="I32">
143
+ <dim>-1</dim>
144
+ </port>
145
+ <port id="2" precision="U8">
146
+ <dim>-1</dim>
147
+ </port>
148
+ <port id="3" precision="U8">
149
+ <dim>47</dim>
150
+ </port>
151
+ <port id="4" precision="U8">
152
+ <dim>2</dim>
153
+ </port>
154
+ </input>
155
+ <output>
156
+ <port id="5" precision="I32">
157
+ <dim>-1</dim>
158
+ </port>
159
+ <port id="6" precision="I32">
160
+ <dim>-1</dim>
161
+ </port>
162
+ <port id="7" precision="U8">
163
+ <dim>-1</dim>
164
+ </port>
165
+ </output>
166
+ </layer>
167
+ <layer id="9" name="StringTensorPack_234274" type="StringTensorPack" version="extension">
168
+ <data mode="begins_ends" />
169
+ <input>
170
+ <port id="0" precision="I32">
171
+ <dim>-1</dim>
172
+ </port>
173
+ <port id="1" precision="I32">
174
+ <dim>-1</dim>
175
+ </port>
176
+ <port id="2" precision="U8">
177
+ <dim>-1</dim>
178
+ </port>
179
+ </input>
180
+ <output>
181
+ <port id="3" precision="STRING" names="string_output">
182
+ <dim>-1</dim>
183
+ </port>
184
+ </output>
185
+ </layer>
186
+ <layer id="10" name="Result_234275" type="Result" version="opset1">
187
+ <input>
188
+ <port id="0" precision="STRING">
189
+ <dim>-1</dim>
190
+ </port>
191
+ </input>
192
+ </layer>
193
+ </layers>
194
+ <edges>
195
+ <edge from-layer="0" from-port="0" to-layer="1" to-port="0" />
196
+ <edge from-layer="1" from-port="1" to-layer="4" to-port="0" />
197
+ <edge from-layer="2" from-port="0" to-layer="3" to-port="0" />
198
+ <edge from-layer="3" from-port="1" to-layer="4" to-port="1" />
199
+ <edge from-layer="3" from-port="2" to-layer="4" to-port="2" />
200
+ <edge from-layer="3" from-port="3" to-layer="4" to-port="3" />
201
+ <edge from-layer="4" from-port="8" to-layer="5" to-port="4" />
202
+ <edge from-layer="4" from-port="7" to-layer="5" to-port="3" />
203
+ <edge from-layer="4" from-port="6" to-layer="5" to-port="2" />
204
+ <edge from-layer="4" from-port="5" to-layer="5" to-port="1" />
205
+ <edge from-layer="4" from-port="4" to-layer="5" to-port="0" />
206
+ <edge from-layer="5" from-port="5" to-layer="8" to-port="0" />
207
+ <edge from-layer="5" from-port="6" to-layer="8" to-port="1" />
208
+ <edge from-layer="5" from-port="7" to-layer="8" to-port="2" />
209
+ <edge from-layer="6" from-port="0" to-layer="8" to-port="3" />
210
+ <edge from-layer="7" from-port="0" to-layer="8" to-port="4" />
211
+ <edge from-layer="8" from-port="5" to-layer="9" to-port="0" />
212
+ <edge from-layer="8" from-port="6" to-layer="9" to-port="1" />
213
+ <edge from-layer="8" from-port="7" to-layer="9" to-port="2" />
214
+ <edge from-layer="9" from-port="3" to-layer="10" to-port="0" />
215
+ </edges>
216
+ <rt_info>
217
+ <eos_token_id value="0" />
218
+ </rt_info>
219
+ </net>
openvino_model.bin ADDED
@@ -0,0 +1,3 @@
version https://git-lfs.github.com/spec/v1
oid sha256:af836a8bad1ab3afd9ad0c5daa3ecc4cd2b483f50334a1b659fd2cba4c61355e
size 6653410925
openvino_model.xml ADDED
The diff for this file is too large to render. See raw diff
 
openvino_tokenizer.bin ADDED
@@ -0,0 +1,3 @@
version https://git-lfs.github.com/spec/v1
oid sha256:f324126e3d158de43905ff243cdeb10349d1fe4c4683f7cad94c07b8dfa04b90
size 1166315
openvino_tokenizer.xml ADDED
@@ -0,0 +1,943 @@
1
+ <?xml version="1.0"?>
2
+ <net name="tokenizer" version="11">
3
+ <layers>
4
+ <layer id="0" name="Parameter_234076" type="Parameter" version="opset1">
5
+ <data shape="?" element_type="string" />
6
+ <output>
7
+ <port id="0" precision="STRING" names="Parameter_234076">
8
+ <dim>-1</dim>
9
+ </port>
10
+ </output>
11
+ </layer>
12
+ <layer id="1" name="Constant_234083" type="Const" version="opset1">
13
+ <data element_type="i64" shape="" offset="0" size="8" />
14
+ <output>
15
+ <port id="0" precision="I64" />
16
+ </output>
17
+ </layer>
18
+ <layer id="2" name="StringTensorUnpack_234077" type="StringTensorUnpack" version="extension">
19
+ <data mode="begins_ends" />
20
+ <input>
21
+ <port id="0" precision="STRING">
22
+ <dim>-1</dim>
23
+ </port>
24
+ </input>
25
+ <output>
26
+ <port id="1" precision="I32">
27
+ <dim>-1</dim>
28
+ </port>
29
+ <port id="2" precision="I32">
30
+ <dim>-1</dim>
31
+ </port>
32
+ <port id="3" precision="U8">
33
+ <dim>-1</dim>
34
+ </port>
35
+ </output>
36
+ </layer>
37
+ <layer id="3" name="NormalizeUnicode_234078" type="NormalizeUnicode" version="extension">
38
+ <data normalization_form="NFC" />
39
+ <input>
40
+ <port id="0" precision="I32">
41
+ <dim>-1</dim>
42
+ </port>
43
+ <port id="1" precision="I32">
44
+ <dim>-1</dim>
45
+ </port>
46
+ <port id="2" precision="U8">
47
+ <dim>-1</dim>
48
+ </port>
49
+ </input>
50
+ <output>
51
+ <port id="3" precision="I32">
52
+ <dim>-1</dim>
53
+ </port>
54
+ <port id="4" precision="I32">
55
+ <dim>-1</dim>
56
+ </port>
57
+ <port id="5" precision="U8">
58
+ <dim>-1</dim>
59
+ </port>
60
+ </output>
61
+ </layer>
62
+ <layer id="4" name="ShapeOf_234079" type="ShapeOf" version="opset3">
63
+ <data output_type="i64" />
64
+ <input>
65
+ <port id="0" precision="I32">
66
+ <dim>-1</dim>
67
+ </port>
68
+ </input>
69
+ <output>
70
+ <port id="1" precision="I64">
71
+ <dim>1</dim>
72
+ </port>
73
+ </output>
74
+ </layer>
75
+ <layer id="5" name="Constant_234080" type="Const" version="opset1">
76
+ <data element_type="i64" shape="" offset="0" size="8" />
77
+ <output>
78
+ <port id="0" precision="I64" />
79
+ </output>
80
+ </layer>
81
+ <layer id="6" name="Constant_234081" type="Const" version="opset1">
82
+ <data element_type="i64" shape="" offset="0" size="8" />
83
+ <output>
84
+ <port id="0" precision="I64" />
85
+ </output>
86
+ </layer>
87
+ <layer id="7" name="Gather_234082" type="Gather" version="opset8">
88
+ <data batch_dims="0" />
89
+ <input>
90
+ <port id="0" precision="I64">
91
+ <dim>1</dim>
92
+ </port>
93
+ <port id="1" precision="I64" />
94
+ <port id="2" precision="I64" />
95
+ </input>
96
+ <output>
97
+ <port id="3" precision="I64" />
98
+ </output>
99
+ </layer>
100
+ <layer id="8" name="Constant_234084" type="Const" version="opset1">
101
+ <data element_type="i64" shape="" offset="8" size="8" />
102
+ <output>
103
+ <port id="0" precision="I64" />
104
+ </output>
105
+ </layer>
106
+ <layer id="9" name="Range_234085" type="Range" version="opset4">
107
+ <data output_type="i32" />
108
+ <input>
109
+ <port id="0" precision="I64" />
110
+ <port id="1" precision="I64" />
111
+ <port id="2" precision="I64" />
112
+ </input>
113
+ <output>
114
+ <port id="3" precision="I32">
115
+ <dim>-1</dim>
116
+ </port>
117
+ </output>
118
+ </layer>
119
+ <layer id="10" name="Constant_234087" type="Const" version="opset1">
120
+ <data element_type="i64" shape="" offset="8" size="8" />
121
+ <output>
122
+ <port id="0" precision="I64" />
123
+ </output>
124
+ </layer>
125
+ <layer id="11" name="Constant_234088" type="Const" version="opset1">
126
+ <data element_type="i64" shape="" offset="8" size="8" />
127
+ <output>
128
+ <port id="0" precision="I64" />
129
+ </output>
130
+ </layer>
131
+ <layer id="12" name="Add_234089" type="Add" version="opset1">
132
+ <data auto_broadcast="numpy" />
133
+ <input>
134
+ <port id="0" precision="I64" />
135
+ <port id="1" precision="I64" />
136
+ </input>
137
+ <output>
138
+ <port id="2" precision="I64" />
139
+ </output>
140
+ </layer>
141
+ <layer id="13" name="Constant_234090" type="Const" version="opset1">
142
+ <data element_type="i64" shape="" offset="8" size="8" />
143
+ <output>
144
+ <port id="0" precision="I64" />
145
+ </output>
146
+ </layer>
147
+ <layer id="14" name="Range_234091" type="Range" version="opset4">
148
+ <data output_type="i32" />
149
+ <input>
150
+ <port id="0" precision="I64" />
151
+ <port id="1" precision="I64" />
152
+ <port id="2" precision="I64" />
153
+ </input>
154
+ <output>
155
+ <port id="3" precision="I32">
156
+ <dim>-1</dim>
157
+ </port>
158
+ </output>
159
+ </layer>
160
+ <layer id="15" name="Constant_234154" type="Const" version="opset1">
161
+ <data element_type="u8" shape="652" offset="16" size="652" />
162
+ <output>
163
+ <port id="0" precision="U8">
164
+ <dim>652</dim>
165
+ </port>
166
+ </output>
167
+ </layer>
168
+ <layer id="16" name="RegexSplit_234155" type="RegexSplit" version="extension">
169
+ <data behaviour="isolate" invert="false" max_splits="-1" />
170
+ <input>
171
+ <port id="0" precision="I32">
172
+ <dim>-1</dim>
173
+ </port>
174
+ <port id="1" precision="I32">
175
+ <dim>-1</dim>
176
+ </port>
177
+ <port id="2" precision="I32">
178
+ <dim>-1</dim>
179
+ </port>
180
+ <port id="3" precision="I32">
181
+ <dim>-1</dim>
182
+ </port>
183
+ <port id="4" precision="U8">
184
+ <dim>-1</dim>
185
+ </port>
186
+ <port id="5" precision="U8">
187
+ <dim>652</dim>
188
+ </port>
189
+ </input>
190
+ <output>
191
+ <port id="6" precision="I32">
192
+ <dim>-1</dim>
193
+ </port>
194
+ <port id="7" precision="I32">
195
+ <dim>-1</dim>
196
+ </port>
197
+ <port id="8" precision="I32">
198
+ <dim>-1</dim>
199
+ </port>
200
+ <port id="9" precision="I32">
201
+ <dim>-1</dim>
202
+ </port>
203
+ <port id="10" precision="U8">
204
+ <dim>-1</dim>
205
+ </port>
206
+ </output>
207
+ </layer>
208
+ <layer id="17" name="Constant_234160" type="Const" version="opset1">
209
+ <data element_type="u8" shape="64" offset="668" size="64" />
210
+ <output>
211
+ <port id="0" precision="U8">
212
+ <dim>64</dim>
213
+ </port>
214
+ </output>
215
+ </layer>
216
+ <layer id="18" name="Constant_234157" type="Const" version="opset1">
217
+ <data element_type="u8" shape="429" offset="732" size="429" />
218
+ <output>
219
+ <port id="0" precision="U8">
220
+ <dim>429</dim>
221
+ </port>
222
+ </output>
223
+ </layer>
224
+ <layer id="19" name="StringTensorUnpack_234158" type="StringTensorUnpack" version="extension">
225
+ <data mode="begins_ends" />
226
+ <input>
227
+ <port id="0" precision="U8">
228
+ <dim>429</dim>
229
+ </port>
230
+ </input>
231
+ <output>
232
+ <port id="1" precision="I32">
233
+ <dim>-1</dim>
234
+ </port>
235
+ <port id="2" precision="I32">
236
+ <dim>-1</dim>
237
+ </port>
238
+ <port id="3" precision="U8">
239
+ <dim>-1</dim>
240
+ </port>
241
+ </output>
242
+ </layer>
243
+ <layer id="20" name="RegexSplit_234161" type="RegexSplit" version="extension">
244
+ <data behaviour="isolate" invert="false" max_splits="-1" />
245
+ <input>
246
+ <port id="0" precision="I32">
247
+ <dim>-1</dim>
248
+ </port>
249
+ <port id="1" precision="I32">
250
+ <dim>-1</dim>
251
+ </port>
252
+ <port id="2" precision="I32">
253
+ <dim>-1</dim>
254
+ </port>
255
+ <port id="3" precision="I32">
256
+ <dim>-1</dim>
257
+ </port>
258
+ <port id="4" precision="U8">
259
+ <dim>-1</dim>
260
+ </port>
261
+ <port id="5" precision="U8">
262
+ <dim>64</dim>
263
+ </port>
264
+ <port id="6" precision="I32">
265
+ <dim>-1</dim>
266
+ </port>
267
+ <port id="7" precision="I32">
268
+ <dim>-1</dim>
269
+ </port>
270
+ <port id="8" precision="U8">
271
+ <dim>-1</dim>
272
+ </port>
273
+ </input>
274
+ <output>
275
+ <port id="9" precision="I32">
276
+ <dim>-1</dim>
277
+ </port>
278
+ <port id="10" precision="I32">
279
+ <dim>-1</dim>
280
+ </port>
281
+ <port id="11" precision="I32">
282
+ <dim>-1</dim>
283
+ </port>
284
+ <port id="12" precision="I32">
285
+ <dim>-1</dim>
286
+ </port>
287
+ <port id="13" precision="U8">
288
+ <dim>-1</dim>
289
+ </port>
290
+ </output>
291
+ </layer>
292
+ <layer id="21" name="BytesToChars_234162" type="BytesToChars" version="extension">
293
+ <input>
294
+ <port id="0" precision="I32">
295
+ <dim>-1</dim>
296
+ </port>
297
+ <port id="1" precision="I32">
298
+ <dim>-1</dim>
299
+ </port>
300
+ <port id="2" precision="I32">
301
+ <dim>-1</dim>
302
+ </port>
303
+ <port id="3" precision="I32">
304
+ <dim>-1</dim>
305
+ </port>
306
+ <port id="4" precision="U8">
307
+ <dim>-1</dim>
308
+ </port>
309
+ </input>
310
+ <output>
311
+ <port id="5" precision="I32">
312
+ <dim>-1</dim>
313
+ </port>
314
+ <port id="6" precision="I32">
315
+ <dim>-1</dim>
316
+ </port>
317
+ <port id="7" precision="I32">
318
+ <dim>-1</dim>
319
+ </port>
320
+ <port id="8" precision="I32">
321
+ <dim>-1</dim>
322
+ </port>
323
+ <port id="9" precision="U8">
324
+ <dim>-1</dim>
325
+ </port>
326
+ </output>
327
+ </layer>
328
+ <layer id="22" name="Constant_234164" type="Const" version="opset1">
329
+ <data element_type="u8" shape="558427" offset="1161" size="558427" />
330
+ <output>
331
+ <port id="0" precision="U8">
332
+ <dim>558427</dim>
333
+ </port>
334
+ </output>
335
+ </layer>
336
+ <layer id="23" name="StringTensorUnpack_234165" type="StringTensorUnpack" version="extension">
337
+ <data mode="begins_ends" />
338
+ <input>
339
+ <port id="0" precision="U8">
340
+ <dim>558427</dim>
341
+ </port>
342
+ </input>
343
+ <output>
344
+ <port id="1" precision="I32">
345
+ <dim>-1</dim>
346
+ </port>
347
+ <port id="2" precision="I32">
348
+ <dim>-1</dim>
349
+ </port>
350
+ <port id="3" precision="U8">
351
+ <dim>-1</dim>
352
+ </port>
353
+ </output>
354
+ </layer>
355
+ <layer id="24" name="Constant_234245" type="Const" version="opset1">
356
+ <data element_type="u8" shape="606619" offset="559588" size="606619" />
357
+ <output>
358
+ <port id="0" precision="U8">
359
+ <dim>606619</dim>
360
+ </port>
361
+ </output>
362
+ </layer>
363
+ <layer id="25" name="StringTensorUnpack_234246" type="StringTensorUnpack" version="extension">
364
+ <data mode="begins_ends" />
365
+ <input>
366
+ <port id="0" precision="U8">
367
+ <dim>606619</dim>
368
+ </port>
369
+ </input>
370
+ <output>
371
+ <port id="1" precision="I32">
372
+ <dim>-1</dim>
373
+ </port>
374
+ <port id="2" precision="I32">
375
+ <dim>-1</dim>
376
+ </port>
377
+ <port id="3" precision="U8">
378
+ <dim>-1</dim>
379
+ </port>
380
+ </output>
381
+ </layer>
382
+ <layer id="26" name="Constant_234173" type="Const" version="opset1">
383
+ <data element_type="i64" shape="" offset="0" size="8" />
384
+ <output>
385
+ <port id="0" precision="I64" />
386
+ </output>
387
+ </layer>
388
+ <layer id="27" name="Constant_234167" type="Const" version="opset1">
389
+ <data element_type="u8" shape="429" offset="732" size="429" />
390
+ <output>
391
+ <port id="0" precision="U8">
392
+ <dim>429</dim>
393
+ </port>
394
+ </output>
395
+ </layer>
396
+ <layer id="28" name="StringTensorUnpack_234168" type="StringTensorUnpack" version="extension">
397
+ <data mode="begins_ends" />
398
+ <input>
399
+ <port id="0" precision="U8">
400
+ <dim>429</dim>
401
+ </port>
402
+ </input>
403
+ <output>
404
+ <port id="1" precision="I32">
405
+ <dim>-1</dim>
406
+ </port>
407
+ <port id="2" precision="I32">
408
+ <dim>-1</dim>
409
+ </port>
410
+ <port id="3" precision="U8">
411
+ <dim>-1</dim>
412
+ </port>
413
+ </output>
414
+ </layer>
415
+ <layer id="29" name="ShapeOf_234169" type="ShapeOf" version="opset3">
416
+ <data output_type="i64" />
417
+ <input>
418
+ <port id="0" precision="I32">
419
+ <dim>-1</dim>
420
+ </port>
421
+ </input>
422
+ <output>
423
+ <port id="1" precision="I64">
424
+ <dim>1</dim>
425
+ </port>
426
+ </output>
427
+ </layer>
428
+ <layer id="30" name="Constant_234170" type="Const" version="opset1">
429
+ <data element_type="i64" shape="" offset="0" size="8" />
430
+ <output>
431
+ <port id="0" precision="I64" />
432
+ </output>
433
+ </layer>
434
+ <layer id="31" name="Constant_234171" type="Const" version="opset1">
435
+ <data element_type="i64" shape="" offset="0" size="8" />
436
+ <output>
437
+ <port id="0" precision="I64" />
438
+ </output>
439
+ </layer>
440
+ <layer id="32" name="Gather_234172" type="Gather" version="opset8">
441
+ <data batch_dims="0" />
442
+ <input>
443
+ <port id="0" precision="I64">
444
+ <dim>1</dim>
445
+ </port>
446
+ <port id="1" precision="I64" />
447
+ <port id="2" precision="I64" />
448
+ </input>
449
+ <output>
450
+ <port id="3" precision="I64" />
451
+ </output>
452
+ </layer>
453
+ <layer id="33" name="Constant_234174" type="Const" version="opset1">
454
+ <data element_type="i64" shape="" offset="8" size="8" />
455
+ <output>
456
+ <port id="0" precision="I64" />
457
+ </output>
458
+ </layer>
459
+ <layer id="34" name="Range_234175" type="Range" version="opset4">
460
+ <data output_type="i32" />
461
+ <input>
462
+ <port id="0" precision="I64" />
463
+ <port id="1" precision="I64" />
464
+ <port id="2" precision="I64" />
465
+ </input>
466
+ <output>
467
+ <port id="3" precision="I32">
468
+ <dim>-1</dim>
469
+ </port>
470
+ </output>
471
+ </layer>
472
+ <layer id="35" name="Constant_234177" type="Const" version="opset1">
473
+ <data element_type="i64" shape="" offset="8" size="8" />
474
+ <output>
475
+ <port id="0" precision="I64" />
476
+ </output>
477
+ </layer>
478
+ <layer id="36" name="Constant_234178" type="Const" version="opset1">
479
+ <data element_type="i64" shape="" offset="8" size="8" />
480
+ <output>
481
+ <port id="0" precision="I64" />
482
+ </output>
483
+ </layer>
484
+ <layer id="37" name="Add_234179" type="Add" version="opset1">
485
+ <data auto_broadcast="numpy" />
486
+ <input>
487
+ <port id="0" precision="I64" />
488
+ <port id="1" precision="I64" />
489
+ </input>
490
+ <output>
491
+ <port id="2" precision="I64" />
492
+ </output>
493
+ </layer>
494
+ <layer id="38" name="Constant_234180" type="Const" version="opset1">
495
+ <data element_type="i64" shape="" offset="8" size="8" />
496
+ <output>
497
+ <port id="0" precision="I64" />
498
+ </output>
499
+ </layer>
500
+ <layer id="39" name="Range_234181" type="Range" version="opset4">
501
+ <data output_type="i32" />
502
+ <input>
503
+ <port id="0" precision="I64" />
504
+ <port id="1" precision="I64" />
505
+ <port id="2" precision="I64" />
506
+ </input>
507
+ <output>
508
+ <port id="3" precision="I32">
509
+ <dim>-1</dim>
510
+ </port>
511
+ </output>
512
+ </layer>
513
+ <layer id="40" name="BytesToChars_234243" type="BytesToChars" version="extension">
514
+ <input>
515
+ <port id="0" precision="I32">
516
+ <dim>-1</dim>
517
+ </port>
518
+ <port id="1" precision="I32">
519
+ <dim>-1</dim>
520
+ </port>
521
+ <port id="2" precision="I32">
522
+ <dim>-1</dim>
523
+ </port>
524
+ <port id="3" precision="I32">
525
+ <dim>-1</dim>
526
+ </port>
527
+ <port id="4" precision="U8">
528
+ <dim>-1</dim>
529
+ </port>
530
+ </input>
531
+ <output>
532
+ <port id="5" precision="I32">
533
+ <dim>-1</dim>
534
+ </port>
535
+ <port id="6" precision="I32">
536
+ <dim>-1</dim>
537
+ </port>
538
+ <port id="7" precision="I32">
539
+ <dim>-1</dim>
540
+ </port>
541
+ <port id="8" precision="I32">
542
+ <dim>-1</dim>
543
+ </port>
544
+ <port id="9" precision="U8">
545
+ <dim>-1</dim>
546
+ </port>
547
+ </output>
548
+ </layer>
549
+ <layer id="41" name="Constant_234247" type="Const" version="opset1">
550
+ <data element_type="i32" shape="25" offset="1166207" size="100" />
551
+ <output>
552
+ <port id="0" precision="I32">
553
+ <dim>25</dim>
554
+ </port>
555
+ </output>
556
+ </layer>
557
+ <layer id="42" name="BPETokenizer_234248" type="BPETokenizer" version="extension">
558
+ <data unk_token="" fuse_unk="false" suffix_indicator="" end_suffix="" byte_fallback="false" />
559
+ <input>
560
+ <port id="0" precision="I32">
561
+ <dim>-1</dim>
562
+ </port>
563
+ <port id="1" precision="I32">
564
+ <dim>-1</dim>
565
+ </port>
566
+ <port id="2" precision="I32">
567
+ <dim>-1</dim>
568
+ </port>
569
+ <port id="3" precision="I32">
570
+ <dim>-1</dim>
571
+ </port>
572
+ <port id="4" precision="U8">
573
+ <dim>-1</dim>
574
+ </port>
575
+ <port id="5" precision="I32">
576
+ <dim>-1</dim>
577
+ </port>
578
+ <port id="6" precision="I32">
579
+ <dim>-1</dim>
580
+ </port>
581
+ <port id="7" precision="U8">
582
+ <dim>-1</dim>
583
+ </port>
584
+ <port id="8" precision="I32">
585
+ <dim>-1</dim>
586
+ </port>
587
+ <port id="9" precision="I32">
588
+ <dim>-1</dim>
589
+ </port>
590
+ <port id="10" precision="U8">
591
+ <dim>-1</dim>
592
+ </port>
593
+ <port id="11" precision="I32">
594
+ <dim>-1</dim>
595
+ </port>
596
+ <port id="12" precision="I32">
597
+ <dim>-1</dim>
598
+ </port>
599
+ <port id="13" precision="U8">
600
+ <dim>-1</dim>
601
+ </port>
602
+ <port id="14" precision="I32">
603
+ <dim>25</dim>
604
+ </port>
605
+ </input>
606
+ <output>
607
+ <port id="15" precision="I32">
608
+ <dim>-1</dim>
609
+ </port>
610
+ <port id="16" precision="I32">
611
+ <dim>-1</dim>
612
+ </port>
613
+ <port id="17" precision="I32">
614
+ <dim>-1</dim>
615
+ </port>
616
+ </output>
617
+ </layer>
618
+ <layer id="43" name="Subtract_234249" type="Subtract" version="opset1">
619
+ <data auto_broadcast="numpy" />
620
+ <input>
621
+ <port id="0" precision="I32">
622
+ <dim>-1</dim>
623
+ </port>
624
+ <port id="1" precision="I32">
625
+ <dim>-1</dim>
626
+ </port>
627
+ </input>
628
+ <output>
629
+ <port id="2" precision="I32">
630
+ <dim>-1</dim>
631
+ </port>
632
+ </output>
633
+ </layer>
634
+ <layer id="44" name="Constant_234250" type="Const" version="opset1">
635
+ <data element_type="i32" shape="" offset="1166307" size="4" />
636
+ <output>
637
+ <port id="0" precision="I32" />
638
+ </output>
639
+ </layer>
640
+ <layer id="45" name="Minimum_234251" type="Minimum" version="opset1">
641
+ <data auto_broadcast="numpy" />
642
+ <input>
643
+ <port id="0" precision="I32">
644
+ <dim>-1</dim>
645
+ </port>
646
+ <port id="1" precision="I32" />
647
+ </input>
648
+ <output>
649
+ <port id="2" precision="I32">
650
+ <dim>-1</dim>
651
+ </port>
652
+ </output>
653
+ </layer>
654
+ <layer id="46" name="Add_234252" type="Add" version="opset1">
655
+ <data auto_broadcast="numpy" />
656
+ <input>
657
+ <port id="0" precision="I32">
658
+ <dim>-1</dim>
659
+ </port>
660
+ <port id="1" precision="I32">
661
+ <dim>-1</dim>
662
+ </port>
663
+ </input>
664
+ <output>
665
+ <port id="2" precision="I32">
666
+ <dim>-1</dim>
667
+ </port>
668
+ </output>
669
+ </layer>
670
+ <layer id="47" name="Constant_234253" type="Const" version="opset1">
671
+ <data element_type="i32" shape="1" offset="1166311" size="4" />
672
+ <output>
673
+ <port id="0" precision="I32">
674
+ <dim>1</dim>
675
+ </port>
676
+ </output>
677
+ </layer>
678
+ <layer id="48" name="CombineSegments_234254" type="CombineSegments" version="extension">
679
+ <input>
680
+ <port id="0" precision="I32">
681
+ <dim>-1</dim>
682
+ </port>
683
+ <port id="1" precision="I32">
684
+ <dim>-1</dim>
685
+ </port>
686
+ <port id="2" precision="I32">
687
+ <dim>-1</dim>
688
+ </port>
689
+ <port id="3" precision="I32">
690
+ <dim>1</dim>
691
+ </port>
692
+ </input>
693
+ <output>
694
+ <port id="4" precision="I32">
695
+ <dim>-1</dim>
696
+ </port>
697
+ <port id="5" precision="I32">
698
+ <dim>-1</dim>
699
+ </port>
700
+ <port id="6" precision="I32">
701
+ <dim>-1</dim>
702
+ </port>
703
+ <port id="7" precision="I32">
704
+ <dim>-1</dim>
705
+ </port>
706
+ <port id="8" precision="I32">
707
+ <dim>-1</dim>
708
+ </port>
709
+ <port id="9" precision="I32">
710
+ <dim>-1</dim>
711
+ </port>
712
+ </output>
713
+ </layer>
714
+ <layer id="49" name="Subtract_234255" type="Subtract" version="opset1">
715
+ <data auto_broadcast="numpy" />
716
+ <input>
717
+ <port id="0" precision="I32">
718
+ <dim>-1</dim>
719
+ </port>
720
+ <port id="1" precision="I32">
721
+ <dim>-1</dim>
722
+ </port>
723
+ </input>
724
+ <output>
725
+ <port id="2" precision="I32">
726
+ <dim>-1</dim>
727
+ </port>
728
+ </output>
729
+ </layer>
730
+ <layer id="50" name="Constant_234256" type="Const" version="opset1">
731
+ <data element_type="i32" shape="" offset="1166311" size="4" />
732
+ <output>
733
+ <port id="0" precision="I32" />
734
+ </output>
735
+ </layer>
736
+ <layer id="51" name="ReduceMax_234257" type="ReduceMax" version="opset1">
737
+ <data keep_dims="false" />
738
+ <input>
739
+ <port id="0" precision="I32">
740
+ <dim>-1</dim>
741
+ </port>
742
+ <port id="1" precision="I32" />
743
+ </input>
744
+ <output>
745
+ <port id="2" precision="I32" />
746
+ </output>
747
+ </layer>
748
+ <layer id="52" name="Constant_234258" type="Const" version="opset1">
749
+ <data element_type="i32" shape="" offset="1166311" size="4" />
750
+ <output>
751
+ <port id="0" precision="I32" />
752
+ </output>
753
+ </layer>
754
+ <layer id="53" name="RaggedToDense_234259" type="RaggedToDense" version="extension">
755
+ <data pad_right="true" />
756
+ <input>
757
+ <port id="0" precision="I32">
758
+ <dim>-1</dim>
759
+ </port>
760
+ <port id="1" precision="I32">
761
+ <dim>-1</dim>
762
+ </port>
763
+ <port id="2" precision="I32">
764
+ <dim>-1</dim>
765
+ </port>
766
+ <port id="3" precision="I32" />
767
+ <port id="4" precision="I32" />
768
+ </input>
769
+ <output>
770
+ <port id="5" precision="I32">
771
+ <dim>-1</dim>
772
+ <dim>-1</dim>
773
+ </port>
774
+ <port id="6" precision="BOOL">
775
+ <dim>-1</dim>
776
+ <dim>-1</dim>
777
+ </port>
778
+ </output>
779
+ </layer>
780
+ <layer id="54" name="Convert_234260" type="Convert" version="opset1">
781
+ <data destination_type="i32" />
782
+ <input>
783
+ <port id="0" precision="BOOL">
784
+ <dim>-1</dim>
785
+ <dim>-1</dim>
786
+ </port>
787
+ </input>
788
+ <output>
789
+ <port id="1" precision="I32">
790
+ <dim>-1</dim>
791
+ <dim>-1</dim>
792
+ </port>
793
+ </output>
794
+ </layer>
795
+ <layer id="55" name="Convert_234260" type="Convert" version="opset1">
796
+ <data destination_type="i64" />
797
+ <input>
798
+ <port id="0" precision="I32">
799
+ <dim>-1</dim>
800
+ <dim>-1</dim>
801
+ </port>
802
+ </input>
803
+ <output>
804
+ <port id="1" precision="I64" names="attention_mask">
805
+ <dim>-1</dim>
806
+ <dim>-1</dim>
807
+ </port>
808
+ </output>
809
+ </layer>
810
+ <layer id="57" name="RaggedToDense_234259.0" type="Convert" version="opset1">
811
+ <data destination_type="i64" />
812
+ <input>
813
+ <port id="0" precision="I32">
814
+ <dim>-1</dim>
815
+ <dim>-1</dim>
816
+ </port>
817
+ </input>
818
+ <output>
819
+ <port id="1" precision="I64" names="input_ids">
820
+ <dim>-1</dim>
821
+ <dim>-1</dim>
822
+ </port>
823
+ </output>
824
+ </layer>
825
+ <layer id="58" name="Result_234263" type="Result" version="opset1">
826
+ <input>
827
+ <port id="0" precision="I64">
828
+ <dim>-1</dim>
829
+ <dim>-1</dim>
830
+ </port>
831
+ </input>
832
+ </layer>
833
+ <layer id="56" name="Result_234265" type="Result" version="opset1">
834
+ <input>
835
+ <port id="0" precision="I64">
836
+ <dim>-1</dim>
837
+ <dim>-1</dim>
838
+ </port>
839
+ </input>
840
+ </layer>
841
+ </layers>
842
+ <edges>
843
+ <edge from-layer="0" from-port="0" to-layer="2" to-port="0" />
844
+ <edge from-layer="1" from-port="0" to-layer="9" to-port="0" />
845
+ <edge from-layer="2" from-port="1" to-layer="3" to-port="0" />
846
+ <edge from-layer="2" from-port="2" to-layer="3" to-port="1" />
847
+ <edge from-layer="2" from-port="3" to-layer="3" to-port="2" />
848
+ <edge from-layer="3" from-port="5" to-layer="16" to-port="4" />
849
+ <edge from-layer="3" from-port="4" to-layer="16" to-port="3" />
850
+ <edge from-layer="3" from-port="3" to-layer="16" to-port="2" />
851
+ <edge from-layer="3" from-port="3" to-layer="4" to-port="0" />
852
+ <edge from-layer="4" from-port="1" to-layer="7" to-port="0" />
853
+ <edge from-layer="5" from-port="0" to-layer="7" to-port="1" />
854
+ <edge from-layer="6" from-port="0" to-layer="7" to-port="2" />
855
+ <edge from-layer="7" from-port="3" to-layer="9" to-port="1" />
856
+ <edge from-layer="7" from-port="3" to-layer="12" to-port="0" />
857
+ <edge from-layer="8" from-port="0" to-layer="9" to-port="2" />
858
+ <edge from-layer="9" from-port="3" to-layer="16" to-port="0" />
859
+ <edge from-layer="10" from-port="0" to-layer="14" to-port="0" />
860
+ <edge from-layer="11" from-port="0" to-layer="12" to-port="1" />
861
+ <edge from-layer="12" from-port="2" to-layer="14" to-port="1" />
862
+ <edge from-layer="13" from-port="0" to-layer="14" to-port="2" />
863
+ <edge from-layer="14" from-port="3" to-layer="16" to-port="1" />
864
+ <edge from-layer="15" from-port="0" to-layer="16" to-port="5" />
865
+ <edge from-layer="16" from-port="6" to-layer="20" to-port="0" />
866
+ <edge from-layer="16" from-port="7" to-layer="20" to-port="1" />
867
+ <edge from-layer="16" from-port="8" to-layer="20" to-port="2" />
868
+ <edge from-layer="16" from-port="9" to-layer="20" to-port="3" />
869
+ <edge from-layer="16" from-port="10" to-layer="20" to-port="4" />
870
+ <edge from-layer="17" from-port="0" to-layer="20" to-port="5" />
871
+ <edge from-layer="18" from-port="0" to-layer="19" to-port="0" />
872
+ <edge from-layer="19" from-port="1" to-layer="20" to-port="6" />
873
+ <edge from-layer="19" from-port="2" to-layer="20" to-port="7" />
874
+ <edge from-layer="19" from-port="3" to-layer="20" to-port="8" />
875
+ <edge from-layer="20" from-port="9" to-layer="21" to-port="0" />
876
+ <edge from-layer="20" from-port="13" to-layer="21" to-port="4" />
877
+ <edge from-layer="20" from-port="12" to-layer="21" to-port="3" />
878
+ <edge from-layer="20" from-port="10" to-layer="21" to-port="1" />
879
+ <edge from-layer="20" from-port="11" to-layer="21" to-port="2" />
880
+ <edge from-layer="21" from-port="9" to-layer="42" to-port="4" />
881
+ <edge from-layer="21" from-port="8" to-layer="42" to-port="3" />
882
+ <edge from-layer="21" from-port="7" to-layer="42" to-port="2" />
883
+ <edge from-layer="21" from-port="6" to-layer="42" to-port="1" />
884
+ <edge from-layer="21" from-port="5" to-layer="42" to-port="0" />
885
+ <edge from-layer="22" from-port="0" to-layer="23" to-port="0" />
886
+ <edge from-layer="23" from-port="3" to-layer="42" to-port="7" />
887
+ <edge from-layer="23" from-port="2" to-layer="42" to-port="6" />
888
+ <edge from-layer="23" from-port="1" to-layer="42" to-port="5" />
889
+ <edge from-layer="24" from-port="0" to-layer="25" to-port="0" />
890
+ <edge from-layer="25" from-port="3" to-layer="42" to-port="10" />
891
+ <edge from-layer="25" from-port="2" to-layer="42" to-port="9" />
892
+ <edge from-layer="25" from-port="1" to-layer="42" to-port="8" />
893
+ <edge from-layer="26" from-port="0" to-layer="34" to-port="0" />
894
+ <edge from-layer="27" from-port="0" to-layer="28" to-port="0" />
895
+ <edge from-layer="28" from-port="2" to-layer="40" to-port="3" />
896
+ <edge from-layer="28" from-port="1" to-layer="29" to-port="0" />
897
+ <edge from-layer="28" from-port="3" to-layer="40" to-port="4" />
898
+ <edge from-layer="28" from-port="1" to-layer="40" to-port="2" />
899
+ <edge from-layer="29" from-port="1" to-layer="32" to-port="0" />
900
+ <edge from-layer="30" from-port="0" to-layer="32" to-port="1" />
901
+ <edge from-layer="31" from-port="0" to-layer="32" to-port="2" />
902
+ <edge from-layer="32" from-port="3" to-layer="34" to-port="1" />
903
+ <edge from-layer="32" from-port="3" to-layer="37" to-port="0" />
904
+ <edge from-layer="33" from-port="0" to-layer="34" to-port="2" />
905
+ <edge from-layer="34" from-port="3" to-layer="40" to-port="0" />
906
+ <edge from-layer="35" from-port="0" to-layer="39" to-port="0" />
907
+ <edge from-layer="36" from-port="0" to-layer="37" to-port="1" />
908
+ <edge from-layer="37" from-port="2" to-layer="39" to-port="1" />
909
+ <edge from-layer="38" from-port="0" to-layer="39" to-port="2" />
910
+ <edge from-layer="39" from-port="3" to-layer="40" to-port="1" />
911
+ <edge from-layer="40" from-port="7" to-layer="42" to-port="11" />
912
+ <edge from-layer="40" from-port="8" to-layer="42" to-port="12" />
913
+ <edge from-layer="40" from-port="9" to-layer="42" to-port="13" />
914
+ <edge from-layer="41" from-port="0" to-layer="42" to-port="14" />
915
+ <edge from-layer="42" from-port="17" to-layer="48" to-port="2" />
916
+ <edge from-layer="42" from-port="15" to-layer="48" to-port="0" />
917
+ <edge from-layer="42" from-port="15" to-layer="46" to-port="0" />
918
+ <edge from-layer="42" from-port="15" to-layer="43" to-port="1" />
919
+ <edge from-layer="42" from-port="16" to-layer="43" to-port="0" />
920
+ <edge from-layer="43" from-port="2" to-layer="45" to-port="0" />
921
+ <edge from-layer="44" from-port="0" to-layer="45" to-port="1" />
922
+ <edge from-layer="45" from-port="2" to-layer="46" to-port="1" />
923
+ <edge from-layer="46" from-port="2" to-layer="48" to-port="1" />
924
+ <edge from-layer="47" from-port="0" to-layer="48" to-port="3" />
925
+ <edge from-layer="48" from-port="5" to-layer="49" to-port="0" />
926
+ <edge from-layer="48" from-port="4" to-layer="49" to-port="1" />
927
+ <edge from-layer="48" from-port="4" to-layer="53" to-port="0" />
928
+ <edge from-layer="48" from-port="5" to-layer="53" to-port="1" />
929
+ <edge from-layer="48" from-port="6" to-layer="53" to-port="2" />
930
+ <edge from-layer="49" from-port="2" to-layer="51" to-port="0" />
931
+ <edge from-layer="50" from-port="0" to-layer="51" to-port="1" />
932
+ <edge from-layer="51" from-port="2" to-layer="53" to-port="3" />
933
+ <edge from-layer="52" from-port="0" to-layer="53" to-port="4" />
934
+ <edge from-layer="53" from-port="6" to-layer="54" to-port="0" />
935
+ <edge from-layer="53" from-port="5" to-layer="57" to-port="0" />
936
+ <edge from-layer="54" from-port="1" to-layer="55" to-port="0" />
937
+ <edge from-layer="55" from-port="1" to-layer="56" to-port="0" />
938
+ <edge from-layer="57" from-port="1" to-layer="58" to-port="0" />
939
+ </edges>
940
+ <rt_info>
941
+ <eos_token_id value="0" />
942
+ </rt_info>
943
+ </net>
special_tokens_map.json ADDED
@@ -0,0 +1,27 @@
{
  "additional_special_tokens": [
    "<|im_start|>",
    "<|im_end|>"
  ],
  "bos_token": {
    "content": "<|endoftext|>",
    "lstrip": false,
    "normalized": false,
    "rstrip": false,
    "single_word": false
  },
  "eos_token": {
    "content": "<|endoftext|>",
    "lstrip": false,
    "normalized": false,
    "rstrip": false,
    "single_word": false
  },
  "unk_token": {
    "content": "<|endoftext|>",
    "lstrip": false,
    "normalized": false,
    "rstrip": false,
    "single_word": false
  }
}
tokenizer.json ADDED
The diff for this file is too large to render. See raw diff
 
tokenizer.model ADDED
@@ -0,0 +1,3 @@
version https://git-lfs.github.com/spec/v1
oid sha256:407d001d18582959881b30ff39b660d4bd5ee50957d03f57daeeb3059e9db54c
size 6128796
tokenizer_config.json ADDED
@@ -0,0 +1,234 @@
1
+ {
2
+ "add_bos_token": false,
3
+ "add_eos_token": false,
4
+ "add_prefix_space": false,
5
+ "added_tokens_decoder": {
6
+ "0": {
7
+ "content": "<|endoftext|>",
8
+ "lstrip": false,
9
+ "normalized": false,
10
+ "rstrip": false,
11
+ "single_word": false,
12
+ "special": true
13
+ },
14
+ "1": {
15
+ "content": "<|padding|>",
16
+ "lstrip": false,
17
+ "normalized": false,
18
+ "rstrip": false,
19
+ "single_word": false,
20
+ "special": true
21
+ },
22
+ "50254": {
23
+ "content": " ",
24
+ "lstrip": false,
25
+ "normalized": true,
26
+ "rstrip": false,
27
+ "single_word": false,
28
+ "special": false
29
+ },
30
+ "50255": {
31
+ "content": " ",
32
+ "lstrip": false,
33
+ "normalized": true,
34
+ "rstrip": false,
35
+ "single_word": false,
36
+ "special": false
37
+ },
38
+ "50256": {
39
+ "content": " ",
40
+ "lstrip": false,
41
+ "normalized": true,
42
+ "rstrip": false,
43
+ "single_word": false,
44
+ "special": false
45
+ },
46
+ "50257": {
47
+ "content": " ",
48
+ "lstrip": false,
49
+ "normalized": true,
50
+ "rstrip": false,
51
+ "single_word": false,
52
+ "special": false
53
+ },
54
+ "50258": {
55
+ "content": " ",
56
+ "lstrip": false,
57
+ "normalized": true,
58
+ "rstrip": false,
59
+ "single_word": false,
60
+ "special": false
61
+ },
62
+ "50259": {
63
+ "content": " ",
64
+ "lstrip": false,
65
+ "normalized": true,
66
+ "rstrip": false,
67
+ "single_word": false,
68
+ "special": false
69
+ },
70
+ "50260": {
71
+ "content": " ",
72
+ "lstrip": false,
73
+ "normalized": true,
74
+ "rstrip": false,
75
+ "single_word": false,
76
+ "special": false
77
+ },
78
+ "50261": {
79
+ "content": " ",
80
+ "lstrip": false,
81
+ "normalized": true,
82
+ "rstrip": false,
83
+ "single_word": false,
84
+ "special": false
85
+ },
86
+ "50262": {
87
+ "content": " ",
88
+ "lstrip": false,
89
+ "normalized": true,
90
+ "rstrip": false,
91
+ "single_word": false,
92
+ "special": false
93
+ },
94
+ "50263": {
95
+ "content": " ",
96
+ "lstrip": false,
97
+ "normalized": true,
98
+ "rstrip": false,
99
+ "single_word": false,
100
+ "special": false
101
+ },
102
+ "50264": {
103
+ "content": " ",
104
+ "lstrip": false,
105
+ "normalized": true,
106
+ "rstrip": false,
107
+ "single_word": false,
108
+ "special": false
109
+ },
110
+ "50265": {
111
+ "content": " ",
112
+ "lstrip": false,
113
+ "normalized": true,
114
+ "rstrip": false,
115
+ "single_word": false,
116
+ "special": false
117
+ },
118
+ "50266": {
119
+ "content": " ",
120
+ "lstrip": false,
121
+ "normalized": true,
122
+ "rstrip": false,
123
+ "single_word": false,
124
+ "special": false
125
+ },
126
+ "50267": {
127
+ "content": " ",
128
+ "lstrip": false,
129
+ "normalized": true,
130
+ "rstrip": false,
131
+ "single_word": false,
132
+ "special": false
133
+ },
134
+ "50268": {
135
+ "content": " ",
136
+ "lstrip": false,
137
+ "normalized": true,
138
+ "rstrip": false,
139
+ "single_word": false,
140
+ "special": false
141
+ },
142
+ "50269": {
143
+ "content": " ",
144
+ "lstrip": false,
145
+ "normalized": true,
146
+ "rstrip": false,
147
+ "single_word": false,
148
+ "special": false
149
+ },
150
+ "50270": {
151
+ "content": " ",
152
+ "lstrip": false,
153
+ "normalized": true,
154
+ "rstrip": false,
155
+ "single_word": false,
156
+ "special": false
157
+ },
158
+ "50271": {
159
+ "content": " ",
160
+ "lstrip": false,
161
+ "normalized": true,
162
+ "rstrip": false,
163
+ "single_word": false,
164
+ "special": false
165
+ },
166
+ "50272": {
167
+ "content": " ",
168
+ "lstrip": false,
169
+ "normalized": true,
170
+ "rstrip": false,
171
+ "single_word": false,
172
+ "special": false
173
+ },
174
+ "50273": {
175
+ "content": " ",
176
+ "lstrip": false,
177
+ "normalized": true,
178
+ "rstrip": false,
179
+ "single_word": false,
180
+ "special": false
181
+ },
182
+ "50274": {
183
+ "content": " ",
184
+ "lstrip": false,
185
+ "normalized": true,
186
+ "rstrip": false,
187
+ "single_word": false,
188
+ "special": false
189
+ },
190
+ "50275": {
191
+ "content": " ",
192
+ "lstrip": false,
193
+ "normalized": true,
194
+ "rstrip": false,
195
+ "single_word": false,
196
+ "special": false
197
+ },
198
+ "50276": {
199
+ "content": " ",
200
+ "lstrip": false,
201
+ "normalized": true,
202
+ "rstrip": false,
203
+ "single_word": false,
204
+ "special": false
205
+ },
206
+ "50277": {
207
+ "content": "<|im_start|>",
208
+ "lstrip": false,
209
+ "normalized": false,
210
+ "rstrip": false,
211
+ "single_word": false,
212
+ "special": true
213
+ },
214
+ "50278": {
215
+ "content": "<|im_end|>",
216
+ "lstrip": false,
217
+ "normalized": false,
218
+ "rstrip": false,
219
+ "single_word": false,
220
+ "special": true
221
+ }
222
+ },
223
+ "additional_special_tokens": [
224
+ "<|im_start|>",
225
+ "<|im_end|>"
226
+ ],
227
+ "bos_token": "<|endoftext|>",
228
+ "clean_up_tokenization_spaces": true,
229
+ "eos_token": "<|endoftext|>",
230
+ "model_max_length": 2048,
231
+ "pad_token": null,
232
+ "tokenizer_class": "GPTNeoXTokenizer",
233
+ "unk_token": "<|endoftext|>"
234
+ }