<?xml version="1.0"?>
<net name="detokenizer" version="11">
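	<!-- Detokenizer graph: takes i64 token IDs of shape [batch, sequence_length] and returns a 1-D
	     string tensor. SentencepieceDetokenizer and StringTensorPack are version="extension" ops, so
	     the openvino_tokenizers extension must be registered before this model is read; a minimal
	     sketch in Python is `import openvino_tokenizers` followed by ov.Core().read_model("detokenizer.xml"). -->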
	<layers>
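		<!-- Layer 0: graph input, token IDs with dynamic shape [batch, sequence_length], element type i64. -->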
		<layer id="0" name="Parameter_237586" type="Parameter" version="opset1">
			<data shape="?,?" element_type="i64" />
			<output>
				<port id="0" precision="I64" names="Parameter_237586">
					<dim>-1</dim>
					<dim>-1</dim>
				</port>
			</output>
		</layer>
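		<!-- Layer 1: u8 constant of 499991 bytes (offset 0 in the weights .bin), the embedded
		     tokenizer model blob consumed by SentencepieceDetokenizer below. -->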
		<layer id="1" name="Constant_237562" type="Const" version="opset1">
			<data element_type="u8" shape="499991" offset="0" size="499991" />
			<output>
				<port id="0" precision="U8">
					<dim>499991</dim>
				</port>
			</output>
		</layer>
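		<!-- Layer 2: convert the i64 token IDs to the i32 input expected by SentencepieceDetokenizer. -->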
		<layer id="2" name="Convert_237596" type="Convert" version="opset1">
			<data destination_type="i32" />
			<input>
				<port id="0" precision="I64">
					<dim>-1</dim>
					<dim>-1</dim>
				</port>
			</input>
			<output>
				<port id="1" precision="I32">
					<dim>-1</dim>
					<dim>-1</dim>
				</port>
			</output>
		</layer>
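		<!-- Layer 3: SentencepieceDetokenizer (extension op) decodes the ID matrix back to text and
		     emits a decomposed string tensor: begin offsets (i32), end offsets (i32) and the
		     concatenated character bytes (u8). -->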
		<layer id="3" name="SentencepieceDetokenizer_237587" type="SentencepieceDetokenizer" version="extension">
			<input>
				<port id="0" precision="U8">
					<dim>499991</dim>
				</port>
				<port id="1" precision="I32">
					<dim>-1</dim>
					<dim>-1</dim>
				</port>
			</input>
			<output>
				<port id="2" precision="I32">
					<dim>-1</dim>
				</port>
				<port id="3" precision="I32">
					<dim>-1</dim>
				</port>
				<port id="4" precision="U8">
					<dim>-1</dim>
				</port>
			</output>
		</layer>
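		<!-- Layer 4: StringTensorPack (extension op, mode="begins_ends") packs the begins/ends/bytes
		     triple into a single 1-D string tensor named "string_output". -->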
		<layer id="4" name="StringTensorPack_237588" type="StringTensorPack" version="extension">
			<data mode="begins_ends" />
			<input>
				<port id="0" precision="I32">
					<dim>-1</dim>
				</port>
				<port id="1" precision="I32">
					<dim>-1</dim>
				</port>
				<port id="2" precision="U8">
					<dim>-1</dim>
				</port>
			</input>
			<output>
				<port id="3" precision="STRING" names="string_output">
					<dim>-1</dim>
				</port>
			</output>
		</layer>
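		<!-- Layer 5: graph output, the packed string tensor. -->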
		<layer id="5" name="Result_237589" type="Result" version="opset1">
			<input>
				<port id="0" precision="STRING">
					<dim>-1</dim>
				</port>
			</input>
		</layer>
	</layers>
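	<!-- Data-flow wiring: Parameter -> Convert -> SentencepieceDetokenizer (together with the model
	     blob constant) -> StringTensorPack -> Result. -->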
	<edges>
		<edge from-layer="0" from-port="0" to-layer="2" to-port="0" />
		<edge from-layer="1" from-port="0" to-layer="3" to-port="0" />
		<edge from-layer="2" from-port="1" to-layer="3" to-port="1" />
		<edge from-layer="3" from-port="2" to-layer="4" to-port="0" />
		<edge from-layer="3" from-port="3" to-layer="4" to-port="1" />
		<edge from-layer="3" from-port="4" to-layer="4" to-port="2" />
		<edge from-layer="4" from-port="3" to-layer="5" to-port="0" />
	</edges>
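	<!-- Runtime metadata: special token IDs and the Jinja chat template carried over from the original
	     Hugging Face tokenizer (LlamaTokenizerFast). -->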
	<rt_info>
		<bos_token_id value="1" />
		<chat_template value="{% for message in messages %}{% if message['role'] == 'system' and message['content'] %}{{'&lt;|system|&gt; ' + message['content'] + '&lt;|end|&gt; '}}{% elif message['role'] == 'user' %}{{'&lt;|user|&gt; ' + message['content'] + '&lt;|end|&gt; '}}{% elif message['role'] == 'assistant' %}{{'&lt;|assistant|&gt; ' + message['content'] + '&lt;|end|&gt; '}}{% endif %}{% endfor %}{% if add_generation_prompt %}{{ '&lt;|assistant|&gt; ' }}{% else %}{{ eos_token }}{% endif %}" />
		<eos_token_id value="32000" />
		<original_tokenizer_class value="&lt;class 'transformers.models.llama.tokenization_llama_fast.LlamaTokenizerFast'&gt;" />
		<pad_token_id value="32000" />
	</rt_info>
</net>