name: "preprocessing"
backend: "python"
max_batch_size: 1
input [
    {
        name: "QUERY"
        data_type: TYPE_STRING
        dims: [ -1 ]
    }
]
output [
    {
        name: "INPUT_ID"
        data_type: TYPE_UINT32
        dims: [ -1 ]
    },
    {
        name: "REQUEST_INPUT_LEN"
        data_type: TYPE_UINT32
        dims: [ 1 ]
    }
]
instance_group [
    {
        count: 4
        kind: KIND_CPU
    }
]
parameters {
    key: "tokenizer_path"
    value: {
        string_value: "tokenizer/tokenizer.model"
    }
}
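A minimal sketch of a `model.py` that such a configuration could pair with, assuming the Triton Python backend and a SentencePiece tokenizer for `tokenizer.model` (the tokenizer library and padding scheme are assumptions, not taken from this repository): it reads the `tokenizer_path` parameter, tokenizes `QUERY`, and emits `INPUT_ID` and `REQUEST_INPUT_LEN`.

```python
# model.py -- hypothetical sketch; the sentencepiece dependency is an assumption.
import json
import numpy as np
import triton_python_backend_utils as pb_utils
from sentencepiece import SentencePieceProcessor


class TritonPythonModel:
    def initialize(self, args):
        # args["model_config"] is the config.pbtxt serialized as JSON;
        # reach into it for the "tokenizer_path" parameter declared above.
        model_config = json.loads(args["model_config"])
        tokenizer_path = model_config["parameters"]["tokenizer_path"]["string_value"]
        self.tokenizer = SentencePieceProcessor(model_file=tokenizer_path)

    def execute(self, requests):
        responses = []
        for request in requests:
            # QUERY arrives as UTF-8 byte strings (TYPE_STRING, dims [-1]).
            query = pb_utils.get_input_tensor_by_name(request, "QUERY").as_numpy()
            texts = [q.decode("utf-8") for q in query.reshape(-1)]

            # Tokenize each string and record its token count.
            ids = [self.tokenizer.encode(t) for t in texts]
            max_len = max(len(i) for i in ids)
            input_id = np.zeros((len(ids), max_len), dtype=np.uint32)
            for row, tokens in enumerate(ids):
                input_id[row, : len(tokens)] = tokens
            request_input_len = np.array([[len(i)] for i in ids], dtype=np.uint32)

            responses.append(
                pb_utils.InferenceResponse(
                    output_tensors=[
                        pb_utils.Tensor("INPUT_ID", input_id),
                        pb_utils.Tensor("REQUEST_INPUT_LEN", request_input_len),
                    ]
                )
            )
        return responses
```

The output dtypes (`np.uint32`) match the `TYPE_UINT32` declarations in the config, and `instance_group` with `count: 4` / `KIND_CPU` lets Triton run four copies of this tokenization model on CPU in parallel.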