File size: 491 Bytes
ec14ead |
1 2 3 4 5 6 7 8 9 10 11 12 13 14 15 16 17 18 19 20 21 22 23 24 25 26 27 28 29 30 31 32 33 34 35 36 37 |
# Triton Inference Server model configuration (protobuf text format).
# Declares a Python-backend model named "postprocessing" that consumes
# batches of token IDs and produces string output — presumably
# detokenization using the tokenizer referenced in `parameters` below
# (NOTE(review): confirm against the backend's model.py).
name: "postprocessing"
backend: "python"
# At most one request per batch; dynamic batching across requests is disabled.
max_batch_size: 1
input [
{
# Token IDs to post-process; both dimensions are variable (-1).
name: "TOKENS_BATCH"
data_type: TYPE_UINT32
dims: [ -1, -1 ]
},
{
# One length value per sequence — presumably the count of valid tokens
# in each row of TOKENS_BATCH (TODO: verify against the backend code).
name: "sequence_length"
data_type: TYPE_UINT32
dims: [ -1 ]
}
]
output [
{
# Variable-shaped string tensor holding the post-processed text.
name: "OUTPUT"
data_type: TYPE_STRING
dims: [ -1, -1 ]
}
]
instance_group [
{
# Run 16 parallel instances of this model on CPU.
count: 16
kind: KIND_CPU
}
]
# Backend-specific parameter: path to the tokenizer model file, read by
# the Python backend (path is relative to the model repository layout).
parameters {
key: "tokenizer_path"
value: {
string_value: "tokenizer/tokenizer.model"
}
}
|