Issue: cannot print Chinese characters from the Triton Inference Server Python backend — the log output is garbled.
import json
import triton_python_backend_utils as pb_utils
class TritonPythonModel:
    """Triton Python backend model computing element-wise sum and difference.

    For each request, reads tensors INPUT0 and INPUT1 and produces
    OUTPUT0 = INPUT0 + INPUT1 and OUTPUT1 = INPUT0 - INPUT1.
    """

    def initialize(self, args):
        """Called once when the model is being loaded.

        Args:
            args: dict with a 'model_config' key holding the model
                configuration as a JSON string.
        """
        self.model_config = model_config = json.loads(args['model_config'])
        output0_config = pb_utils.get_output_config_by_name(
            model_config, "OUTPUT0")
        output1_config = pb_utils.get_output_config_by_name(
            model_config, "OUTPUT1")
        # Convert Triton types to numpy types so outputs can be cast below.
        self.output0_dtype = pb_utils.triton_string_to_numpy(
            output0_config['data_type'])
        self.output1_dtype = pb_utils.triton_string_to_numpy(
            output1_config['data_type'])

    def execute(self, requests):
        """Process a batch of inference requests.

        Args:
            requests: list of pb_utils.InferenceRequest objects.

        Returns:
            A list of pb_utils.InferenceResponse objects whose length
            matches the length of `requests`.
        """
        output0_dtype = self.output0_dtype
        output1_dtype = self.output1_dtype
        responses = []
        for request in requests:
            # flush=True pushes the message through the buffered stdout
            # immediately so it shows up in the server log (without it the
            # print may never appear, or appear garbled, under tritonserver).
            print('你好', flush=True)
            in_0 = pb_utils.get_input_tensor_by_name(request, "INPUT0")
            in_1 = pb_utils.get_input_tensor_by_name(request, "INPUT1")
            # Materialize each input tensor once instead of calling
            # as_numpy() twice per tensor.
            in_0_np = in_0.as_numpy()
            in_1_np = in_1.as_numpy()
            out_0 = in_0_np + in_1_np
            out_1 = in_0_np - in_1_np
            # Create output tensors. You need pb_utils.Tensor
            # objects to create pb_utils.InferenceResponse.
            out_tensor_0 = pb_utils.Tensor("OUTPUT0", out_0.astype(output0_dtype))
            out_tensor_1 = pb_utils.Tensor("OUTPUT1", out_1.astype(output1_dtype))
            inference_response = pb_utils.InferenceResponse(
                output_tensors=[out_tensor_0, out_tensor_1])
            responses.append(inference_response)
        # You should return a list of pb_utils.InferenceResponse. Length
        # of this list must match the length of `requests` list.
        return responses

    def finalize(self):
        """Called once when the model is being unloaded; logs cleanup."""
        print('Cleaning up...', flush=True)
The printed output comes out as raw bytes. How can Chinese characters be printed correctly?
Add `export PYTHONIOENCODING=UTF-8` when starting tritonserver. @pogevip
docker run --rm \
-p8000:8000 -p8001:8001 -p8002:8002 \
-v ${model_repo}:/models \
${image_name} \
bash -c "export PYTHONIOENCODING=UTF-8 && tritonserver --model-repository=/models "
P.S. You may also need to pass `flush=True` when printing, e.g. `print('你好', flush=True)`.
Thanks @Jackiexiao for providing the solution. @pogevip I'm going to close this issue for now. Please re-open the issue if you would like to follow up with this.