
Can you provide an example of running a PaddleDetection web server and client on Win10?

Open zzkzzk1984 opened this issue 3 years ago • 17 comments

I have tried all kinds of variations, including following the OCR example code, and nothing works. I'm close to a breakdown.

filename: your_webservice.py

from paddle_serving_server.web_service import WebService
import base64
import numpy as np
import cv2

If you are using the GPU version, please use: from paddle_serving_server.web_service import WebService

class MyWebService(WebService):
    def preprocess(self, feed=[], fetch=[], is_batch=[]):
        # Implement preprocessing here.
        # feed_dict: key = var name, value = numpy array input
        # fetch_names: list of fetch variable names
        # is_batch: whether the numpy arrays in the feed_dict values already include a batch dimension

        data = base64.b64decode(feed["image"].encode('utf8'))
        data = np.frombuffer(data, np.uint8)
        im = cv2.imdecode(data, cv2.IMREAD_COLOR)

        feed["image"] = data
        feed["im_shape"] = np.array(list(im.shape[1:])).reshape(-1)
        feed["scale_factor"] = np.array([1.0, 1.0]).reshape(-1)

        return feed, fetch, is_batch

    def postprocess(self, feed={}, fetch=[], fetch_map=None):
        # fetch_map is the dict returned after prediction: its keys are the fetch names given
        # when process returned, its values are the concrete values of those fetch vars.
        # After processing here, the result must be converted back into a dict whose values
        # are plain Python lists, so it can be JSON-serialized for the web response.
        return response

my_service = MyWebService(name="fasterRCNN")
my_service.load_model_config("serving_server")
my_service.prepare_server(workdir="workdir", port=9292)

If you are a GPU user, you can refer to the Python example under python/examples/ocr.

my_service.run_debugger_service()

The run_rpc_service() interface cannot be used on the Windows platform.

my_service.run_web_service()

import requests
import json
import cv2
import base64
import os, sys
import time
import numpy as np
from PIL import Image

import sys
from paddle_serving_app.reader import *

Binary to numpy

#with open(image_path, "rb") as file:

jpg_bin = file.read()

image = cv2.imdecode(np.asarray(bytearray(jpg_bin), dtype='uint8'), cv2.IMREAD_COLOR)

numpy to binary

#with open(tmp_image_path, 'wb') as tmp_file:

tmp_jpg_bin = np.array(cv2.imencode('.jpg', image)[1]).tobytes()

tmp_file.write(tmp_jpg_bin)

preprocess = Sequential([
    File2Image(), BGR2RGB(),
    Resize((608, 608), interpolation=cv2.INTER_LINEAR),
    Div(255.0), Transpose((2, 0, 1))
])

def cv2_to_base64(image):
    #data = cv2.imdecode(np.asarray(bytearray(image), dtype='uint8'), cv2.IMREAD_COLOR)
    #image = cv2.cvtColor(data, cv2.COLOR_RGB2GRAY)
    #tmp_jpg_bin = np.array(cv2.imencode('.jpg', image)[1]).tobytes()
    tmp_jpg_bin = image
    return base64.b64encode(tmp_jpg_bin).decode('utf8')  #data.tostring()).decode('utf8')

headers = {"Content-type": "application/json"} url = "http://127.0.0.1:9292/fasterRCNN/prediction"

test_img_dir = "imgs/" for idx, img_file in enumerate(os.listdir(test_img_dir)): with open(os.path.join(test_img_dir, img_file), 'rb') as file: image_data1 = file.read() im = preprocess(os.path.join(test_img_dir, img_file)) image = cv2_to_base64(image_data1) for i in range(1): #data = {"feed": [{"im_shape": []},{"image": image},{"scale_factor": "6654"}], "fetch": [{"save_infer_model/scale_0.tmp_1": "555"},{"save_infer_model/scale_1.tmp_1": "666"}], "is_batch": False} #data = new map #data["feed"]["image"] = image; #data["feed"]["im_shape"] = np.array(list(im.shape[1:])).reshape(-1); #data["feed"]["scale_factor"] = np.array([1.0, 1.0]).reshape(-1); #data["fetch"] = ["save_infer_model/scale_0.tmp_1"]; #data["batch"] = False;

    data = {"feed": {
        "image": image,
        "im_shape": [608,608],
        "scale_factor": [1.0,1.0],
    },
        "fetch": ["save_infer_model/scale_0.tmp_1"],
        "is_batch": False}
    r = requests.post(url=url, headers=headers, data=json.dumps(data))
    print(r.json())

test_img_dir = "imgs/" print("==> total number of test imgs: ", len(os.listdir(test_img_dir)))

zzkzzk1984 avatar Nov 05 '21 01:11 zzkzzk1984

Server run log:
Connected to pydev debugger (build 193.6494.30)
This API will be deprecated later. Please do not use it
This API will be deprecated later. Please do not use it
web service address: http://172.16.13.18:9292/fasterRCNN/prediction
This API will be deprecated later. Please do not use it

  • Serving Flask app "paddle_serving_server.web_service" (lazy loading)
  • Environment: production
    WARNING: This is a development server. Do not use it in a production deployment. Use a production WSGI server instead.
  • Debug mode: off
I1105 09:35:07.605020 18672 analysis_predictor.cc:612] ir_optim is turned off, no IR pass will be executed
--- Running analysis [ir_graph_build_pass]
--- Running analysis [ir_graph_clean_pass]
--- Running analysis [ir_analysis_pass]
--- Running analysis [ir_params_sync_among_devices_pass]
--- Running analysis [adjust_cudnn_workspace_size_pass]
--- Running analysis [inference_op_replace_pass]
--- Running analysis [memory_optimize_pass]
I1105 09:35:07.764523 18672 memory_optimize_pass.cc:199] Cluster name : flatten_1.tmp_0 size: 50176
I1105 09:35:07.764523 18672 memory_optimize_pass.cc:199] Cluster name : roi_align_3.tmp_0 size: 50176
I1105 09:35:07.764523 18672 memory_optimize_pass.cc:199] Cluster name : relu_23.tmp_0 size: 2048
I1105 09:35:07.764523 18672 memory_optimize_pass.cc:199] Cluster name : relu_52.tmp_0 size: 1024
I1105 09:35:07.764523 18672 memory_optimize_pass.cc:199] Cluster name : roi_align_1.tmp_0 size: 50176
I1105 09:35:07.764523 18672 memory_optimize_pass.cc:199] Cluster name : relu_41.tmp_0 size: 4096
I1105 09:35:07.764523 18672 memory_optimize_pass.cc:199] Cluster name : concat_4.tmp_0 size: 50176
I1105 09:35:07.764523 18672 memory_optimize_pass.cc:199] Cluster name : roi_align_0.tmp_0 size: 50176
I1105 09:35:07.764523 18672 memory_optimize_pass.cc:199] Cluster name : conv2d_125.tmp_1 size: 1024
I1105 09:35:07.764523 18672 memory_optimize_pass.cc:199] Cluster name : relu_51.tmp_0 size: 1024
I1105 09:35:07.764523 18672 memory_optimize_pass.cc:199] Cluster name : conv2d_132.tmp_1 size: 1024
I1105 09:35:07.764523 18672 memory_optimize_pass.cc:199] Cluster name : image size: 12
I1105 09:35:07.764523 18672 memory_optimize_pass.cc:199] Cluster name : shape_2.tmp_0_slice_0 size: 8
I1105 09:35:07.765022 18672 memory_optimize_pass.cc:199] Cluster name : range_0.tmp_0 size: 4
I1105 09:35:07.765022 18672 memory_optimize_pass.cc:199] Cluster name : distribute_fpn_proposals_0.tmp_4 size: 4
I1105 09:35:07.765022 18672 memory_optimize_pass.cc:199] Cluster name : distribute_fpn_proposals_0.tmp_8 size: 4
--- Running analysis [ir_graph_to_program_pass]
I1105 09:35:08.004521 18672 analysis_predictor.cc:636] ======= optimize end =======
I1105 09:35:08.005020 18672 naive_executor.cc:98] --- skip [feed], feed -> scale_factor
I1105 09:35:08.005020 18672 naive_executor.cc:98] --- skip [feed], feed -> image
I1105 09:35:08.005020 18672 naive_executor.cc:98] --- skip [feed], feed -> im_shape
I1105 09:35:08.014020 18672 naive_executor.cc:98] --- skip [roi_align_3.tmp_0], fetch -> fetch
I1105 09:35:08.014020 18672 naive_executor.cc:98] --- skip [flatten_1.tmp_0], fetch -> fetch

zzkzzk1984 avatar Nov 05 '21 01:11 zzkzzk1984

Client run log:
C:\env\Scripts\python.exe "C:\Program Files\JetBrains\PyCharm 2019.3.3\plugins\python\helpers\pydev\pydevd.py" --multiproc --qt-support=auto --client 127.0.0.1 --port 54207 --file C:/workspace/Serving/python/paddle_serving_server/ocr_web_client.py
pydev debugger: process 28296 is connecting

Connected to pydev debugger (build 193.6494.30) {'result': 'In user code:\n\n File "tools/export_model.py", line 113, in \n main()\n File "tools/export_model.py", line 109, in main\n run(FLAGS, cfg)\n File "tools/export_model.py", line 77, in run\n trainer.export(FLAGS.output_dir)\n File "/home/aistudio/work/PaddleDetection/ppdet/engine/trainer.py", line 573, in export\n input_spec, static_model.forward.main_program,\n File "/opt/conda/envs/python35-paddle120-env/lib/python3.7/site-packages/paddle/fluid/dygraph/dygraph_to_static/program_translator.py", line 537, in main_program\n concrete_program = self.concrete_program\n File "/opt/conda/envs/python35-paddle120-env/lib/python3.7/site-packages/paddle/fluid/dygraph/dygraph_to_static/program_translator.py", line 453, in concrete_program\n return self.concrete_program_specify_input_spec(input_spec=None)\n File "/opt/conda/envs/python35-paddle120-env/lib/python3.7/site-packages/paddle/fluid/dygraph/dygraph_to_static/program_translator.py", line 491, in concrete_program_specify_input_spec\n *desired_input_spec)\n File "/opt/conda/envs/python35-paddle120-env/lib/python3.7/site-packages/paddle/fluid/dygraph/dygraph_to_static/program_translator.py", line 401, in get_concrete_program\n concrete_program, partial_program_layer = self._program_cache[cache_key]\n File "/opt/conda/envs/python35-paddle120-env/lib/python3.7/site-packages/paddle/fluid/dygraph/dygraph_to_static/program_translator.py", line 714, in getitem\n self._caches[item] = self._build_once(item)\n File "/opt/conda/envs/python35-paddle120-env/lib/python3.7/site-packages/paddle/fluid/dygraph/dygraph_to_static/program_translator.py", line 705, in _build_once\n class_instance=cache_key.class_instance)\n File "", line 2, in from_func_spec\n \n File "/opt/conda/envs/python35-paddle120-env/lib/python3.7/site-packages/paddle/fluid/wrapped_decorator.py", line 25, in impl\n return wrapped_func(*args, **kwargs)\n File "/opt/conda/envs/python35-paddle120-env/lib/python3.7/site-packages/paddle/fluid/dygraph/base.py", line 40, in impl\n return func(*args, **kwargs)\n File "/opt/conda/envs/python35-paddle120-env/lib/python3.7/site-packages/paddle/fluid/dygraph/dygraph_to_static/program_translator.py", line 655, in from_func_spec\n outputs = static_func(*inputs)\n File "/tmp/tmp3xcgal5i.py", line 29, in forward\n false_fn_1, (), (), (out,))\n File "/opt/conda/envs/python35-paddle120-env/lib/python3.7/site-packages/paddle/fluid/dygraph/dygraph_to_static/convert_operators.py", line 210, in convert_ifelse\n return _run_py_ifelse(pred, true_fn, false_fn, true_args, false_args)\n File "/opt/conda/envs/python35-paddle120-env/lib/python3.7/site-packages/paddle/fluid/dygraph/dygraph_to_static/convert_operators.py", line 235, in _run_py_ifelse\n return true_fn(*true_args) if pred else false_fn(*false_args)\n File "/home/aistudio/work/PaddleDetection/ppdet/modeling/architectures/meta_arch.py", line 28, in forward\n out = self.get_pred()\n File "/home/aistudio/work/PaddleDetection/ppdet/modeling/architectures/faster_rcnn.py", line 104, in get_pred\n bbox_pred, bbox_num = self._forward()\n File "/home/aistudio/work/PaddleDetection/ppdet/modeling/architectures/faster_rcnn.py", line 72, in _forward\n body_feats = self.backbone(self.inputs)\n File "/opt/conda/envs/python35-paddle120-env/lib/python3.7/site-packages/paddle/fluid/dygraph/layers.py", line 902, in call\n outputs = self.forward(*inputs, **kwargs)\n File "/home/aistudio/work/PaddleDetection/ppdet/modeling/backbones/resnet.py", line 582, in forward\n conv1 
= self.conv1(x)\n File "/opt/conda/envs/python35-paddle120-env/lib/python3.7/site-packages/paddle/fluid/dygraph/layers.py", line 902, in call\n outputs = self.forward(*inputs, **kwargs)\n File "/opt/conda/envs/python35-paddle120-env/lib/python3.7/site-packages/paddle/fluid/dygraph/container.py", line 98, in forward\n input = layer(input)\n File "/opt/conda/envs/python35-paddle120-env/lib/python3.7/site-packages/paddle/fluid/dygraph/layers.py", line 902, in call\n outputs = self.forward(*inputs, **kwargs)\n File "/tmp/tmphg89kqmj.py", line 24, in forward\n (inputs, self), (out,))\n File "/opt/conda/envs/python35-paddle120-env/lib/python3.7/site-packages/paddle/fluid/dygraph/dygraph_to_static/convert_operators.py", line 210, in convert_ifelse\n return _run_py_ifelse(pred, true_fn, false_fn, true_args, false_args)\n File "/opt/conda/envs/python35-paddle120-env/lib/python3.7/site-packages/paddle/fluid/dygraph/dygraph_to_static/convert_operators.py", line 235, in _run_py_ifelse\n return true_fn(*true_args) if pred else false_fn(*false_args)\n File "/home/aistudio/work/PaddleDetection/ppdet/modeling/backbones/resnet.py", line 122, in forward\n out = self.conv(inputs)\n File "/opt/conda/envs/python35-paddle120-env/lib/python3.7/site-packages/paddle/fluid/dygraph/layers.py", line 902, in call\n outputs = self.forward(*inputs, **kwargs)\n File "/opt/conda/envs/python35-paddle120-env/lib/python3.7/site-packages/paddle/nn/layer/conv.py", line 667, in forward\n use_cudnn=self._use_cudnn)\n File "/opt/conda/envs/python35-paddle120-env/lib/python3.7/site-packages/paddle/nn/functional/conv.py", line 139, in _conv_nd\n type=op_type, inputs=inputs, outputs=outputs, attrs=attrs)\n File "/opt/conda/envs/python35-paddle120-env/lib/python3.7/site-packages/paddle/fluid/layer_helper.py", line 43, in append_op\n return self.main_program.current_block().append_op(*args, **kwargs)\n File "/opt/conda/envs/python35-paddle120-env/lib/python3.7/site-packages/paddle/fluid/framework.py", line 2942, in append_op\n attrs=kwargs.get("attrs", None))\n File "/opt/conda/envs/python35-paddle120-env/lib/python3.7/site-packages/paddle/fluid/framework.py", line 2014, in init\n for frame in traceback.extract_stack():\n\n InvalidArgumentError: The input of Op(Conv) should be a 4-D or 5-D Tensor. But received: input's dimension is 1, input's shape is [59158].\n [Hint: Expected in_dims.size() == 4 || in_dims.size() == 5 == true, but received in_dims.size() == 4 || in_dims.size() == 5:0 != true:1.] (at C:/home/workspace/Paddle_release/paddle/fluid/operators/conv_op.cc:65)\n [operator < conv2d > error]'} ==> total number of test imgs: 1

Process finished with exit code 0

zzkzzk1984 avatar Nov 05 '21 01:11 zzkzzk1984

So far I have tried several Paddle Serving setups:
1. Win10 Python web server + Win10 Python web client: the call succeeds, but the returned values make no sense.
2. CentOS 7 Python RPC server + CentOS 7 Python RPC client: the call succeeds and the results are normal.
3. CentOS 7 Python RPC server + Win10 Python RPC client: cannot be run.
4. CentOS 7 Python RPC server (not sure whether it includes web) + Win10 Python web client: the call fails.
5. CentOS 7 Python RPC server + Win10 Java client: the call fails.
6. Win10 Python OCR web server + Win10 Python OCR web client: the call succeeds and the results are normal.

In short, my own model still cannot be called over the web; the only thing that has worked is the Python RPC client on CentOS 7.

zzkzzk1984 avatar Nov 05 '21 06:11 zzkzzk1984

Hello, are you deploying with Docker on Windows?

TeslaZhao avatar Nov 08 '21 03:11 TeslaZhao

No, I'm running it in PyCharm on Win10.

zzkzzk1984 avatar Nov 08 '21 03:11 zzkzzk1984

That won't work; in a Windows environment you need Docker to run it. Otherwise you can only run the Flask-based example: https://github.com/PaddlePaddle/Serving/blob/develop/doc/WINDOWS_TUTORIAL_CN.md

TeslaZhao avatar Nov 08 '21 03:11 TeslaZhao

I have already tried the example you pointed to (screenshot omitted), and the client ended up reporting a chunk of server-side error log (screenshot omitted).

zzkzzk1984 avatar Nov 08 '21 03:11 zzkzzk1984

日志如下: C:\env\Scripts\python.exe C:/workspace/Serving/python/paddle_serving_server/ocr_web_client.py {'result': 'In user code:\n\n File "tools/export_model.py", line 113, in \n main()\n File "tools/export_model.py", line 109, in main\n run(FLAGS, cfg)\n File "tools/export_model.py", line 77, in run\n trainer.export(FLAGS.output_dir)\n File "/home/aistudio/work/PaddleDetection/ppdet/engine/trainer.py", line 573, in export\n input_spec, static_model.forward.main_program,\n File "/opt/conda/envs/python35-paddle120-env/lib/python3.7/site-packages/paddle/fluid/dygraph/dygraph_to_static/program_translator.py", line 537, in main_program\n concrete_program = self.concrete_program\n File "/opt/conda/envs/python35-paddle120-env/lib/python3.7/site-packages/paddle/fluid/dygraph/dygraph_to_static/program_translator.py", line 453, in concrete_program\n return self.concrete_program_specify_input_spec(input_spec=None)\n File "/opt/conda/envs/python35-paddle120-env/lib/python3.7/site-packages/paddle/fluid/dygraph/dygraph_to_static/program_translator.py", line 491, in concrete_program_specify_input_spec\n *desired_input_spec)\n File "/opt/conda/envs/python35-paddle120-env/lib/python3.7/site-packages/paddle/fluid/dygraph/dygraph_to_static/program_translator.py", line 401, in get_concrete_program\n concrete_program, partial_program_layer = self._program_cache[cache_key]\n File "/opt/conda/envs/python35-paddle120-env/lib/python3.7/site-packages/paddle/fluid/dygraph/dygraph_to_static/program_translator.py", line 714, in getitem\n self._caches[item] = self._build_once(item)\n File "/opt/conda/envs/python35-paddle120-env/lib/python3.7/site-packages/paddle/fluid/dygraph/dygraph_to_static/program_translator.py", line 705, in _build_once\n class_instance=cache_key.class_instance)\n File "", line 2, in from_func_spec\n \n File "/opt/conda/envs/python35-paddle120-env/lib/python3.7/site-packages/paddle/fluid/wrapped_decorator.py", line 25, in impl\n return wrapped_func(*args, **kwargs)\n File "/opt/conda/envs/python35-paddle120-env/lib/python3.7/site-packages/paddle/fluid/dygraph/base.py", line 40, in impl\n return func(*args, **kwargs)\n File "/opt/conda/envs/python35-paddle120-env/lib/python3.7/site-packages/paddle/fluid/dygraph/dygraph_to_static/program_translator.py", line 655, in from_func_spec\n outputs = static_func(*inputs)\n File "/tmp/tmp3xcgal5i.py", line 29, in forward\n false_fn_1, (), (), (out,))\n File "/opt/conda/envs/python35-paddle120-env/lib/python3.7/site-packages/paddle/fluid/dygraph/dygraph_to_static/convert_operators.py", line 210, in convert_ifelse\n return _run_py_ifelse(pred, true_fn, false_fn, true_args, false_args)\n File "/opt/conda/envs/python35-paddle120-env/lib/python3.7/site-packages/paddle/fluid/dygraph/dygraph_to_static/convert_operators.py", line 235, in _run_py_ifelse\n return true_fn(*true_args) if pred else false_fn(*false_args)\n File "/home/aistudio/work/PaddleDetection/ppdet/modeling/architectures/meta_arch.py", line 28, in forward\n out = self.get_pred()\n File "/home/aistudio/work/PaddleDetection/ppdet/modeling/architectures/faster_rcnn.py", line 104, in get_pred\n bbox_pred, bbox_num = self._forward()\n File "/tmp/tmpblrha88o.py", line 56, in _forward\n rois, rois_num))\n File "/opt/conda/envs/python35-paddle120-env/lib/python3.7/site-packages/paddle/fluid/dygraph/dygraph_to_static/convert_operators.py", line 210, in convert_ifelse\n return _run_py_ifelse(pred, true_fn, false_fn, true_args, false_args)\n File 
"/opt/conda/envs/python35-paddle120-env/lib/python3.7/site-packages/paddle/fluid/dygraph/dygraph_to_static/convert_operators.py", line 235, in _run_py_ifelse\n return true_fn(*true_args) if pred else false_fn(*false_args)\n File "/home/aistudio/work/PaddleDetection/ppdet/modeling/architectures/faster_rcnn.py", line 81, in _forward\n rois, rois_num, _ = self.rpn_head(body_feats, self.inputs)\n File "/opt/conda/envs/python35-paddle120-env/lib/python3.7/site-packages/paddle/fluid/dygraph/layers.py", line 902, in call\n outputs = self.forward(*inputs, **kwargs)\n File "/home/aistudio/work/PaddleDetection/ppdet/modeling/proposal_generator/rpn_head.py", line 133, in forward\n rois, rois_num = self._gen_proposal(scores, deltas, anchors, inputs)\n File "/tmp/tmp50jdqjxm.py", line 102, in _gen_proposal\n i, bbox_deltas, bs_rois_num_collect, anchors, im_shape, scores])\n File "/opt/conda/envs/python35-paddle120-env/lib/python3.7/site-packages/paddle/fluid/dygraph/dygraph_to_static/convert_operators.py", line 42, in convert_while_loop\n loop_vars = _run_paddle_while_loop(cond, body, loop_vars)\n File "/opt/conda/envs/python35-paddle120-env/lib/python3.7/site-packages/paddle/fluid/dygraph/dygraph_to_static/convert_operators.py", line 52, in _run_paddle_while_loop\n loop_vars = control_flow.while_loop(cond, body, loop_vars)\n File "/opt/conda/envs/python35-paddle120-env/lib/python3.7/site-packages/paddle/fluid/layers/control_flow.py", line 1203, in while_loop\n output_vars = body(*new_loop_vars)\n File "/home/aistudio/work/PaddleDetection/ppdet/modeling/proposal_generator/rpn_head.py", line 168, in _gen_proposal\n anchors=anchor,\n File "/opt/conda/envs/python35-paddle120-env/lib/python3.7/site-packages/paddle/fluid/framework.py", line 1658, in getitem\n return getitem_impl(self, item)\n File "/opt/conda/envs/python35-paddle120-env/lib/python3.7/site-packages/paddle/fluid/variable_index.py", line 225, in getitem_impl\n attrs=attrs)\n File "/opt/conda/envs/python35-paddle120-env/lib/python3.7/site-packages/paddle/fluid/framework.py", line 2942, in append_op\n attrs=kwargs.get("attrs", None))\n File "/opt/conda/envs/python35-paddle120-env/lib/python3.7/site-packages/paddle/fluid/framework.py", line 2014, in init\n for frame in traceback.extract_stack():\n\n InvalidArgumentError: When step > 0, end should be greater than start, but received end = 1, start = 1.\n [Hint: Expected end > start, but received end:1 <= start:1.] (at C:\home\workspace\Paddle_release\paddle/fluid/operators/slice_utils.h:59)\n [operator < slice > error]'} ==> total number of test imgs: 1

zzkzzk1984 avatar Nov 08 '21 03:11 zzkzzk1984

Which paddle version did you install with pip?

TeslaZhao avatar Nov 08 '21 03:11 TeslaZhao

(screenshot of the installed paddle version omitted)

zzkzzk1984 avatar Nov 08 '21 03:11 zzkzzk1984

Judging from your error message, the inference engine is receiving the wrong input data. You need to print your input and the processed output inside preprocess, and check whether the output data is consistent with the feed_var entries in the prototxt file generated alongside the model files.

TeslaZhao avatar Nov 08 '21 03:11 TeslaZhao
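
A minimal sketch of what that check could look like on the server side, assuming the exported model config sits in the serving_server directory passed to load_model_config above, where Paddle Serving writes serving_server_conf.prototxt (adjust the path if yours differs):

    import re

    # Dump every feed_var block (name, alias_name, shape, feed_type) that the
    # exported model declares, so it can be compared with what preprocess() returns.
    with open("serving_server/serving_server_conf.prototxt", "r") as f:
        conf = f.read()

    for block in re.findall(r"feed_var\s*\{[^}]*\}", conf):
        print(block)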

(two screenshots omitted)

zzkzzk1984 avatar Nov 08 '21 06:11 zzkzzk1984

Print the shape of each member of feed.

TeslaZhao avatar Nov 09 '21 09:11 TeslaZhao
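
One way to do that (a sketch): add a few lines to MyWebService.preprocess, just before `return feed, fetch, is_batch`, so the server log shows exactly what the inference engine will receive.

    for name, value in feed.items():
        # numpy arrays expose .shape/.dtype; anything else falls back to its Python type
        print("feed[{}] shape={} dtype={}".format(
            name, getattr(value, "shape", None), getattr(value, "dtype", type(value))))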

{'image': array([[[[175, 255, 255, ..., 237, 237, 237],
     [209, 255, 255, ..., 244, 244, 244],
     [255, 255, 255, ..., 253, 253, 253],
     ...,
     [249, 249, 249, ..., 249, 249, 249],
     [249, 249, 249, ..., 249, 249, 249],
     [249, 249, 249, ..., 249, 249, 249]],

    [[175, 255, 255, ..., 237, 237, 237],
     [209, 255, 255, ..., 244, 244, 244],
     [255, 255, 255, ..., 253, 253, 253],
     ...,
     [250, 250, 250, ..., 250, 250, 250],
     [250, 250, 250, ..., 250, 250, 250],
     [250, 250, 250, ..., 250, 250, 250]],

    [[175, 255, 255, ..., 237, 237, 237],
     [209, 255, 255, ..., 244, 244, 244],
     [255, 255, 255, ..., 253, 253, 253],
     ...,
     [252, 252, 252, ..., 252, 252, 252],
     [252, 252, 252, ..., 252, 252, 252],
     [252, 252, 252, ..., 252, 252, 252]]]], dtype=uint8), 'im_shape': array([   1,    3, 1280, 1280]), 'scale_factor': array([1., 1.])}

['save_infer_model/scale_0.tmp_1'] []

zzkzzk1984 avatar Nov 09 '21 09:11 zzkzzk1984

The shapes inferred from 'im_shape': array([1, 3, 1280, 1280]), 'scale_factor': array([1., 1.]), and the image data do not match the shapes in the screenshot below:
https://user-images.githubusercontent.com/23081513/140696704-0fedea8f-3e87-42a1-8cd0-45f48debc952.png

TeslaZhao avatar Nov 10 '21 11:11 TeslaZhao
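
The thread never shows the prototxt contents, but for comparison, below is a minimal sketch of a preprocess whose outputs would line up with what a PaddleDetection 2.x detection export typically declares: `image` as a float32 CHW array, `im_shape` as the two-element resized height/width, and `scale_factor` as the two resize ratios. The 608x608 input size, the divide-by-255 normalization, and those shapes are all assumptions here; verify them against your own serving_server_conf.prototxt and the export's preprocessing config (e.g. infer_cfg.yml) before relying on this.

    def preprocess(self, feed=[], fetch=[], is_batch=[]):
        # Decode the base64 JPEG sent by the client.
        data = base64.b64decode(feed["image"].encode('utf8'))
        data = np.frombuffer(data, np.uint8)
        im = cv2.imdecode(data, cv2.IMREAD_COLOR)        # HWC, BGR, uint8
        orig_h, orig_w = im.shape[:2]

        # Resize / normalize / transpose; the exact transforms should mirror the
        # export's preprocessing config (assumed here: 608x608, divide by 255, CHW).
        target_h, target_w = 608, 608
        im = cv2.resize(im, (target_w, target_h), interpolation=cv2.INTER_LINEAR)
        im = im.astype('float32') / 255.0
        im = im.transpose((2, 0, 1))                     # HWC -> CHW

        feed["image"] = im
        feed["im_shape"] = np.array([target_h, target_w], dtype='float32')
        feed["scale_factor"] = np.array(
            [target_h / float(orig_h), target_w / float(orig_w)], dtype='float32')
        # No batch dimension was added, so report is_batch as False.
        return feed, fetch, False

The essential point is only that the dtype and shape of every feed member match the model's feed_var declarations; the concrete transforms have to come from your own export.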

Has the issue above been resolved? Does the code need to be modified? Running the webservice and client code above on Win10, I get the same error. Judging from the server output, the error occurs right after preprocess() finishes, and postprocess is never reached.

simonchf avatar May 31 '22 03:05 simonchf