Mirror of https://github.com/PaddlePaddle/PaddleOCR.git (synced 2025-12-29 07:58:41 +00:00)
support ocr_system for cpp CI
parent b9c73d9a72
commit 611df3b613
@@ -63,6 +63,7 @@ DEFINE_double(cls_thresh, 0.9, "Threshold of cls_thresh.");
 DEFINE_string(rec_model_dir, "", "Path of rec inference model.");
 DEFINE_int32(rec_batch_num, 1, "rec_batch_num.");
 DEFINE_string(char_list_file, "../../ppocr/utils/ppocr_keys_v1.txt", "Path of dictionary.");
+// DEFINE_string(char_list_file, "./ppocr/utils/ppocr_keys_v1.txt", "Path of dictionary.");
 
 
 using namespace std;
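Both char_list_file defaults point at the same dictionary file; which one resolves correctly depends on the working directory the ppocr binary is launched from. A minimal standalone sketch of the same gflags pattern (a hypothetical program, not the repo's main.cpp) showing how such a string flag is declared and read:

// sketch.cpp -- minimal gflags usage mirroring the DEFINE_string pattern above.
// Hypothetical standalone example; build with: g++ sketch.cpp -lgflags
#include <gflags/gflags.h>
#include <fstream>
#include <iostream>

// Default path is resolved relative to the directory the binary is run from.
DEFINE_string(char_list_file, "../../ppocr/utils/ppocr_keys_v1.txt",
              "Path of dictionary.");

int main(int argc, char **argv) {
  gflags::ParseCommandLineFlags(&argc, &argv, true);
  std::ifstream dict(FLAGS_char_list_file);
  if (!dict.is_open()) {
    std::cerr << "[ERROR] dictionary not found: " << FLAGS_char_list_file << std::endl;
    return 1;
  }
  std::cout << "loaded dictionary: " << FLAGS_char_list_file << std::endl;
  return 0;
}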
@@ -193,7 +194,7 @@ int main_system(std::vector<cv::String> cv_all_img_names) {
   for (int i = 0; i < cv_all_img_names.size(); ++i) {
     LOG(INFO) << "The predict img: " << cv_all_img_names[i];
 
-    cv::Mat srcimg = cv::imread(FLAGS_image_dir, cv::IMREAD_COLOR);
+    cv::Mat srcimg = cv::imread(cv_all_img_names[i], cv::IMREAD_COLOR);
     if (!srcimg.data) {
       std::cerr << "[ERROR] image read failed! image path: " << cv_all_img_names[i] << endl;
       exit(1);
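The change above makes main_system read the i-th image from cv_all_img_names[i] instead of passing the directory-level FLAGS_image_dir to cv::imread on every iteration. A self-contained sketch of the corrected per-image loop, using hypothetical file names in place of the image list the caller is assumed to collect from --image_dir:

// loop_sketch.cpp -- per-image read loop mirroring the fix above (hypothetical standalone example).
// Build with OpenCV, e.g.: g++ loop_sketch.cpp `pkg-config --cflags --libs opencv4`
#include <opencv2/imgcodecs.hpp>
#include <iostream>
#include <vector>

int main() {
  // Stand-ins for cv_all_img_names; the real code fills this from the --image_dir flag.
  std::vector<cv::String> cv_all_img_names = {"img1.jpg", "img2.jpg"};
  for (size_t i = 0; i < cv_all_img_names.size(); ++i) {
    // Read the i-th image path, not the directory-level flag.
    cv::Mat srcimg = cv::imread(cv_all_img_names[i], cv::IMREAD_COLOR);
    if (!srcimg.data) {
      std::cerr << "[ERROR] image read failed! image path: " << cv_all_img_names[i] << std::endl;
      return 1;
    }
    std::cout << "read " << cv_all_img_names[i] << " (" << srcimg.cols << "x" << srcimg.rows << ")" << std::endl;
  }
  return 0;
}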
@@ -62,7 +62,7 @@ inference:./deploy/cpp_infer/build/ppocr det
 --precision:fp32|fp16
 --det_model_dir:
 --image_dir:./inference/ch_det_data_50/all-sum-510/
---save_log_path:null
+null:null
 --benchmark:True
 ===========================serving_params===========================
 trans_model:-m paddle_serving_client.convert
@@ -53,7 +53,7 @@ inference:tools/infer/predict_system.py
 use_opencv:True
 infer_model:./inference/ch_ppocr_mobile_v2.0_det_infer/
 infer_quant:False
-inference:./deploy/cpp_infer/build/ppocr det
+inference:./deploy/cpp_infer/build/ppocr system
 --use_gpu:True|False
 --enable_mkldnn:True|False
 --cpu_threads:1|6
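This switches the TIPC inference command from the detection-only entry point to the end-to-end system mode of the same ppocr binary (the main_system function seen in the C++ hunk above). A hypothetical sketch of such det/rec/system subcommand dispatch, not the repo's actual main():

// dispatch_sketch.cpp -- hypothetical routing of "ppocr <mode>" style subcommands.
#include <cstring>
#include <iostream>

static int main_det()    { std::cout << "run detection only\n"; return 0; }
static int main_rec()    { std::cout << "run recognition only\n"; return 0; }
static int main_system() { std::cout << "run det + cls + rec end-to-end\n"; return 0; }

int main(int argc, char **argv) {
  if (argc < 2) {
    std::cerr << "usage: ppocr [det|rec|system] [--flags]\n";
    return 1;
  }
  if (std::strcmp(argv[1], "det") == 0)    return main_det();
  if (std::strcmp(argv[1], "rec") == 0)    return main_rec();
  if (std::strcmp(argv[1], "system") == 0) return main_system();
  std::cerr << "unknown mode: " << argv[1] << "\n";
  return 1;
}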
@@ -62,6 +62,6 @@ inference:./deploy/cpp_infer/build/ppocr det
 --precision:fp32|fp16
 --det_model_dir:
 --image_dir:./inference/ch_det_data_50/all-sum-510/
---save_log_path:null
+--rec_model_dir:./inference/ch_ppocr_mobile_v2.0_rec_infer/
 --benchmark:True
 
@@ -192,7 +192,8 @@ if [ ${MODE} = "cpp_infer" ]; then
     cpp_infer_model_key=$(func_parser_key "${lines[62]}")
     cpp_image_dir_key=$(func_parser_key "${lines[63]}")
     cpp_infer_img_dir=$(func_parser_value "${lines[63]}")
-    cpp_save_log_key=$(func_parser_key "${lines[64]}")
+    cpp_rec_model_key=$(func_parser_key "${lines[64]}")
+    cpp_rec_model_value=$(func_parser_value "${lines[64]}")
     cpp_benchmark_key=$(func_parser_key "${lines[65]}")
     cpp_benchmark_value=$(func_parser_value "${lines[65]}")
 fi
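The parsing above now takes both a key and a value from config line 64, which the params-file hunk changed to the --rec_model_dir entry. An illustrative sketch of what splitting such a line on its first colon yields, assuming that is what the func_parser_key / func_parser_value shell helpers do:

// parse_sketch.cpp -- hypothetical illustration of splitting a TIPC config line such as
// "--rec_model_dir:./inference/ch_ppocr_mobile_v2.0_rec_infer/" into a key and a value.
#include <iostream>
#include <string>

int main() {
  const std::string line = "--rec_model_dir:./inference/ch_ppocr_mobile_v2.0_rec_infer/";
  const std::size_t colon = line.find(':');  // split on the first ':'
  const std::string key   = line.substr(0, colon);
  const std::string value = (colon == std::string::npos) ? "" : line.substr(colon + 1);
  std::cout << "key=" << key << "\nvalue=" << value << std::endl;
  return 0;
}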
@@ -368,7 +369,8 @@ function func_cpp_inference(){
             set_batchsize=$(func_set_params "${cpp_batch_size_key}" "${batch_size}")
             set_cpu_threads=$(func_set_params "${cpp_cpu_threads_key}" "${threads}")
             set_model_dir=$(func_set_params "${cpp_infer_model_key}" "${_model_dir}")
-            command="${_script} ${cpp_use_gpu_key}=${use_gpu} ${cpp_use_mkldnn_key}=${use_mkldnn} ${set_cpu_threads} ${set_model_dir} ${set_batchsize} ${set_infer_data} ${set_benchmark} > ${_save_log_path} 2>&1 "
+            set_infer_params1=$(func_set_params "${cpp_rec_model_key}" "${cpp_rec_model_value}")
+            command="${_script} ${cpp_use_gpu_key}=${use_gpu} ${cpp_use_mkldnn_key}=${use_mkldnn} ${set_cpu_threads} ${set_model_dir} ${set_batchsize} ${set_infer_data} ${set_benchmark} ${set_infer_params1} > ${_save_log_path} 2>&1 "
             eval $command
             last_status=${PIPESTATUS[0]}
             eval "cat ${_save_log_path}"
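The new set_infer_params1 appends the recognition-model flag to the composed command before it is eval'ed, in both the CPU branch above and the GPU branch below. A hypothetical illustration of the assumed func_set_params behavior (emit key=value, or nothing when the config slot is null); the real helper is a shell function in test_tipc, not this C++ stand-in:

// set_params_sketch.cpp -- hypothetical stand-in for the assumed func_set_params behavior.
#include <iostream>
#include <string>

static std::string set_params(const std::string &key, const std::string &value) {
  // Assumption: a null/empty slot contributes nothing to the command line.
  if (key.empty() || value.empty() || value == "null") return "";
  return " " + key + "=" + value;
}

int main() {
  std::string command = "./deploy/cpp_infer/build/ppocr system";
  command += set_params("--rec_model_dir", "./inference/ch_ppocr_mobile_v2.0_rec_infer/");
  command += set_params("--save_log_path", "null");  // dropped: slot is null
  std::cout << command << std::endl;
  return 0;
}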
@@ -396,7 +398,8 @@ function func_cpp_inference(){
             set_tensorrt=$(func_set_params "${cpp_use_trt_key}" "${use_trt}")
             set_precision=$(func_set_params "${cpp_precision_key}" "${precision}")
             set_model_dir=$(func_set_params "${cpp_infer_model_key}" "${_model_dir}")
-            command="${_script} ${cpp_use_gpu_key}=${use_gpu} ${set_tensorrt} ${set_precision} ${set_model_dir} ${set_batchsize} ${set_infer_data} ${set_benchmark} > ${_save_log_path} 2>&1 "
+            set_infer_params1=$(func_set_params "${cpp_rec_model_key}" "${cpp_rec_model_value}")
+            command="${_script} ${cpp_use_gpu_key}=${use_gpu} ${set_tensorrt} ${set_precision} ${set_model_dir} ${set_batchsize} ${set_infer_data} ${set_benchmark} ${set_infer_params1} > ${_save_log_path} 2>&1 "
             eval $command
             last_status=${PIPESTATUS[0]}
             eval "cat ${_save_log_path}"