remove gpu infer path in train_fleet_config, remove redundant config 'MobileNetV3_large_x1_0_lite_arm_cpu_cpp.txt' in PPLCNet
parent 05c393d938
commit 3569d12656

@@ -38,7 +38,7 @@ infer_model:../inference/
 infer_export:True
 infer_quant:Fasle
 inference:python/predict_rec.py -c configs/inference_rec.yaml
--o Global.use_gpu:True|False
+-o Global.use_gpu:False
 -o Global.enable_mkldnn:True|False
 -o Global.cpu_num_threads:1|6
 -o Global.batch_size:1|16

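In these TIPC text configs a value such as 'True|False' lists the candidate settings that the test chain sweeps over, so replacing it with 'False' restricts the chain to CPU-only inference, as the commit title says. Below is a minimal sketch of that key:value|value convention; expand_tipc_line is a hypothetical helper for illustration, not the actual PaddleClas TIPC parser.

    # Sketch of the key:value|value convention used by these config lines.
    # expand_tipc_line is a hypothetical helper, not PaddleClas code.
    def expand_tipc_line(line: str):
        """Split '<key>:<v1>|<v2>|...' into (key, [v1, v2, ...])."""
        key, _, values = line.partition(":")
        return key.strip(), [v.strip() for v in values.split("|") if v.strip()]

    old = "-o Global.use_gpu:True|False"   # before: sweeps GPU and CPU runs
    new = "-o Global.use_gpu:False"        # after: CPU path only
    for line in (old, new):
        key, candidates = expand_tipc_line(line)
        print(key, "->", candidates)
    # -o Global.use_gpu -> ['True', 'False']
    # -o Global.use_gpu -> ['False']
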
@@ -38,7 +38,7 @@ infer_model:../inference/
 infer_export:True
 infer_quant:Fasle
 inference:python/predict_cls.py -c configs/inference_cls.yaml -o PreProcess.transform_ops.0.ResizeImage.resize_short=236
--o Global.use_gpu:True|False
+-o Global.use_gpu:False
 -o Global.enable_mkldnn:True|False
 -o Global.cpu_num_threads:1|6
 -o Global.batch_size:1|16

MobileNetV3_large_x1_0_lite_arm_cpu_cpp.txt (deleted)
@@ -1,8 +0,0 @@
-runtime_device:arm_cpu
-lite_arm_work_path:/data/local/tmp/arm_cpu/
-lite_arm_so_path:inference_lite_lib.android.armv8/cxx/lib/libpaddle_light_api_shared.so
-clas_model_file:MobileNetV3_large_x1_0
-inference_cmd:clas_system config.txt tabby_cat.jpg
---num_threads_list:1
---batch_size_list:1
---precision_list:FP32

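The removed Paddle-Lite arm_cpu config uses the same one-'key:value'-per-line layout as the other TIPC files. A rough sketch of reading such a file into a dict follows; load_kv_config is a hypothetical reader, and the example call assumes the file were still on disk.

    # Sketch: load a 'key:value'-per-line config (like the removed
    # MobileNetV3_large_x1_0_lite_arm_cpu_cpp.txt) into a dict.
    # load_kv_config is hypothetical and only illustrates the layout.
    def load_kv_config(path: str) -> dict:
        cfg = {}
        with open(path, encoding="utf-8") as f:
            for raw in f:
                line = raw.strip()
                if not line or line.startswith("#"):
                    continue
                key, _, value = line.partition(":")
                cfg[key.strip()] = value.strip()
        return cfg

    # Before this commit, this lookup would have returned 'arm_cpu':
    # load_kv_config("MobileNetV3_large_x1_0_lite_arm_cpu_cpp.txt")["runtime_device"]
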
@@ -38,7 +38,7 @@ infer_model:../inference/
 infer_export:True
 infer_quant:Fasle
 inference:python/predict_cls.py -c configs/inference_cls.yaml
--o Global.use_gpu:True|False
+-o Global.use_gpu:False
 -o Global.enable_mkldnn:True|False
 -o Global.cpu_num_threads:1|6
 -o Global.batch_size:1|16

@@ -38,7 +38,7 @@ infer_model:../inference/
 infer_export:True
 infer_quant:Fasle
 inference:python/predict_cls.py -c configs/inference_cls.yaml
--o Global.use_gpu:True|False
+-o Global.use_gpu:False
 -o Global.enable_mkldnn:True|False
 -o Global.cpu_num_threads:1|6
 -o Global.batch_size:1|16

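With 'True' dropped from Global.use_gpu, each of the chains above exercises only the CPU predictor. As a small sketch of how a dotted override such as 'Global.use_gpu=False' could be merged into a loaded YAML config: this is illustrative only, PaddleClas ships its own override handling, and apply_override here is a hypothetical helper.

    # Sketch: merge a dotted 'A.B=value' override into a nested config dict.
    # Illustrative only; apply_override is hypothetical, not PaddleClas code.
    import yaml  # assumes PyYAML is available

    def apply_override(cfg: dict, override: str) -> None:
        dotted, _, value = override.partition("=")
        keys = dotted.split(".")
        node = cfg
        for k in keys[:-1]:
            node = node.setdefault(k, {})
        # Map the textual booleans used in these configs onto real booleans.
        node[keys[-1]] = {"True": True, "False": False}.get(value, value)

    cfg = yaml.safe_load("Global:\n  use_gpu: true\n  batch_size: 1\n")
    apply_override(cfg, "Global.use_gpu=False")
    print(cfg["Global"]["use_gpu"])  # False
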