mirror of https://github.com/PaddlePaddle/PaddleOCR.git
rename train_log
parent 19c5ab238e
commit e9e8f40927
@@ -327,14 +327,15 @@ else
         save_log="${LOG_PATH}/${model_name}_gpus_${gpu}_autocast_${autocast}_bs_${train_batch_value}_mp"
     fi

+    btrain_log="${LOG_PATH}/benchmark_train/${model_name}_bs${train_batch_value}_${autocast}"
     set_save_model=$(func_set_params "${save_model_key}" "${save_log}")
     if [ ${#gpu} -le 2 ];then # train with cpu or single gpu
-        cmd="${python} ${run_train} ${set_use_gpu} ${set_save_model} ${set_epoch} ${set_pretrain} ${set_autocast} ${set_batchsize} ${set_train_params1} ${set_amp_config} "
+        cmd="${python} ${run_train} ${set_use_gpu} ${set_save_model} ${set_epoch} ${set_pretrain} ${set_autocast} ${set_batchsize} ${set_train_params1} ${set_amp_config} > ${btrain_log} 2>&1 "
     elif [ ${#ips} -le 26 ];then # train with multi-gpu
-        cmd="${python} -m paddle.distributed.launch --gpus=${gpu} ${run_train} ${set_use_gpu} ${set_save_model} ${set_epoch} ${set_pretrain} ${set_autocast} ${set_batchsize} ${set_train_params1} ${set_amp_config}"
+        cmd="${python} -m paddle.distributed.launch --gpus=${gpu} ${run_train} ${set_use_gpu} ${set_save_model} ${set_epoch} ${set_pretrain} ${set_autocast} ${set_batchsize} ${set_train_params1} ${set_amp_config} > ${btrain_log} 2>&1 "
     else # train with multi-machine
-        cmd="${python} -m paddle.distributed.launch --ips=${ips} --gpus=${gpu} ${run_train} ${set_use_gpu} ${set_save_model} ${set_pretrain} ${set_epoch} ${set_autocast} ${set_batchsize} ${set_train_params1} ${set_amp_config}"
+        cmd="${python} -m paddle.distributed.launch --ips=${ips} --gpus=${gpu} ${run_train} ${set_use_gpu} ${set_save_model} ${set_pretrain} ${set_epoch} ${set_autocast} ${set_batchsize} ${set_train_params1} ${set_amp_config} > ${btrain_log} 2>&1 "
     fi
     # run train
     eval $cmd

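For reference, the only functional change in this hunk is the trailing > ${btrain_log} 2>&1 appended to each command string: both stdout and stderr of the training run are sent into the per-configuration benchmark log before eval executes the assembled command. Below is a minimal sketch of that redirect-then-eval pattern; all values (LOG_PATH, model_name, the echo stand-in for the training command) are hypothetical placeholders, not taken from the script.

#!/bin/bash
# Placeholder values standing in for the script's real variables (assumptions).
LOG_PATH="./output"
model_name="demo_model"
btrain_log="${LOG_PATH}/benchmark_train/${model_name}_bs8_fp32"

# The target directory must exist before the redirect, or the shell fails
# with "No such file or directory". The PaddleOCR script presumably creates
# it elsewhere; here we create it explicitly.
mkdir -p "${LOG_PATH}/benchmark_train"

# Build the command as a string, then eval it. "2>&1" folds stderr into the
# same file as stdout, so the benchmark log captures both streams.
cmd="echo 'training step 1' > ${btrain_log} 2>&1"
eval $cmd

cat "${btrain_log}"   # prints: training step 1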