修复ips和batch_cost输出错误的bug (Fix bug causing incorrect ips and batch_cost output)

This commit is contained in:
WenmuZhou 2020-11-24 15:47:12 +08:00
parent 0c287c41ea
commit 40023f76a6

View File

@@ -236,7 +236,6 @@ def train(config,
                 train_batch_cost = 0.0
                 train_reader_cost = 0.0
                 batch_sum = 0
-                batch_start = time.time()
             # eval
             if global_step > start_eval_step and \
                     (global_step - start_eval_step) % eval_batch_step == 0 and dist.get_rank() == 0:
@@ -275,6 +274,7 @@ def train(config,
                         best_model_dict[main_indicator],
                         global_step)
             global_step += 1
+            batch_start = time.time()
         if dist.get_rank() == 0:
             save_model(
                 model,
@@ -334,17 +334,19 @@ def eval(model, valid_dataloader, post_process_class, eval_class):
 def save_inference_mode(model, config, logger):
-    model.eval()
-    save_path = '{}/infer/{}'.format(config['Global']['save_model_dir'],
-                                     config['Architecture']['model_type'])
-    if config['Architecture']['model_type'] == 'rec':
-        input_shape = [None, 3, 32, None]
-        jit_model = paddle.jit.to_static(
-            model, input_spec=[paddle.static.InputSpec(input_shape)])
-        paddle.jit.save(jit_model, save_path)
-        logger.info('inference model save to {}'.format(save_path))
+    if dist.get_rank() == 0:
+        model.eval()
+        print('infer')
+        save_path = '{}/infer/{}'.format(config['Global']['save_model_dir'],
+                                         config['Architecture']['model_type'])
+        if config['Architecture']['model_type'] == 'rec':
+            input_shape = [None, 3, 32, None]
+            jit_model = paddle.jit.to_static(
+                model, input_spec=[paddle.static.InputSpec(input_shape)])
+            paddle.jit.save(jit_model, save_path)
+            logger.info('inference model save to {}'.format(save_path))
     model.train()


 def preprocess():