commit b8b5faaf0d
@@ -1,6 +1,6 @@
 Global:
   use_gpu: True
-  epoch_num: 300
+  epoch_num: 60
   log_smooth_window: 20
   print_batch_step: 50
   save_model_dir: ./output/kie_5/
@@ -13,7 +13,7 @@ Global:
   # you should set load_static_weights as False.
   load_static_weights: False
   cal_metric_during_train: False
-  pretrained_model: ./output/kie_4/best_accuracy
+  pretrained_model:
   checkpoints:
   save_inference_dir:
   use_visualdl: False
@@ -33,8 +33,9 @@ import paddle
 
 from ppocr.data import create_operators, transform
 from ppocr.modeling.architectures import build_model
-from ppocr.utils.save_load import init_model
+from ppocr.utils.save_load import load_model
 import tools.program as program
+import time
 
 
 def read_class_list(filepath):
@@ -80,7 +81,8 @@ def draw_kie_result(batch, node, idx_to_cls, count):
     vis_img = np.ones((h, w * 3, 3), dtype=np.uint8) * 255
     vis_img[:, :w] = img
     vis_img[:, w:] = pred_img
-    save_kie_path = os.path.dirname(config['Global']['save_res_path']) + "/kie_results/"
+    save_kie_path = os.path.dirname(config['Global'][
+        'save_res_path']) + "/kie_results/"
     if not os.path.exists(save_kie_path):
         os.makedirs(save_kie_path)
     save_path = os.path.join(save_kie_path, str(count) + ".png")
@@ -93,7 +95,7 @@ def main():
 
     # build model
     model = build_model(config['Architecture'])
-    init_model(config, model, logger)
+    load_model(config, model)
 
     # create data ops
     transforms = []
@@ -111,10 +113,15 @@ def main():
         os.makedirs(os.path.dirname(save_res_path))
 
     model.eval()
+
+    warmup_times = 0
+    count_t = []
     with open(save_res_path, "wb") as fout:
         with open(config['Global']['infer_img'], "rb") as f:
             lines = f.readlines()
             for index, data_line in enumerate(lines):
+                if index == 10:
+                    warmup_t = time.time()
                 data_line = data_line.decode('utf-8')
                 substr = data_line.strip("\n").split("\t")
                 img_path, label = data_dir + "/" + substr[0], substr[1]
@@ -122,16 +129,23 @@ def main():
                 with open(data['img_path'], 'rb') as f:
                     img = f.read()
                     data['image'] = img
+                st = time.time()
                 batch = transform(data, ops)
                 batch_pred = [0] * len(batch)
                 for i in range(len(batch)):
                     batch_pred[i] = paddle.to_tensor(
                         np.expand_dims(
                             batch[i], axis=0))
+                st = time.time()
                 node, edge = model(batch_pred)
                 node = F.softmax(node, -1)
+                count_t.append(time.time() - st)
                 draw_kie_result(batch, node, idx_to_cls, index)
     logger.info("success!")
+    logger.info("It took {} s for predict {} images.".format(
+        np.sum(count_t), len(count_t)))
+    ips = len(count_t[warmup_times:]) / np.sum(count_t[warmup_times:])
+    logger.info("The ips is {} images/s".format(ips))
 
 
 if __name__ == '__main__':
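For reference, a minimal standalone sketch (not part of the commit) of the throughput-measurement pattern this patch wires into main(): time each forward pass, skip an optional warmup prefix, and report images per second. run_model and measure_ips are hypothetical placeholders for the real model call; warmup_times mirrors the variable the patch introduces (left at 0 in the diff).

import time

import numpy as np


def run_model(sample):
    # Placeholder workload; the real script calls model(batch_pred) here.
    time.sleep(0.01)
    return sample


def measure_ips(samples, warmup_times=0):
    count_t = []  # per-sample latencies, like count_t in the patch
    for sample in samples:
        st = time.time()  # timer starts right before the forward pass
        run_model(sample)
        count_t.append(time.time() - st)
    total = np.sum(count_t[warmup_times:])
    ips = len(count_t[warmup_times:]) / total  # images per second, as in the patch
    return ips


if __name__ == "__main__":
    print("ips: {:.2f} images/s".format(measure_ips(range(20), warmup_times=10)))

Note that in the shown hunks st is reassigned immediately before model(batch_pred), so count_t captures only the forward pass (the earlier st set before transform is overwritten), and warmup_t recorded at index 10 is not referenced again in these hunks.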