fix latexocr bug (#13920)

pull/13932/head
zhangyubo0722 2024-09-28 19:11:31 +08:00 committed by GitHub
parent cda3e1201e
commit 362103bd0b
No known key found for this signature in database
GPG Key ID: B5690EEEBB952194
4 changed files with 13 additions and 1 deletions

View File

@ -89,6 +89,8 @@ class StdConv2dSame(nn.Conv2D):
self.eps = eps
def forward(self, x):
if not self.training:
self.export = True
if self.same_pad:
if self.export:
x = pad_same_export(x, self._kernel_size, self._stride, self._dilation)
@ -201,6 +203,8 @@ class MaxPool2dSame(nn.MaxPool2D):
)
def forward(self, x):
if not self.training:
self.export = True
if self.export:
x = pad_same_export(x, self.ksize, self.stride, value=-float("inf"))
else:

View File

@ -342,6 +342,8 @@ class Attention(nn.Layer):
mem=None,
seq_len=0,
):
if not self.training:
self.is_export = True
b, n, _, h, talking_heads, collab_heads, has_context = (
*x.shape,
self.heads,
@ -987,6 +989,7 @@ class LaTeXOCRHead(nn.Layer):
# forward for export
def forward(self, inputs, targets=None):
if not self.training:
self.is_export = True
encoded_feat = inputs
batch_num = encoded_feat.shape[0]
bos_tensor = paddle.full([batch_num, 1], self.bos_token, dtype=paddle.int64)

View File

@ -70,6 +70,7 @@ def dump_infer_config(config, path, logger):
if hpi_config["Hpi"]["backend_config"].get("tensorrt", None):
hpi_config["Hpi"]["supported_backends"]["gpu"].remove("tensorrt")
del hpi_config["Hpi"]["backend_config"]["tensorrt"]
hpi_config["Hpi"]["selected_backends"]["gpu"] = "paddle_infer"
infer_cfg["Hpi"] = hpi_config["Hpi"]
if config["Global"].get("pdx_model_name", None):
infer_cfg["Global"] = {}

View File

@ -304,6 +304,8 @@ def update_train_results(config, prefix, metric_info, done_flag=False, last_num=
metric_score = metric_info["metric"]["acc"]
elif "precision" in metric_info["metric"]:
metric_score = metric_info["metric"]["precision"]
elif "exp_rate" in metric_info["metric"]:
metric_score = metric_info["metric"]["exp_rate"]
else:
raise ValueError("No metric score found.")
train_results["models"]["best"]["score"] = metric_score
@ -326,8 +328,10 @@ def update_train_results(config, prefix, metric_info, done_flag=False, last_num=
metric_score = metric_info["metric"]["acc"]
elif "precision" in metric_info["metric"]:
metric_score = metric_info["metric"]["precision"]
elif "exp_rate" in metric_info["metric"]:
metric_score = metric_info["metric"]["exp_rate"]
else:
raise ValueError("No metric score found.")
metric_score = 0
train_results["models"][f"last_{1}"]["score"] = metric_score
for tag in save_model_tag:
train_results["models"][f"last_{1}"][tag] = os.path.join(