From 362103bd0b7203263ca15f987ffaf30f1069cf53 Mon Sep 17 00:00:00 2001
From: zhangyubo0722 <94225063+zhangyubo0722@users.noreply.github.com>
Date: Sat, 28 Sep 2024 19:11:31 +0800
Subject: [PATCH] fix latexocr bug (#13920)

---
 ppocr/modeling/backbones/rec_resnetv2.py  | 4 ++++
 ppocr/modeling/heads/rec_latexocr_head.py | 3 +++
 ppocr/utils/export_model.py               | 1 +
 ppocr/utils/save_load.py                  | 6 +++++-
 4 files changed, 13 insertions(+), 1 deletion(-)

diff --git a/ppocr/modeling/backbones/rec_resnetv2.py b/ppocr/modeling/backbones/rec_resnetv2.py
index ef4ea438e..476e3baa6 100644
--- a/ppocr/modeling/backbones/rec_resnetv2.py
+++ b/ppocr/modeling/backbones/rec_resnetv2.py
@@ -89,6 +89,8 @@ class StdConv2dSame(nn.Conv2D):
         self.eps = eps
 
     def forward(self, x):
+        if not self.training:
+            self.export = True
         if self.same_pad:
             if self.export:
                 x = pad_same_export(x, self._kernel_size, self._stride, self._dilation)
@@ -201,6 +203,8 @@ class MaxPool2dSame(nn.MaxPool2D):
         )
 
     def forward(self, x):
+        if not self.training:
+            self.export = True
         if self.export:
             x = pad_same_export(x, self.ksize, self.stride, value=-float("inf"))
         else:
diff --git a/ppocr/modeling/heads/rec_latexocr_head.py b/ppocr/modeling/heads/rec_latexocr_head.py
index 1484f87b1..cab1b8a5a 100644
--- a/ppocr/modeling/heads/rec_latexocr_head.py
+++ b/ppocr/modeling/heads/rec_latexocr_head.py
@@ -342,6 +342,8 @@ class Attention(nn.Layer):
         mem=None,
         seq_len=0,
     ):
+        if not self.training:
+            self.is_export = True
         b, n, _, h, talking_heads, collab_heads, has_context = (
             *x.shape,
             self.heads,
@@ -987,6 +989,7 @@ class LaTeXOCRHead(nn.Layer):
     # forward for export
     def forward(self, inputs, targets=None):
         if not self.training:
+            self.is_export = True
             encoded_feat = inputs
             batch_num = encoded_feat.shape[0]
             bos_tensor = paddle.full([batch_num, 1], self.bos_token, dtype=paddle.int64)
diff --git a/ppocr/utils/export_model.py b/ppocr/utils/export_model.py
index ce4c16e5c..a62e8109a 100644
--- a/ppocr/utils/export_model.py
+++ b/ppocr/utils/export_model.py
@@ -70,6 +70,7 @@ def dump_infer_config(config, path, logger):
             if hpi_config["Hpi"]["backend_config"].get("tensorrt", None):
                 hpi_config["Hpi"]["supported_backends"]["gpu"].remove("tensorrt")
                 del hpi_config["Hpi"]["backend_config"]["tensorrt"]
+            hpi_config["Hpi"]["selected_backends"]["gpu"] = "paddle_infer"
         infer_cfg["Hpi"] = hpi_config["Hpi"]
     if config["Global"].get("pdx_model_name", None):
         infer_cfg["Global"] = {}
diff --git a/ppocr/utils/save_load.py b/ppocr/utils/save_load.py
index f7ee43227..afd7c6ad9 100644
--- a/ppocr/utils/save_load.py
+++ b/ppocr/utils/save_load.py
@@ -304,6 +304,8 @@ def update_train_results(config, prefix, metric_info, done_flag=False, last_num=
             metric_score = metric_info["metric"]["acc"]
         elif "precision" in metric_info["metric"]:
             metric_score = metric_info["metric"]["precision"]
+        elif "exp_rate" in metric_info["metric"]:
+            metric_score = metric_info["metric"]["exp_rate"]
         else:
             raise ValueError("No metric score found.")
         train_results["models"]["best"]["score"] = metric_score
@@ -326,8 +328,10 @@ def update_train_results(config, prefix, metric_info, done_flag=False, last_num=
             metric_score = metric_info["metric"]["acc"]
         elif "precision" in metric_info["metric"]:
             metric_score = metric_info["metric"]["precision"]
+        elif "exp_rate" in metric_info["metric"]:
+            metric_score = metric_info["metric"]["exp_rate"]
         else:
-            raise ValueError("No metric score found.")
+            metric_score = 0
         train_results["models"][f"last_{1}"]["score"] = metric_score
         for tag in save_model_tag:
             train_results["models"][f"last_{1}"][tag] = os.path.join(