fix latexocr bug (#13920)
parent cda3e1201e
commit 362103bd0b
@@ -89,6 +89,8 @@ class StdConv2dSame(nn.Conv2D):
        self.eps = eps

    def forward(self, x):
        if not self.training:
            self.export = True
        if self.same_pad:
            if self.export:
                x = pad_same_export(x, self._kernel_size, self._stride, self._dilation)
@@ -201,6 +203,8 @@ class MaxPool2dSame(nn.MaxPool2D):
        )

    def forward(self, x):
        if not self.training:
            self.export = True
        if self.export:
            x = pad_same_export(x, self.ksize, self.stride, value=-float("inf"))
        else:
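For context, a minimal self-contained sketch of what a "same"-padding helper such as the pad_same_export calls above typically does: pad the input so the output spatial size equals ceil(input / stride), using plain Python arithmetic so the exported graph needs no dynamic-shape ops. The names same_pad_amount and pad_same_sketch below are hypothetical stand-ins, not this repo's API.

import paddle
import paddle.nn.functional as F


def same_pad_amount(size, kernel, stride, dilation=1):
    # total padding so that the output length equals ceil(size / stride)
    effective_k = (kernel - 1) * dilation + 1
    out = -(-size // stride)  # ceil division
    return max((out - 1) * stride + effective_k - size, 0)


def pad_same_sketch(x, kernel, stride, dilation=(1, 1), value=0.0):
    ih, iw = x.shape[-2:]
    pad_h = same_pad_amount(ih, kernel[0], stride[0], dilation[0])
    pad_w = same_pad_amount(iw, kernel[1], stride[1], dilation[1])
    # pad order for an NCHW tensor: [left, right, top, bottom]
    return F.pad(
        x, [pad_w // 2, pad_w - pad_w // 2, pad_h // 2, pad_h - pad_h // 2], value=value
    )


x = paddle.rand([1, 3, 30, 30])
y = pad_same_sketch(x, kernel=(3, 3), stride=(2, 2))
print(y.shape)  # [1, 3, 31, 31]: a 3x3, stride-2 conv now yields 15x15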
@@ -342,6 +342,8 @@ class Attention(nn.Layer):
        mem=None,
        seq_len=0,
    ):
        if not self.training:
            self.is_export = True
        b, n, _, h, talking_heads, collab_heads, has_context = (
            *x.shape,
            self.heads,
@@ -987,6 +989,7 @@ class LaTeXOCRHead(nn.Layer):
    # forward for export
    def forward(self, inputs, targets=None):
        if not self.training:
            self.is_export = True
        encoded_feat = inputs
        batch_num = encoded_feat.shape[0]
        bos_tensor = paddle.full([batch_num, 1], self.bos_token, dtype=paddle.int64)
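The repeated "if not self.training: self.export / self.is_export = True" lines in the hunks above implement one pattern: export scripts generally put the model in eval mode before tracing and saving it, so forward() can derive the export flag from self.training instead of relying on a flag the export path may never set. A minimal sketch under that assumption; HeadSketch is a hypothetical layer, not the repo's Attention or LaTeXOCRHead.

import paddle


class HeadSketch(paddle.nn.Layer):
    # Hypothetical layer standing in for the real heads.
    def __init__(self):
        super().__init__()
        self.is_export = False
        self.proj = paddle.nn.Linear(8, 8)

    def forward(self, x):
        if not self.training:
            # Eval (and therefore export/tracing) mode: force the static branch.
            self.is_export = True
        if self.is_export:
            # export path: static shapes, no training-only control flow
            return self.proj(x)
        return self.proj(x) + x  # training path


layer = HeadSketch()
layer.eval()  # export scripts switch to eval mode before saving
out = layer(paddle.rand([2, 8]))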
@@ -70,6 +70,7 @@ def dump_infer_config(config, path, logger):
        if hpi_config["Hpi"]["backend_config"].get("tensorrt", None):
            hpi_config["Hpi"]["supported_backends"]["gpu"].remove("tensorrt")
            del hpi_config["Hpi"]["backend_config"]["tensorrt"]
            hpi_config["Hpi"]["selected_backends"]["gpu"] = "paddle_infer"
        infer_cfg["Hpi"] = hpi_config["Hpi"]
    if config["Global"].get("pdx_model_name", None):
        infer_cfg["Global"] = {}
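A standalone illustration of the config edit in this hunk: when the high-performance-inference (HPI) settings request TensorRT but it has to be dropped, remove it from the supported GPU backends and fall back to paddle_infer. The dict below only mirrors the keys visible in the hunk; its other contents are placeholders.

hpi_config = {
    "Hpi": {
        "backend_config": {"tensorrt": {"dynamic_shapes": {}}},  # placeholder contents
        "supported_backends": {"gpu": ["paddle_infer", "tensorrt"]},
        "selected_backends": {"gpu": "tensorrt"},
    }
}

if hpi_config["Hpi"]["backend_config"].get("tensorrt", None):
    # TensorRT cannot be used here, so drop it and fall back to paddle_infer.
    hpi_config["Hpi"]["supported_backends"]["gpu"].remove("tensorrt")
    del hpi_config["Hpi"]["backend_config"]["tensorrt"]
    hpi_config["Hpi"]["selected_backends"]["gpu"] = "paddle_infer"

print(hpi_config["Hpi"]["selected_backends"]["gpu"])  # paddle_infer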
@@ -304,6 +304,8 @@ def update_train_results(config, prefix, metric_info, done_flag=False, last_num=
            metric_score = metric_info["metric"]["acc"]
        elif "precision" in metric_info["metric"]:
            metric_score = metric_info["metric"]["precision"]
        elif "exp_rate" in metric_info["metric"]:
            metric_score = metric_info["metric"]["exp_rate"]
        else:
            raise ValueError("No metric score found.")
        train_results["models"]["best"]["score"] = metric_score
@@ -326,8 +328,10 @@ def update_train_results(config, prefix, metric_info, done_flag=False, last_num=
            metric_score = metric_info["metric"]["acc"]
        elif "precision" in metric_info["metric"]:
            metric_score = metric_info["metric"]["precision"]
        elif "exp_rate" in metric_info["metric"]:
            metric_score = metric_info["metric"]["exp_rate"]
        else:
            raise ValueError("No metric score found.")
            metric_score = 0
        train_results["models"][f"last_{1}"]["score"] = metric_score
        for tag in save_model_tag:
            train_results["models"][f"last_{1}"][tag] = os.path.join(
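The two hunks above extend the same metric lookup in both the "best" and "last" branches: LaTeX-OCR evaluation reports exp_rate (expression recognition rate) rather than acc or precision, so the score picker needs an extra branch. A small self-contained version of that selection logic; the sample value is illustrative.

def pick_metric_score(metric_info):
    # Mirrors the branch order in the hunks: acc, then precision, then exp_rate.
    metric = metric_info["metric"]
    if "acc" in metric:
        return metric["acc"]
    elif "precision" in metric:
        return metric["precision"]
    elif "exp_rate" in metric:
        return metric["exp_rate"]
    raise ValueError("No metric score found.")


print(pick_metric_score({"metric": {"exp_rate": 0.71}}))  # 0.71 (illustrative value)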