diff --git a/.gitignore b/.gitignore
index 3a05fb746..3300be325 100644
--- a/.gitignore
+++ b/.gitignore
@@ -31,4 +31,4 @@ paddleocr.egg-info/
 /deploy/android_demo/app/.cxx/
 /deploy/android_demo/app/cache/
 test_tipc/web/models/
-test_tipc/web/node_modules/
\ No newline at end of file
+test_tipc/web/node_modules/
diff --git a/.pre-commit-config.yaml b/.pre-commit-config.yaml
index 3c26460ba..4121e4a65 100644
--- a/.pre-commit-config.yaml
+++ b/.pre-commit-config.yaml
@@ -1,26 +1,22 @@
 repos:
 -   repo: https://github.com/pre-commit/pre-commit-hooks
-    rev: a11d9314b22d8f8c7556443875b731ef05965464
+    rev: v4.6.0
     hooks:
+    -   id: check-added-large-files
+        args: ['--maxkb=512']
+    -   id: check-case-conflict
     -   id: check-merge-conflict
     -   id: check-symlinks
     -   id: detect-private-key
-        files: (?!.*paddle)^.*$
     -   id: end-of-file-fixer
-        files: \.md$
     -   id: trailing-whitespace
-        files: \.md$
+        files: \.(c|cc|cxx|cpp|cu|h|hpp|hxx|py|md)$
 -   repo: https://github.com/Lucas-C/pre-commit-hooks
-    rev: v1.0.1
+    rev: v1.5.1
     hooks:
-    -   id: forbid-crlf
-        files: \.md$
     -   id: remove-crlf
-        files: \.md$
-    -   id: forbid-tabs
-        files: \.md$
     -   id: remove-tabs
-        files: \.md$
+        files: \.(c|cc|cxx|cpp|cu|h|hpp|hxx|py|md)$
 -   repo: local
     hooks:
     -   id: clang-format
@@ -31,7 +27,7 @@ repos:
         files: \.(c|cc|cxx|cpp|cu|h|hpp|hxx|cuh|proto)$
 # For Python files
 -   repo: https://github.com/psf/black.git
-    rev: 23.3.0
+    rev: 24.4.2
     hooks:
     -   id: black
         files: (.*\.(py|pyi|bzl)|BUILD|.*\.BUILD|WORKSPACE)$
@@ -47,4 +43,3 @@ repos:
             - --show-source
             - --statistics
         exclude: ^benchmark/|^test_tipc/
-
diff --git a/MANIFEST.in b/MANIFEST.in
index f821618ab..a72b3728a 100644
--- a/MANIFEST.in
+++ b/MANIFEST.in
@@ -7,4 +7,4 @@ recursive-include ppocr/postprocess *.py
 recursive-include tools/infer *.py
 recursive-include tools __init__.py
 recursive-include ppocr/utils/e2e_utils *.py
-recursive-include ppstructure *.py
\ No newline at end of file
+recursive-include ppstructure *.py
diff --git a/README.md b/README.md
index 28708236c..e0b88b069 100644
--- a/README.md
+++ b/README.md
@@ -207,12 +207,12 @@ PaddleOCR is being oversight by a [PMC](https://github.com/PaddlePaddle/PaddleOC
 <details open>
 <summary>PP-Structure 文档分析</summary>
 
-- 版面分析+表格识别  
+- 版面分析+表格识别
 <div align="center">
     <img src="./ppstructure/docs/table/ppstructure.GIF" width="800">
 </div>
 
-- SER(语义实体识别)  
+- SER(语义实体识别)
 <div align="center">
     <img src="https://user-images.githubusercontent.com/14270174/185310636-6ce02f7c-790d-479f-b163-ea97a5a04808.jpg" width="600">
 </div>
diff --git a/README_en.md b/README_en.md
index be6551293..93b8e0fe6 100644
--- a/README_en.md
+++ b/README_en.md
@@ -119,11 +119,11 @@ PaddleOCR support a variety of cutting-edge algorithms related to OCR, and devel
         - [Mobile](./deploy/lite/readme.md)
         - [Paddle2ONNX](./deploy/paddle2onnx/readme.md)
         - [PaddleCloud](./deploy/paddlecloud/README.md)
-        - [Benchmark](./doc/doc_en/benchmark_en.md)  
+        - [Benchmark](./doc/doc_en/benchmark_en.md)
 - [PP-Structure 🔥](./ppstructure/README.md)
     - [Quick Start](./ppstructure/docs/quickstart_en.md)
     - [Model Zoo](./ppstructure/docs/models_list_en.md)
-    - [Model training](./doc/doc_en/training_en.md)  
+    - [Model training](./doc/doc_en/training_en.md)
         - [Layout Analysis](./ppstructure/layout/README.md)
         - [Table Recognition](./ppstructure/table/README.md)
         - [Key Information Extraction](./ppstructure/kie/README.md)
@@ -136,7 +136,7 @@ PaddleOCR support a variety of cutting-edge algorithms related to OCR, and devel
     - [Text recognition](./doc/doc_en/algorithm_overview_en.md)
     - [End-to-end OCR](./doc/doc_en/algorithm_overview_en.md)
     - [Table Recognition](./doc/doc_en/algorithm_overview_en.md)
-    - [Key Information Extraction](./doc/doc_en/algorithm_overview_en.md)  
+    - [Key Information Extraction](./doc/doc_en/algorithm_overview_en.md)
     - [Add New Algorithms to PaddleOCR](./doc/doc_en/add_new_algorithm_en.md)
 - Data Annotation and Synthesis
     - [Semi-automatic Annotation Tool: PPOCRLabel](https://github.com/PFCCLab/PPOCRLabel/blob/main/README.md)
@@ -188,7 +188,7 @@ PaddleOCR support a variety of cutting-edge algorithms related to OCR, and devel
 <details open>
 <summary>PP-StructureV2</summary>
 
-- layout analysis + table recognition  
+- layout analysis + table recognition
 <div align="center">
     <img src="./ppstructure/docs/table/ppstructure.GIF" width="800">
 </div>
@@ -209,7 +209,7 @@ PaddleOCR support a variety of cutting-edge algorithms related to OCR, and devel
 - RE (Relation Extraction)
 <div align="center">
     <img src="https://user-images.githubusercontent.com/25809855/186094813-3a8e16cc-42e5-4982-b9f4-0134dfb5688d.png" width="600">
-</div>  
+</div>
 
 <div align="center">
     <img src="https://user-images.githubusercontent.com/14270174/185393805-c67ff571-cf7e-4217-a4b0-8b396c4f22bb.jpg" width="600">
diff --git a/applications/PCB字符识别/PCB字符识别.md b/applications/PCB字符识别/PCB字符识别.md
index 4b4efe473..b16d54e53 100644
--- a/applications/PCB字符识别/PCB字符识别.md
+++ b/applications/PCB字符识别/PCB字符识别.md
@@ -546,7 +546,7 @@ python3 tools/infer/predict_system.py  \
     --use_gpu=True
 ```
 
-得到保存结果,文本检测识别可视化图保存在`det_rec_infer/`目录下,预测结果保存在`det_rec_infer/system_results.txt`中,格式如下:`0018.jpg	[{"transcription": "E295", "points": [[88, 33], [137, 33], [137, 40], [88, 40]]}]`
+得到保存结果,文本检测识别可视化图保存在`det_rec_infer/`目录下,预测结果保存在`det_rec_infer/system_results.txt`中,格式如下:`0018.jpg   [{"transcription": "E295", "points": [[88, 33], [137, 33], [137, 40], [88, 40]]}]`
 
 2)然后将步骤一保存的数据转换为端对端评测需要的数据格式: 修改 `tools/end2end/convert_ppocr_label.py`中的代码,convert_label函数中设置输入标签路径,Mode,保存标签路径等,对预测数据的GTlabel和预测结果的label格式进行转换。
 ```
diff --git a/applications/PCB字符识别/gen_data/corpus/text.txt b/applications/PCB字符识别/gen_data/corpus/text.txt
index 8b8cb793e..ef40e9cdc 100644
--- a/applications/PCB字符识别/gen_data/corpus/text.txt
+++ b/applications/PCB字符识别/gen_data/corpus/text.txt
@@ -27,4 +27,4 @@ K06
 KIEY
 NZQJ
 UN1B
-6X4
\ No newline at end of file
+6X4
diff --git a/applications/中文表格识别.md b/applications/中文表格识别.md
index d61514ff2..3ed72d20e 100644
--- a/applications/中文表格识别.md
+++ b/applications/中文表格识别.md
@@ -456,7 +456,7 @@ display(HTML('<html><body><table><tr><td colspan="5">alleadersh</td><td rowspan=
 
 预测结果如下:
 ```
-val_9.jpg:	 {'attributes': ['Scanned', 'Little', 'Black-and-White', 'Clear', 'Without-Obstacles', 'Horizontal'], 'output': [1, 1, 1, 1, 1, 1]}
+val_9.jpg:   {'attributes': ['Scanned', 'Little', 'Black-and-White', 'Clear', 'Without-Obstacles', 'Horizontal'], 'output': [1, 1, 1, 1, 1, 1]}
 ```
 
 
@@ -466,7 +466,7 @@ val_9.jpg:	 {'attributes': ['Scanned', 'Little', 'Black-and-White', 'Clear', 'Wi
 
 预测结果如下:
 ```
-val_3253.jpg:	 {'attributes': ['Photo', 'Little', 'Black-and-White', 'Blurry', 'Without-Obstacles', 'Tilted'], 'output': [0, 1, 1, 0, 1, 0]}
+val_3253.jpg:    {'attributes': ['Photo', 'Little', 'Black-and-White', 'Blurry', 'Without-Obstacles', 'Tilted'], 'output': [0, 1, 1, 0, 1, 0]}
 ```
 
 对比两张图片可以发现,第一张图片比较清晰,表格属性的结果也偏向于比较容易识别,我们可以更相信表格识别的结果,第二张图片比较模糊,且存在倾斜现象,表格识别可能存在错误,需要我们人工进一步校验。通过表格的属性识别能力,可以进一步将“人工”和“智能”很好的结合起来,为表格识别能力的落地的精度提供保障。
diff --git a/applications/光功率计数码管字符识别/光功率计数码管字符识别.md b/applications/光功率计数码管字符识别/光功率计数码管字符识别.md
index 215b308d3..4e6e7acd5 100644
--- a/applications/光功率计数码管字符识别/光功率计数码管字符识别.md
+++ b/applications/光功率计数码管字符识别/光功率计数码管字符识别.md
@@ -434,16 +434,16 @@ python3 -m paddle.distributed.launch --gpus '0' tools/eval.py -c configs/rec/PP-
 
 ```
 output/rec/
-├── best_accuracy.pdopt  
-├── best_accuracy.pdparams  
-├── best_accuracy.states  
-├── config.yml  
-├── iter_epoch_3.pdopt  
-├── iter_epoch_3.pdparams  
-├── iter_epoch_3.states  
-├── latest.pdopt  
-├── latest.pdparams  
-├── latest.states  
+├── best_accuracy.pdopt
+├── best_accuracy.pdparams
+├── best_accuracy.states
+├── config.yml
+├── iter_epoch_3.pdopt
+├── iter_epoch_3.pdparams
+├── iter_epoch_3.states
+├── latest.pdopt
+├── latest.pdparams
+├── latest.states
 └── train.log
 ```
 
diff --git a/applications/包装生产日期识别.md b/applications/包装生产日期识别.md
index 670ec9cda..bc833687e 100644
--- a/applications/包装生产日期识别.md
+++ b/applications/包装生产日期识别.md
@@ -243,7 +243,7 @@ def get_cropus(f):
     elif 0.7 < rad < 0.8:
         f.write('20{:02d}-{:02d}-{:02d}'.format(year, month, day))
     elif 0.8 < rad < 0.9:
-        f.write('20{:02d}.{:02d}.{:02d}'.format(year, month, day))  
+        f.write('20{:02d}.{:02d}.{:02d}'.format(year, month, day))
     else:
         f.write('{:02d}:{:02d}:{:02d} {:02d}'.format(hours, minute, second, file_id2))
 
diff --git a/applications/印章弯曲文字识别.md b/applications/印章弯曲文字识别.md
index bc0eaa35d..5e230cf02 100644
--- a/applications/印章弯曲文字识别.md
+++ b/applications/印章弯曲文字识别.md
@@ -409,7 +409,7 @@ def crop_seal_from_img(label_file, data_dir, save_dir, save_gt_path):
 
 
 
-if __name__ == "__main__":  
+if __name__ == "__main__":
 
     # 数据处理
     gen_extract_label("./seal_labeled_datas", "./seal_labeled_datas/Label.txt", "./seal_ppocr_gt/seal_det_img.txt", "./seal_ppocr_gt/seal_ppocr_img.txt")
@@ -523,7 +523,7 @@ def gen_xml_label(mode='train'):
         xml_file = open(("./seal_VOC/Annotations" + '/' + i_name + '.xml'), 'w')
         xml_file.write('<annotation>\n')
         xml_file.write('    <folder>seal_VOC</folder>\n')
-        xml_file.write('    <filename>' + str(img_name) + '</filename>\n')  
+        xml_file.write('    <filename>' + str(img_name) + '</filename>\n')
         xml_file.write('    <path>' + 'Annotations/' + str(img_name) + '</path>\n')
         xml_file.write('    <size>\n')
         xml_file.write('        <width>' + str(width) + '</width>\n')
@@ -553,7 +553,7 @@ def gen_xml_label(mode='train'):
             xml_file.write('            <ymax>'+str(ymax)+'</ymax>\n')
             xml_file.write('        </bndbox>\n')
             xml_file.write('    </object>\n')
-        xml_file.write('</annotation>')  
+        xml_file.write('</annotation>')
         xml_file.close()
     print(f'{mode} xml save done!')
 
diff --git a/applications/多模态表单识别.md b/applications/多模态表单识别.md
index 59aaf72b7..fd403bb84 100644
--- a/applications/多模态表单识别.md
+++ b/applications/多模态表单识别.md
@@ -110,12 +110,12 @@ tar -xf XFUND.tar
 
 ```bash
 /home/aistudio/PaddleOCR/ppstructure/vqa/XFUND
-  └─ zh_train/        		 	训练集
-      ├── image/				图片存放文件夹
-      ├── xfun_normalize_train.json 	标注信息
-  └─ zh_val/        		 	验证集
-      ├── image/			图片存放文件夹
-      ├── xfun_normalize_val.json 	标注信息
+  └─ zh_train/                  训练集
+      ├── image/              图片存放文件夹
+      ├── xfun_normalize_train.json   标注信息
+  └─ zh_val/                    验证集
+      ├── image/          图片存放文件夹
+      ├── xfun_normalize_val.json     标注信息
 
 ```
 
@@ -805,7 +805,7 @@ CUDA_VISIBLE_DEVICES=0 python3 tools/infer_vqa_token_ser_re.py \
 最终会在config.Global.save_res_path字段所配置的目录下保存预测结果可视化图像以及预测结果文本文件,预测结果文本文件名为infer_results.txt, 每一行表示一张图片的结果,每张图片的结果如下所示,前面表示测试图片路径,后面为测试结果:key字段及对应的value字段。
 
 ```
-test_imgs/t131.jpg	{"政治面税": "群众", "性别": "男", "籍贯": "河北省邯郸市", "婚姻状况": "亏末婚口已婚口已娇", "通讯地址": "邯郸市阳光苑7号楼003", "民族": "汉族", "毕业院校": "河南工业大学", "户口性质": "口农村城镇", "户口地址": "河北省邯郸市", "联系电话": "13288888888", "健康状况": "健康", "姓名": "小六", "好高cm": "180", "出生年月": "1996年8月9日", "文化程度": "本科", "身份证号码": "458933777777777777"}
+test_imgs/t131.jpg  {"政治面税": "群众", "性别": "男", "籍贯": "河北省邯郸市", "婚姻状况": "亏末婚口已婚口已娇", "通讯地址": "邯郸市阳光苑7号楼003", "民族": "汉族", "毕业院校": "河南工业大学", "户口性质": "口农村城镇", "户口地址": "河北省邯郸市", "联系电话": "13288888888", "健康状况": "健康", "姓名": "小六", "好高cm": "180", "出生年月": "1996年8月9日", "文化程度": "本科", "身份证号码": "458933777777777777"}
 ````
 
 展示预测结果
diff --git a/applications/快速构建卡证类OCR.md b/applications/快速构建卡证类OCR.md
index 79266c6c2..50b70ff3a 100644
--- a/applications/快速构建卡证类OCR.md
+++ b/applications/快速构建卡证类OCR.md
@@ -1,775 +1,775 @@
-# 快速构建卡证类OCR
-
-
-- [快速构建卡证类OCR](#快速构建卡证类ocr)
-  - [1. 金融行业卡证识别应用](#1-金融行业卡证识别应用)
-    - [1.1 金融行业中的OCR相关技术](#11-金融行业中的ocr相关技术)
-    - [1.2 金融行业中的卡证识别场景介绍](#12-金融行业中的卡证识别场景介绍)
-    - [1.3 OCR落地挑战](#13-ocr落地挑战)
-  - [2. 卡证识别技术解析](#2-卡证识别技术解析)
-    - [2.1 卡证分类模型](#21-卡证分类模型)
-    - [2.2 卡证识别模型](#22-卡证识别模型)
-  - [3. OCR技术拆解](#3-ocr技术拆解)
-    - [3.1技术流程](#31技术流程)
-    - [3.2 OCR技术拆解---卡证分类](#32-ocr技术拆解---卡证分类)
-      - [卡证分类:数据、模型准备](#卡证分类数据模型准备)
-      - [卡证分类---修改配置文件](#卡证分类---修改配置文件)
-      - [卡证分类---训练](#卡证分类---训练)
-    - [3.2 OCR技术拆解---卡证识别](#32-ocr技术拆解---卡证识别)
-      - [身份证识别:检测+分类](#身份证识别检测分类)
-      - [数据标注](#数据标注)
-  - [4 . 项目实践](#4--项目实践)
-    - [4.1 环境准备](#41-环境准备)
-    - [4.2 配置文件修改](#42-配置文件修改)
-    - [4.3 代码修改](#43-代码修改)
-      - [4.3.1 数据读取](#431-数据读取)
-      - [4.3.2  head修改](#432--head修改)
-      - [4.3.3 修改loss](#433-修改loss)
-      - [4.3.4 后处理](#434-后处理)
-    - [4.4. 模型启动](#44-模型启动)
-  - [5 总结](#5-总结)
-  - [References](#references)
-
-## 1. 金融行业卡证识别应用
-
-### 1.1 金融行业中的OCR相关技术
-
-* 《“十四五”数字经济发展规划》指出,2020年我国数字经济核心产业增加值占GDP比重达7.8%,随着数字经济迈向全面扩展,到2025年该比例将提升至10%。
-
-* 在过去数年的跨越发展与积累沉淀中,数字金融、金融科技已在对金融业的重塑与再造中充分印证了其自身价值。
-
-* 以智能为目标,提升金融数字化水平,实现业务流程自动化,降低人力成本。
-
-
-![](https://ai-studio-static-online.cdn.bcebos.com/8bb381f164c54ea9b4043cf66fc92ffdea8aaf851bab484fa6e19bd2f93f154f)
-
-
-
-### 1.2 金融行业中的卡证识别场景介绍
-
-应用场景:身份证、银行卡、营业执照、驾驶证等。
-
-应用难点:由于数据的采集来源多样,以及实际采集数据各种噪声:反光、褶皱、模糊、倾斜等各种问题干扰。
-
-![](https://ai-studio-static-online.cdn.bcebos.com/981640e17d05487e961162f8576c9e11634ca157f79048d4bd9d3bc21722afe8)
-
-
-
-### 1.3 OCR落地挑战
-
-
-![](https://ai-studio-static-online.cdn.bcebos.com/a5973a8ddeff4bd7ac082f02dc4d0c79de21e721b41641cbb831f23c2cb8fce2)
-
-
-
-
-
-## 2. 卡证识别技术解析
-
-
-![](https://ai-studio-static-online.cdn.bcebos.com/d7f96effc2434a3ca2d4144ff33c50282b830670c892487d8d7dec151921cce7)
-
-
-### 2.1 卡证分类模型
-
-卡证分类:基于PPLCNet
-
-与其他轻量级模型相比在CPU环境下ImageNet数据集上的表现
-
-![](https://ai-studio-static-online.cdn.bcebos.com/cbda3390cb994f98a3c8a9ba88c90c348497763f6c9f4b4797f7d63d84da5f63)
-
-![](https://ai-studio-static-online.cdn.bcebos.com/dedab7b7fd6543aa9e7f625132b24e3ba3f200e361fa468dac615f7814dfb98d)
-
-
-
-* 模型来自模型库PaddleClas,它是一个图像识别和图像分类任务的工具集,助力使用者训练出更好的视觉模型和应用落地。
-
-### 2.2 卡证识别模型
-
-* 检测:DBNet  识别:SVRT
-
-![](https://ai-studio-static-online.cdn.bcebos.com/9a7a4e19edc24310b46620f2ee7430f918223b93d4f14a15a52973c096926bad)
-
-
-* PPOCRv3在文本检测、识别进行了一系列改进优化,在保证精度的同时提升预测效率
-
-
-![](https://ai-studio-static-online.cdn.bcebos.com/6afdbb77e8db4aef9b169e4e94c5d90a9764cfab4f2c4c04aa9afdf4f54d7680)
-
-
-![](https://ai-studio-static-online.cdn.bcebos.com/c1a7d197847a4f168848c59b8e625d1d5e8066b778144395a8b9382bb85dc364)
-
-
-## 3. OCR技术拆解
-
-### 3.1技术流程
-
-![](https://ai-studio-static-online.cdn.bcebos.com/89ba046177864d8783ced6cb31ba92a66ca2169856a44ee59ac2bb18e44a6c4b)
-
-
-### 3.2 OCR技术拆解---卡证分类
-
-####  卡证分类:数据、模型准备
-
-
-A  使用爬虫获取无标注数据,将相同类别的放在同一文件夹下,文件名从0开始命名。具体格式如下图所示。
-
-​    注:卡证类数据,建议每个类别数据量在500张以上
-![](https://ai-studio-static-online.cdn.bcebos.com/6f875b6e695e4fe5aedf427beb0d4ce8064ad7cc33c44faaad59d3eb9732639d)
-
-
-B  一行命令生成标签文件
-
-```
-tree -r -i -f | grep -E "jpg|JPG|jpeg|JPEG|png|PNG|webp" | awk -F "/" '{print $0" "$2}' > train_list.txt
-```
-
-C [下载预训练模型 ](https://github.com/PaddlePaddle/PaddleClas/blob/release/2.4/docs/zh_CN/models/PP-LCNet.md)
-
-
-
-####  卡证分类---修改配置文件
-
-
-配置文件主要修改三个部分:
-
-  全局参数:预训练模型路径/训练轮次/图像尺寸
-
-  模型结构:分类数
-
-  数据处理:训练/评估数据路径
-
-
-  ![](https://ai-studio-static-online.cdn.bcebos.com/e0dc05039c7444c5ab1260ff550a408748df8d4cfe864223adf390e51058dbd5)
-
-#### 卡证分类---训练
-
-
-指定配置文件启动训练:
-
-```
-!python /home/aistudio/work/PaddleClas/tools/train.py -c   /home/aistudio/work/PaddleClas/ppcls/configs/PULC/text_image_orientation/PPLCNet_x1_0.yaml
-```
-![](https://ai-studio-static-online.cdn.bcebos.com/06af09bde845449ba0a676410f4daa1cdc3983ac95034bdbbafac3b7fd94042f)
-
-​    注:日志中显示了训练结果和评估结果(训练时可以设置固定轮数评估一次)
-
-
-### 3.2 OCR技术拆解---卡证识别
-
-卡证识别(以身份证检测为例)
-存在的困难及问题:
-
-  * 在自然场景下,由于各种拍摄设备以及光线、角度不同等影响导致实际得到的证件影像千差万别。
-
-  * 如何快速提取需要的关键信息
-
-  * 多行的文本信息,检测结果如何正确拼接
-
-  ![](https://ai-studio-static-online.cdn.bcebos.com/4f8f5533a2914e0a821f4a639677843c32ec1f08a1b1488d94c0b8bfb6e72d2d)
-
-
-
-* OCR技术拆解---OCR工具库
-
-    PaddleOCR是一个丰富、领先且实用的OCR工具库,助力开发者训练出更好的模型并应用落地
-
-
-身份证识别:用现有的方法识别
-
-![](https://ai-studio-static-online.cdn.bcebos.com/12d402e6a06d482a88f979e0ebdfb39f4d3fc8b80517499689ec607ddb04fbf3)
-
-
-
-
-####  身份证识别:检测+分类
-
->   方法:基于现有的dbnet检测模型,加入分类方法。检测同时进行分类,从一定程度上优化识别流程
-
-![](https://ai-studio-static-online.cdn.bcebos.com/e1e798c87472477fa0bfca0da12bb0c180845a3e167a4761b0d26ff4330a5ccb)
-
-
-![](https://ai-studio-static-online.cdn.bcebos.com/23a5a19c746441309864586e467f995ec8a551a3661640e493fc4d77520309cd)
-
-#### 数据标注
-
-使用PaddleOCRLable进行快速标注
-
-![](https://ai-studio-static-online.cdn.bcebos.com/a73180425fa14f919ce52d9bf70246c3995acea1831843cca6c17d871b8f5d95)
-
-
-* 修改PPOCRLabel.py,将下图中的kie参数设置为True
-
-
-![](https://ai-studio-static-online.cdn.bcebos.com/d445cf4d850e4063b9a7fc6a075c12204cf912ff23ec471fa2e268b661b3d693)
-
-
-* 数据标注踩坑分享
-
-![](https://ai-studio-static-online.cdn.bcebos.com/89f42eccd600439fa9e28c97ccb663726e4e54ce3a854825b4c3b7d554ea21df)
-
-​    注:两者只有标注有差别,训练参数数据集都相同
-
-## 4 . 项目实践
-
-AIStudio项目链接:[快速构建卡证类OCR](https://aistudio.baidu.com/aistudio/projectdetail/4459116)
-
-### 4.1 环境准备
-
-1)拉取[paddleocr](https://github.com/PaddlePaddle/PaddleOCR)项目,如果从github上拉取速度慢可以选择从gitee上获取。
-```
-!git clone https://github.com/PaddlePaddle/PaddleOCR.git  -b release/2.6  /home/aistudio/work/
-```
-
-2)获取并解压预训练模型,如果要使用其他模型可以从模型库里自主选择合适模型。
-```
-!wget -P work/pre_trained/   https://paddleocr.bj.bcebos.com/PP-OCRv3/chinese/ch_PP-OCRv3_det_distill_train.tar
-!tar -vxf /home/aistudio/work/pre_trained/ch_PP-OCRv3_det_distill_train.tar -C /home/aistudio/work/pre_trained
-```
-3) 安装必要依赖
-```
-!pip install -r /home/aistudio/work/requirements.txt
-```
-
-### 4.2 配置文件修改
-
-修改配置文件 *work/configs/det/detmv3db.yml*
-
-具体修改说明如下:
-
-![](https://ai-studio-static-online.cdn.bcebos.com/fcdf517af5a6466294d72db7450209378d8efd9b77764e329d3f2aff3579a20c)
-
-  注:在上述的配置文件的Global变量中需要添加以下两个参数:
-
-​      label_list 为标签表
-​     num_classes 为分类数
-​     上述两个参数根据实际的情况配置即可
-
-
-![](https://ai-studio-static-online.cdn.bcebos.com/0b056be24f374812b61abf43305774767ae122c8479242f98aa0799b7bfc81d4)
-
-其中lable_list内容如下例所示,***建议第一个参数设置为 background,不要设置为实际要提取的关键信息种类***:
-
-![](https://ai-studio-static-online.cdn.bcebos.com/9fc78bbcdf754898b9b2c7f000ddf562afac786482ab4f2ab063e2242faa542a)
-
-配置文件中的其他设置说明
-
-![](https://ai-studio-static-online.cdn.bcebos.com/c7fc5e631dd44bc8b714630f4e49d9155a831d9e56c64e2482ded87081d0db22)
-
-![](https://ai-studio-static-online.cdn.bcebos.com/8d1022ac25d9474daa4fb236235bd58760039d58ad46414f841559d68e0d057f)
-
-![](https://ai-studio-static-online.cdn.bcebos.com/ee927ad9ebd442bb96f163a7ebbf4bc95e6bedee97324a51887cf82de0851fd3)
-
-
-
-
-### 4.3 代码修改
-
-
-#### 4.3.1 数据读取
-
-
-
-* 修改 PaddleOCR/ppocr/data/imaug/label_ops.py中的DetLabelEncode
-
-
-```python
-class DetLabelEncode(object):
-
-    # 修改检测标签的编码处,新增了参数分类数:num_classes,重写初始化方法,以及分类标签的读取
-
-    def __init__(self, label_list, num_classes=8, **kwargs):
-        self.num_classes = num_classes
-        self.label_list = []
-        if label_list:
-            if isinstance(label_list, str):
-                with open(label_list, 'r+', encoding='utf-8') as f:
-                    for line in f.readlines():
-                        self.label_list.append(line.replace("\n", ""))
-            else:
-                self.label_list = label_list
-        else:
-            assert ' please check label_list whether it is none or config is right'
-
-        if num_classes != len(self.label_list): # 校验分类数和标签的一致性
-            assert 'label_list length is not equal to the num_classes'
-
-    def __call__(self, data):
-        label = data['label']
-        label = json.loads(label)
-        nBox = len(label)
-        boxes, txts, txt_tags, classes = [], [], [], []
-        for bno in range(0, nBox):
-            box = label[bno]['points']
-            txt = label[bno]['key_cls']  # 此处将kie中的参数作为分类读取
-            boxes.append(box)
-            txts.append(txt)
-
-            if txt in ['*', '###']:
-                txt_tags.append(True)
-                if self.num_classes > 1:
-                    classes.append(-2)
-            else:
-                txt_tags.append(False)
-                if self.num_classes > 1:  # 将KIE内容的key标签作为分类标签使用
-                    classes.append(int(self.label_list.index(txt)))
-
-        if len(boxes) == 0:
-
-            return None
-        boxes = self.expand_points_num(boxes)
-        boxes = np.array(boxes, dtype=np.float32)
-        txt_tags = np.array(txt_tags, dtype=np.bool_)
-        classes = classes
-        data['polys'] = boxes
-        data['texts'] = txts
-        data['ignore_tags'] = txt_tags
-        if self.num_classes > 1:
-            data['classes'] = classes
-        return data
-```
-
-* 修改 PaddleOCR/ppocr/data/imaug/make_shrink_map.py中的MakeShrinkMap类。这里需要注意的是,如果我们设置的label_list中的第一个参数为要检测的信息那么会得到如下的mask,
-
-举例说明:
-这是检测的mask图,图中有四个mask那么实际对应的分类应该是4类
-
-![](https://ai-studio-static-online.cdn.bcebos.com/42d2188d3d6b498880952e12c3ceae1efabf135f8d9f4c31823f09ebe02ba9d2)
-
-
-
-label_list中第一个为关键分类,则得到的分类Mask实际如下,与上图相比,少了一个box:
-
-![](https://ai-studio-static-online.cdn.bcebos.com/864604967256461aa7c5d32cd240645e9f4c70af773341d5911f22d5a3e87b5f)
-
-
-
-```python
-class MakeShrinkMap(object):
-    r'''
-    Making binary mask from detection data with ICDAR format.
-    Typically following the process of class `MakeICDARData`.
-    '''
-
-    def __init__(self, min_text_size=8, shrink_ratio=0.4, num_classes=8, **kwargs):
-        self.min_text_size = min_text_size
-        self.shrink_ratio = shrink_ratio
-        self.num_classes = num_classes  #  添加了分类
-
-    def __call__(self, data):
-        image = data['image']
-        text_polys = data['polys']
-        ignore_tags = data['ignore_tags']
-        if self.num_classes > 1:
-            classes = data['classes']
-
-        h, w = image.shape[:2]
-        text_polys, ignore_tags = self.validate_polygons(text_polys,
-                                                         ignore_tags, h, w)
-        gt = np.zeros((h, w), dtype=np.float32)
-        mask = np.ones((h, w), dtype=np.float32)
-        gt_class = np.zeros((h, w), dtype=np.float32)  # 新增分类
-        for i in range(len(text_polys)):
-            polygon = text_polys[i]
-            height = max(polygon[:, 1]) - min(polygon[:, 1])
-            width = max(polygon[:, 0]) - min(polygon[:, 0])
-            if ignore_tags[i] or min(height, width) < self.min_text_size:
-                cv2.fillPoly(mask,
-                             polygon.astype(np.int32)[np.newaxis, :, :], 0)
-                ignore_tags[i] = True
-            else:
-                polygon_shape = Polygon(polygon)
-                subject = [tuple(l) for l in polygon]
-                padding = pyclipper.PyclipperOffset()
-                padding.AddPath(subject, pyclipper.JT_ROUND,
-                                pyclipper.ET_CLOSEDPOLYGON)
-                shrinked = []
-
-                # Increase the shrink ratio every time we get multiple polygon returned back
-                possible_ratios = np.arange(self.shrink_ratio, 1,
-                                            self.shrink_ratio)
-                np.append(possible_ratios, 1)
-                for ratio in possible_ratios:
-                    distance = polygon_shape.area * (
-                        1 - np.power(ratio, 2)) / polygon_shape.length
-                    shrinked = padding.Execute(-distance)
-                    if len(shrinked) == 1:
-                        break
-
-                if shrinked == []:
-                    cv2.fillPoly(mask,
-                                 polygon.astype(np.int32)[np.newaxis, :, :], 0)
-                    ignore_tags[i] = True
-                    continue
-
-                for each_shirnk in shrinked:
-                    shirnk = np.array(each_shirnk).reshape(-1, 2)
-                    cv2.fillPoly(gt, [shirnk.astype(np.int32)], 1)
-                    if self.num_classes > 1:  # 绘制分类的mask
-                        cv2.fillPoly(gt_class, polygon.astype(np.int32)[np.newaxis, :, :], classes[i])
-
-
-        data['shrink_map'] = gt
-
-        if self.num_classes > 1:
-            data['class_mask'] = gt_class
-
-        data['shrink_mask'] = mask
-        return data
-```
-
-由于在训练数据中会对数据进行resize设置,yml中的操作为:EastRandomCropData,所以需要修改PaddleOCR/ppocr/data/imaug/random_crop_data.py中的EastRandomCropData
-
-
-```python
-class EastRandomCropData(object):
-    def __init__(self,
-                 size=(640, 640),
-                 max_tries=10,
-                 min_crop_side_ratio=0.1,
-                 keep_ratio=True,
-                 num_classes=8,
-                 **kwargs):
-        self.size = size
-        self.max_tries = max_tries
-        self.min_crop_side_ratio = min_crop_side_ratio
-        self.keep_ratio = keep_ratio
-        self.num_classes = num_classes
-
-    def __call__(self, data):
-        img = data['image']
-        text_polys = data['polys']
-        ignore_tags = data['ignore_tags']
-        texts = data['texts']
-        if self.num_classes > 1:
-            classes = data['classes']
-        all_care_polys = [
-            text_polys[i] for i, tag in enumerate(ignore_tags) if not tag
-        ]
-        # 计算crop区域
-        crop_x, crop_y, crop_w, crop_h = crop_area(
-            img, all_care_polys, self.min_crop_side_ratio, self.max_tries)
-        # crop 图片 保持比例填充
-        scale_w = self.size[0] / crop_w
-        scale_h = self.size[1] / crop_h
-        scale = min(scale_w, scale_h)
-        h = int(crop_h * scale)
-        w = int(crop_w * scale)
-        if self.keep_ratio:
-            padimg = np.zeros((self.size[1], self.size[0], img.shape[2]),
-                              img.dtype)
-            padimg[:h, :w] = cv2.resize(
-                img[crop_y:crop_y + crop_h, crop_x:crop_x + crop_w], (w, h))
-            img = padimg
-        else:
-            img = cv2.resize(
-                img[crop_y:crop_y + crop_h, crop_x:crop_x + crop_w],
-                tuple(self.size))
-        # crop 文本框
-        text_polys_crop = []
-        ignore_tags_crop = []
-        texts_crop = []
-        classes_crop = []
-        for poly, text, tag,class_index in zip(text_polys, texts, ignore_tags,classes):
-            poly = ((poly - (crop_x, crop_y)) * scale).tolist()
-            if not is_poly_outside_rect(poly, 0, 0, w, h):
-                text_polys_crop.append(poly)
-                ignore_tags_crop.append(tag)
-                texts_crop.append(text)
-                if self.num_classes > 1:
-                    classes_crop.append(class_index)
-        data['image'] = img
-        data['polys'] = np.array(text_polys_crop)
-        data['ignore_tags'] = ignore_tags_crop
-        data['texts'] = texts_crop
-        if self.num_classes > 1:
-            data['classes'] = classes_crop
-        return data
-```
-
-#### 4.3.2  head修改
-
-
-
-主要修改 ppocr/modeling/heads/det_db_head.py,将Head类中的最后一层的输出修改为实际的分类数,同时在DBHead中新增分类的head。
-
-![](https://ai-studio-static-online.cdn.bcebos.com/0e25da2ccded4af19e95c85c3d3287ab4d53e31a4eed4607b6a4cb637c43f6d3)
-
-
-
-#### 4.3.3 修改loss
-
-
-修改PaddleOCR/ppocr/losses/det_db_loss.py中的DBLoss类,分类采用交叉熵损失函数进行计算。
-
-![](https://ai-studio-static-online.cdn.bcebos.com/dc10a070018d4d27946c26ec24a2a85bc3f16422f4964f72a9b63c6170d954e1)
-
-
-#### 4.3.4 后处理
-
-
-
-由于涉及到eval以及后续推理能否正常使用,我们需要修改后处理的相关代码,修改位置 PaddleOCR/ppocr/postprocess/db_postprocess.py中的DBPostProcess类
-
-
-```python
-class DBPostProcess(object):
-    """
-    The post process for Differentiable Binarization (DB).
-    """
-
-    def __init__(self,
-                 thresh=0.3,
-                 box_thresh=0.7,
-                 max_candidates=1000,
-                 unclip_ratio=2.0,
-                 use_dilation=False,
-                 score_mode="fast",
-                 **kwargs):
-        self.thresh = thresh
-        self.box_thresh = box_thresh
-        self.max_candidates = max_candidates
-        self.unclip_ratio = unclip_ratio
-        self.min_size = 3
-        self.score_mode = score_mode
-        assert score_mode in [
-            "slow", "fast"
-        ], "Score mode must be in [slow, fast] but got: {}".format(score_mode)
-
-        self.dilation_kernel = None if not use_dilation else np.array(
-            [[1, 1], [1, 1]])
-
-    def boxes_from_bitmap(self, pred, _bitmap, classes, dest_width, dest_height):
-        """
-        _bitmap: single map with shape (1, H, W),
-                whose values are binarized as {0, 1}
-        """
-
-        bitmap = _bitmap
-        height, width = bitmap.shape
-
-        outs = cv2.findContours((bitmap * 255).astype(np.uint8), cv2.RETR_LIST,
-                                cv2.CHAIN_APPROX_SIMPLE)
-        if len(outs) == 3:
-            img, contours, _ = outs[0], outs[1], outs[2]
-        elif len(outs) == 2:
-            contours, _ = outs[0], outs[1]
-
-        num_contours = min(len(contours), self.max_candidates)
-
-        boxes = []
-        scores = []
-        class_indexes = []
-        class_scores = []
-        for index in range(num_contours):
-            contour = contours[index]
-            points, sside = self.get_mini_boxes(contour)
-            if sside < self.min_size:
-                continue
-            points = np.array(points)
-            if self.score_mode == "fast":
-                score, class_index, class_score = self.box_score_fast(pred, points.reshape(-1, 2), classes)
-            else:
-                score, class_index, class_score = self.box_score_slow(pred, contour, classes)
-            if self.box_thresh > score:
-                continue
-
-            box = self.unclip(points).reshape(-1, 1, 2)
-            box, sside = self.get_mini_boxes(box)
-            if sside < self.min_size + 2:
-                continue
-            box = np.array(box)
-
-            box[:, 0] = np.clip(
-                np.round(box[:, 0] / width * dest_width), 0, dest_width)
-            box[:, 1] = np.clip(
-                np.round(box[:, 1] / height * dest_height), 0, dest_height)
-
-            boxes.append(box.astype(np.int16))
-            scores.append(score)
-
-            class_indexes.append(class_index)
-            class_scores.append(class_score)
-
-        if classes is None:
-            return np.array(boxes, dtype=np.int16), scores
-        else:
-            return np.array(boxes, dtype=np.int16), scores, class_indexes, class_scores
-
-    def unclip(self, box):
-        unclip_ratio = self.unclip_ratio
-        poly = Polygon(box)
-        distance = poly.area * unclip_ratio / poly.length
-        offset = pyclipper.PyclipperOffset()
-        offset.AddPath(box, pyclipper.JT_ROUND, pyclipper.ET_CLOSEDPOLYGON)
-        expanded = np.array(offset.Execute(distance))
-        return expanded
-
-    def get_mini_boxes(self, contour):
-        bounding_box = cv2.minAreaRect(contour)
-        points = sorted(list(cv2.boxPoints(bounding_box)), key=lambda x: x[0])
-
-        index_1, index_2, index_3, index_4 = 0, 1, 2, 3
-        if points[1][1] > points[0][1]:
-            index_1 = 0
-            index_4 = 1
-        else:
-            index_1 = 1
-            index_4 = 0
-        if points[3][1] > points[2][1]:
-            index_2 = 2
-            index_3 = 3
-        else:
-            index_2 = 3
-            index_3 = 2
-
-        box = [
-            points[index_1], points[index_2], points[index_3], points[index_4]
-        ]
-        return box, min(bounding_box[1])
-
-    def box_score_fast(self, bitmap, _box, classes):
-        '''
-        box_score_fast: use bbox mean score as the mean score
-        '''
-        h, w = bitmap.shape[:2]
-        box = _box.copy()
-        xmin = np.clip(np.floor(box[:, 0].min()).astype(np.int32), 0, w - 1)
-        xmax = np.clip(np.ceil(box[:, 0].max()).astype(np.int32), 0, w - 1)
-        ymin = np.clip(np.floor(box[:, 1].min()).astype(np.int32), 0, h - 1)
-        ymax = np.clip(np.ceil(box[:, 1].max()).astype(np.int32), 0, h - 1)
-
-        mask = np.zeros((ymax - ymin + 1, xmax - xmin + 1), dtype=np.uint8)
-        box[:, 0] = box[:, 0] - xmin
-        box[:, 1] = box[:, 1] - ymin
-        cv2.fillPoly(mask, box.reshape(1, -1, 2).astype(np.int32), 1)
-
-        if classes is None:
-            return cv2.mean(bitmap[ymin:ymax + 1, xmin:xmax + 1], mask)[0], None, None
-        else:
-            k = 999
-            class_mask = np.full((ymax - ymin + 1, xmax - xmin + 1), k, dtype=np.int32)
-
-            cv2.fillPoly(class_mask, box.reshape(1, -1, 2).astype(np.int32), 0)
-            classes = classes[ymin:ymax + 1, xmin:xmax + 1]
-
-            new_classes = classes + class_mask
-            a = new_classes.reshape(-1)
-            b = np.where(a >= k)
-            classes = np.delete(a, b[0].tolist())
-
-            class_index = np.argmax(np.bincount(classes))
-            class_score = np.sum(classes == class_index) / len(classes)
-
-            return cv2.mean(bitmap[ymin:ymax + 1, xmin:xmax + 1], mask)[0], class_index, class_score
-
-    def box_score_slow(self, bitmap, contour, classes):
-        """
-        box_score_slow: use polyon mean score as the mean score
-        """
-        h, w = bitmap.shape[:2]
-        contour = contour.copy()
-        contour = np.reshape(contour, (-1, 2))
-
-        xmin = np.clip(np.min(contour[:, 0]), 0, w - 1)
-        xmax = np.clip(np.max(contour[:, 0]), 0, w - 1)
-        ymin = np.clip(np.min(contour[:, 1]), 0, h - 1)
-        ymax = np.clip(np.max(contour[:, 1]), 0, h - 1)
-
-        mask = np.zeros((ymax - ymin + 1, xmax - xmin + 1), dtype=np.uint8)
-
-        contour[:, 0] = contour[:, 0] - xmin
-        contour[:, 1] = contour[:, 1] - ymin
-
-        cv2.fillPoly(mask, contour.reshape(1, -1, 2).astype(np.int32), 1)
-
-        if classes is None:
-            return cv2.mean(bitmap[ymin:ymax + 1, xmin:xmax + 1], mask)[0], None, None
-        else:
-            k = 999
-            class_mask = np.full((ymax - ymin + 1, xmax - xmin + 1), k, dtype=np.int32)
-
-            cv2.fillPoly(class_mask, contour.reshape(1, -1, 2).astype(np.int32), 0)
-            classes = classes[ymin:ymax + 1, xmin:xmax + 1]
-
-            new_classes = classes + class_mask
-            a = new_classes.reshape(-1)
-            b = np.where(a >= k)
-            classes = np.delete(a, b[0].tolist())
-
-            class_index = np.argmax(np.bincount(classes))
-            class_score = np.sum(classes == class_index) / len(classes)
-
-            return cv2.mean(bitmap[ymin:ymax + 1, xmin:xmax + 1], mask)[0], class_index, class_score
-
-    def __call__(self, outs_dict, shape_list):
-        pred = outs_dict['maps']
-        if isinstance(pred, paddle.Tensor):
-            pred = pred.numpy()
-        pred = pred[:, 0, :, :]
-        segmentation = pred > self.thresh
-
-        if "classes" in outs_dict:
-            classes = outs_dict['classes']
-            if isinstance(classes, paddle.Tensor):
-                classes = classes.numpy()
-            classes = classes[:, 0, :, :]
-
-        else:
-            classes = None
-
-        boxes_batch = []
-        for batch_index in range(pred.shape[0]):
-            src_h, src_w, ratio_h, ratio_w = shape_list[batch_index]
-            if self.dilation_kernel is not None:
-                mask = cv2.dilate(
-                    np.array(segmentation[batch_index]).astype(np.uint8),
-                    self.dilation_kernel)
-            else:
-                mask = segmentation[batch_index]
-
-            if classes is None:
-                boxes, scores = self.boxes_from_bitmap(pred[batch_index], mask, None,
-                                                       src_w, src_h)
-                boxes_batch.append({'points': boxes})
-            else:
-                boxes, scores, class_indexes, class_scores = self.boxes_from_bitmap(pred[batch_index], mask,
-                                                                                      classes[batch_index],
-                                                                                      src_w, src_h)
-                boxes_batch.append({'points': boxes, "classes": class_indexes, "class_scores": class_scores})
-
-        return boxes_batch
-```
-
-### 4.4. 模型启动
-
-在完成上述步骤后我们就可以正常启动训练
-
-```
-!python /home/aistudio/work/PaddleOCR/tools/train.py  -c  /home/aistudio/work/PaddleOCR/configs/det/det_mv3_db.yml
-```
-
-其他命令:
-```
-!python /home/aistudio/work/PaddleOCR/tools/eval.py  -c  /home/aistudio/work/PaddleOCR/configs/det/det_mv3_db.yml
-!python /home/aistudio/work/PaddleOCR/tools/infer_det.py  -c  /home/aistudio/work/PaddleOCR/configs/det/det_mv3_db.yml
-```
-模型推理
-```
-!python /home/aistudio/work/PaddleOCR/tools/infer/predict_det.py --image_dir="/home/aistudio/work/test_img/" --det_model_dir="/home/aistudio/work/PaddleOCR/output/infer"
-```
-
-## 5 总结
-
-1. 分类+检测在一定程度上能够缩短用时,具体的模型选取要根据业务场景恰当选择。
-2. 数据标注需要多次进行测试调整标注方法,一般进行检测模型微调,需要标注至少上百张。
-3. 设置合理的batch_size以及resize大小,同时注意lr设置。
-
-
-##  References
-
-1 https://github.com/PaddlePaddle/PaddleOCR
-
-2 https://github.com/PaddlePaddle/PaddleClas
-
-3 https://blog.csdn.net/YY007H/article/details/124491217
+# 快速构建卡证类OCR
+
+
+- [快速构建卡证类OCR](#快速构建卡证类ocr)
+  - [1. 金融行业卡证识别应用](#1-金融行业卡证识别应用)
+    - [1.1 金融行业中的OCR相关技术](#11-金融行业中的ocr相关技术)
+    - [1.2 金融行业中的卡证识别场景介绍](#12-金融行业中的卡证识别场景介绍)
+    - [1.3 OCR落地挑战](#13-ocr落地挑战)
+  - [2. 卡证识别技术解析](#2-卡证识别技术解析)
+    - [2.1 卡证分类模型](#21-卡证分类模型)
+    - [2.2 卡证识别模型](#22-卡证识别模型)
+  - [3. OCR技术拆解](#3-ocr技术拆解)
+    - [3.1技术流程](#31技术流程)
+    - [3.2 OCR技术拆解---卡证分类](#32-ocr技术拆解---卡证分类)
+      - [卡证分类:数据、模型准备](#卡证分类数据模型准备)
+      - [卡证分类---修改配置文件](#卡证分类---修改配置文件)
+      - [卡证分类---训练](#卡证分类---训练)
+    - [3.2 OCR技术拆解---卡证识别](#32-ocr技术拆解---卡证识别)
+      - [身份证识别:检测+分类](#身份证识别检测分类)
+      - [数据标注](#数据标注)
+  - [4 . 项目实践](#4--项目实践)
+    - [4.1 环境准备](#41-环境准备)
+    - [4.2 配置文件修改](#42-配置文件修改)
+    - [4.3 代码修改](#43-代码修改)
+      - [4.3.1 数据读取](#431-数据读取)
+      - [4.3.2  head修改](#432--head修改)
+      - [4.3.3 修改loss](#433-修改loss)
+      - [4.3.4 后处理](#434-后处理)
+    - [4.4. 模型启动](#44-模型启动)
+  - [5 总结](#5-总结)
+  - [References](#references)
+
+## 1. 金融行业卡证识别应用
+
+### 1.1 金融行业中的OCR相关技术
+
+* 《“十四五”数字经济发展规划》指出,2020年我国数字经济核心产业增加值占GDP比重达7.8%,随着数字经济迈向全面扩展,到2025年该比例将提升至10%。
+
+* 在过去数年的跨越发展与积累沉淀中,数字金融、金融科技已在对金融业的重塑与再造中充分印证了其自身价值。
+
+* 以智能为目标,提升金融数字化水平,实现业务流程自动化,降低人力成本。
+
+
+![](https://ai-studio-static-online.cdn.bcebos.com/8bb381f164c54ea9b4043cf66fc92ffdea8aaf851bab484fa6e19bd2f93f154f)
+
+
+
+### 1.2 金融行业中的卡证识别场景介绍
+
+应用场景:身份证、银行卡、营业执照、驾驶证等。
+
+应用难点:由于数据的采集来源多样,以及实际采集数据各种噪声:反光、褶皱、模糊、倾斜等各种问题干扰。
+
+![](https://ai-studio-static-online.cdn.bcebos.com/981640e17d05487e961162f8576c9e11634ca157f79048d4bd9d3bc21722afe8)
+
+
+
+### 1.3 OCR落地挑战
+
+
+![](https://ai-studio-static-online.cdn.bcebos.com/a5973a8ddeff4bd7ac082f02dc4d0c79de21e721b41641cbb831f23c2cb8fce2)
+
+
+
+
+
+## 2. 卡证识别技术解析
+
+
+![](https://ai-studio-static-online.cdn.bcebos.com/d7f96effc2434a3ca2d4144ff33c50282b830670c892487d8d7dec151921cce7)
+
+
+### 2.1 卡证分类模型
+
+卡证分类:基于PPLCNet
+
+与其他轻量级模型相比在CPU环境下ImageNet数据集上的表现
+
+![](https://ai-studio-static-online.cdn.bcebos.com/cbda3390cb994f98a3c8a9ba88c90c348497763f6c9f4b4797f7d63d84da5f63)
+
+![](https://ai-studio-static-online.cdn.bcebos.com/dedab7b7fd6543aa9e7f625132b24e3ba3f200e361fa468dac615f7814dfb98d)
+
+
+
+* 模型来自模型库PaddleClas,它是一个图像识别和图像分类任务的工具集,助力使用者训练出更好的视觉模型和应用落地。
+
+### 2.2 卡证识别模型
+
+* 检测:DBNet  识别:SVTR
+
+![](https://ai-studio-static-online.cdn.bcebos.com/9a7a4e19edc24310b46620f2ee7430f918223b93d4f14a15a52973c096926bad)
+
+
+* PPOCRv3在文本检测、识别进行了一系列改进优化,在保证精度的同时提升预测效率
+
+
+![](https://ai-studio-static-online.cdn.bcebos.com/6afdbb77e8db4aef9b169e4e94c5d90a9764cfab4f2c4c04aa9afdf4f54d7680)
+
+
+![](https://ai-studio-static-online.cdn.bcebos.com/c1a7d197847a4f168848c59b8e625d1d5e8066b778144395a8b9382bb85dc364)
+
+
+## 3. OCR技术拆解
+
+### 3.1技术流程
+
+![](https://ai-studio-static-online.cdn.bcebos.com/89ba046177864d8783ced6cb31ba92a66ca2169856a44ee59ac2bb18e44a6c4b)
+
+
+### 3.2 OCR技术拆解---卡证分类
+
+####  卡证分类:数据、模型准备
+
+
+A  使用爬虫获取无标注数据,将相同类别的放在同一文件夹下,文件名从0开始命名。具体格式如下图所示。
+
+​    注:卡证类数据,建议每个类别数据量在500张以上
+![](https://ai-studio-static-online.cdn.bcebos.com/6f875b6e695e4fe5aedf427beb0d4ce8064ad7cc33c44faaad59d3eb9732639d)
+
+
+B  一行命令生成标签文件
+
+```
+tree -r -i -f | grep -E "jpg|JPG|jpeg|JPEG|png|PNG|webp" | awk -F "/" '{print $0" "$2}' > train_list.txt
+```
+
+C [下载预训练模型 ](https://github.com/PaddlePaddle/PaddleClas/blob/release/2.4/docs/zh_CN/models/PP-LCNet.md)
+
+
+
+####  卡证分类---修改配置文件
+
+
+配置文件主要修改三个部分:
+
+  全局参数:预训练模型路径/训练轮次/图像尺寸
+
+  模型结构:分类数
+
+  数据处理:训练/评估数据路径
+
+
+  ![](https://ai-studio-static-online.cdn.bcebos.com/e0dc05039c7444c5ab1260ff550a408748df8d4cfe864223adf390e51058dbd5)
+
+#### 卡证分类---训练
+
+
+指定配置文件启动训练:
+
+```
+!python /home/aistudio/work/PaddleClas/tools/train.py -c   /home/aistudio/work/PaddleClas/ppcls/configs/PULC/text_image_orientation/PPLCNet_x1_0.yaml
+```
+![](https://ai-studio-static-online.cdn.bcebos.com/06af09bde845449ba0a676410f4daa1cdc3983ac95034bdbbafac3b7fd94042f)
+
+​    注:日志中显示了训练结果和评估结果(训练时可以设置固定轮数评估一次)
+
+
+### 3.2 OCR技术拆解---卡证识别
+
+卡证识别(以身份证检测为例)
+存在的困难及问题:
+
+  * 在自然场景下,由于各种拍摄设备以及光线、角度不同等影响导致实际得到的证件影像千差万别。
+
+  * 如何快速提取需要的关键信息
+
+  * 多行的文本信息,检测结果如何正确拼接
+
+  ![](https://ai-studio-static-online.cdn.bcebos.com/4f8f5533a2914e0a821f4a639677843c32ec1f08a1b1488d94c0b8bfb6e72d2d)
+
+
+
+* OCR技术拆解---OCR工具库
+
+    PaddleOCR是一个丰富、领先且实用的OCR工具库,助力开发者训练出更好的模型并应用落地
+
+
+身份证识别:用现有的方法识别
+
+![](https://ai-studio-static-online.cdn.bcebos.com/12d402e6a06d482a88f979e0ebdfb39f4d3fc8b80517499689ec607ddb04fbf3)
+
+
+
+
+####  身份证识别:检测+分类
+
+>   方法:基于现有的dbnet检测模型,加入分类方法。检测同时进行分类,从一定程度上优化识别流程
+
+![](https://ai-studio-static-online.cdn.bcebos.com/e1e798c87472477fa0bfca0da12bb0c180845a3e167a4761b0d26ff4330a5ccb)
+
+
+![](https://ai-studio-static-online.cdn.bcebos.com/23a5a19c746441309864586e467f995ec8a551a3661640e493fc4d77520309cd)
+
+#### 数据标注
+
+使用PPOCRLabel进行快速标注
+
+![](https://ai-studio-static-online.cdn.bcebos.com/a73180425fa14f919ce52d9bf70246c3995acea1831843cca6c17d871b8f5d95)
+
+
+* 修改PPOCRLabel.py,将下图中的kie参数设置为True
+
+
+![](https://ai-studio-static-online.cdn.bcebos.com/d445cf4d850e4063b9a7fc6a075c12204cf912ff23ec471fa2e268b661b3d693)
+
+
+* 数据标注踩坑分享
+
+![](https://ai-studio-static-online.cdn.bcebos.com/89f42eccd600439fa9e28c97ccb663726e4e54ce3a854825b4c3b7d554ea21df)
+
+​    注:两者只有标注有差别,训练参数数据集都相同
+
+## 4 . 项目实践
+
+AIStudio项目链接:[快速构建卡证类OCR](https://aistudio.baidu.com/aistudio/projectdetail/4459116)
+
+### 4.1 环境准备
+
+1)拉取[paddleocr](https://github.com/PaddlePaddle/PaddleOCR)项目,如果从github上拉取速度慢可以选择从gitee上获取。
+```
+!git clone https://github.com/PaddlePaddle/PaddleOCR.git  -b release/2.6  /home/aistudio/work/
+```
+
+2)获取并解压预训练模型,如果要使用其他模型可以从模型库里自主选择合适模型。
+```
+!wget -P work/pre_trained/   https://paddleocr.bj.bcebos.com/PP-OCRv3/chinese/ch_PP-OCRv3_det_distill_train.tar
+!tar -vxf /home/aistudio/work/pre_trained/ch_PP-OCRv3_det_distill_train.tar -C /home/aistudio/work/pre_trained
+```
+3) 安装必要依赖
+```
+!pip install -r /home/aistudio/work/requirements.txt
+```
+
+### 4.2 配置文件修改
+
+修改配置文件 *work/configs/det/detmv3db.yml*
+
+具体修改说明如下:
+
+![](https://ai-studio-static-online.cdn.bcebos.com/fcdf517af5a6466294d72db7450209378d8efd9b77764e329d3f2aff3579a20c)
+
+  注:在上述的配置文件的Global变量中需要添加以下两个参数:
+
+​      label_list 为标签表
+​     num_classes 为分类数
+​     上述两个参数根据实际的情况配置即可
+
+
+![](https://ai-studio-static-online.cdn.bcebos.com/0b056be24f374812b61abf43305774767ae122c8479242f98aa0799b7bfc81d4)
+
+其中label_list内容如下例所示,***建议第一个参数设置为 background,不要设置为实际要提取的关键信息种类***:
+
+![](https://ai-studio-static-online.cdn.bcebos.com/9fc78bbcdf754898b9b2c7f000ddf562afac786482ab4f2ab063e2242faa542a)
+
+配置文件中的其他设置说明
+
+![](https://ai-studio-static-online.cdn.bcebos.com/c7fc5e631dd44bc8b714630f4e49d9155a831d9e56c64e2482ded87081d0db22)
+
+![](https://ai-studio-static-online.cdn.bcebos.com/8d1022ac25d9474daa4fb236235bd58760039d58ad46414f841559d68e0d057f)
+
+![](https://ai-studio-static-online.cdn.bcebos.com/ee927ad9ebd442bb96f163a7ebbf4bc95e6bedee97324a51887cf82de0851fd3)
+
+
+
+
+### 4.3 代码修改
+
+
+#### 4.3.1 数据读取
+
+
+
+* 修改 PaddleOCR/ppocr/data/imaug/label_ops.py中的DetLabelEncode
+
+
+```python
+class DetLabelEncode(object):
+
+    # 修改检测标签的编码处,新增了参数分类数:num_classes,重写初始化方法,以及分类标签的读取
+
+    def __init__(self, label_list, num_classes=8, **kwargs):
+        self.num_classes = num_classes
+        self.label_list = []
+        if label_list:
+            if isinstance(label_list, str):
+                with open(label_list, 'r+', encoding='utf-8') as f:
+                    for line in f.readlines():
+                        self.label_list.append(line.replace("\n", ""))
+            else:
+                self.label_list = label_list
+        else:
+            assert ' please check label_list whether it is none or config is right'
+
+        if num_classes != len(self.label_list): # 校验分类数和标签的一致性
+            assert 'label_list length is not equal to the num_classes'
+
+    def __call__(self, data):
+        label = data['label']
+        label = json.loads(label)
+        nBox = len(label)
+        boxes, txts, txt_tags, classes = [], [], [], []
+        for bno in range(0, nBox):
+            box = label[bno]['points']
+            txt = label[bno]['key_cls']  # 此处将kie中的参数作为分类读取
+            boxes.append(box)
+            txts.append(txt)
+
+            if txt in ['*', '###']:
+                txt_tags.append(True)
+                if self.num_classes > 1:
+                    classes.append(-2)
+            else:
+                txt_tags.append(False)
+                if self.num_classes > 1:  # 将KIE内容的key标签作为分类标签使用
+                    classes.append(int(self.label_list.index(txt)))
+
+        if len(boxes) == 0:
+
+            return None
+        boxes = self.expand_points_num(boxes)
+        boxes = np.array(boxes, dtype=np.float32)
+        txt_tags = np.array(txt_tags, dtype=np.bool_)
+        classes = classes
+        data['polys'] = boxes
+        data['texts'] = txts
+        data['ignore_tags'] = txt_tags
+        if self.num_classes > 1:
+            data['classes'] = classes
+        return data
+```
+
+* 修改 PaddleOCR/ppocr/data/imaug/make_shrink_map.py中的MakeShrinkMap类。这里需要注意的是,如果我们设置的label_list中的第一个参数为要检测的信息那么会得到如下的mask,
+
+举例说明:
+这是检测的mask图,图中有四个mask那么实际对应的分类应该是4类
+
+![](https://ai-studio-static-online.cdn.bcebos.com/42d2188d3d6b498880952e12c3ceae1efabf135f8d9f4c31823f09ebe02ba9d2)
+
+
+
+label_list中第一个为关键分类,则得到的分类Mask实际如下,与上图相比,少了一个box:
+
+![](https://ai-studio-static-online.cdn.bcebos.com/864604967256461aa7c5d32cd240645e9f4c70af773341d5911f22d5a3e87b5f)
+
+
+
+```python
+class MakeShrinkMap(object):
+    r'''
+    Making binary mask from detection data with ICDAR format.
+    Typically following the process of class `MakeICDARData`.
+    '''
+
+    def __init__(self, min_text_size=8, shrink_ratio=0.4, num_classes=8, **kwargs):
+        self.min_text_size = min_text_size
+        self.shrink_ratio = shrink_ratio
+        self.num_classes = num_classes  #  添加了分类
+
+    def __call__(self, data):
+        image = data['image']
+        text_polys = data['polys']
+        ignore_tags = data['ignore_tags']
+        if self.num_classes > 1:
+            classes = data['classes']
+
+        h, w = image.shape[:2]
+        text_polys, ignore_tags = self.validate_polygons(text_polys,
+                                                         ignore_tags, h, w)
+        gt = np.zeros((h, w), dtype=np.float32)
+        mask = np.ones((h, w), dtype=np.float32)
+        gt_class = np.zeros((h, w), dtype=np.float32)  # 新增分类
+        for i in range(len(text_polys)):
+            polygon = text_polys[i]
+            height = max(polygon[:, 1]) - min(polygon[:, 1])
+            width = max(polygon[:, 0]) - min(polygon[:, 0])
+            if ignore_tags[i] or min(height, width) < self.min_text_size:
+                cv2.fillPoly(mask,
+                             polygon.astype(np.int32)[np.newaxis, :, :], 0)
+                ignore_tags[i] = True
+            else:
+                polygon_shape = Polygon(polygon)
+                subject = [tuple(l) for l in polygon]
+                padding = pyclipper.PyclipperOffset()
+                padding.AddPath(subject, pyclipper.JT_ROUND,
+                                pyclipper.ET_CLOSEDPOLYGON)
+                shrinked = []
+
+                # Increase the shrink ratio every time we get multiple polygon returned back
+                possible_ratios = np.arange(self.shrink_ratio, 1,
+                                            self.shrink_ratio)
+                np.append(possible_ratios, 1)
+                for ratio in possible_ratios:
+                    distance = polygon_shape.area * (
+                        1 - np.power(ratio, 2)) / polygon_shape.length
+                    shrinked = padding.Execute(-distance)
+                    if len(shrinked) == 1:
+                        break
+
+                if shrinked == []:
+                    cv2.fillPoly(mask,
+                                 polygon.astype(np.int32)[np.newaxis, :, :], 0)
+                    ignore_tags[i] = True
+                    continue
+
+                for each_shirnk in shrinked:
+                    shirnk = np.array(each_shirnk).reshape(-1, 2)
+                    cv2.fillPoly(gt, [shirnk.astype(np.int32)], 1)
+                    if self.num_classes > 1:  # 绘制分类的mask
+                        cv2.fillPoly(gt_class, polygon.astype(np.int32)[np.newaxis, :, :], classes[i])
+
+
+        data['shrink_map'] = gt
+
+        if self.num_classes > 1:
+            data['class_mask'] = gt_class
+
+        data['shrink_mask'] = mask
+        return data
+```
+
+由于在训练数据中会对数据进行resize设置,yml中的操作为:EastRandomCropData,所以需要修改PaddleOCR/ppocr/data/imaug/random_crop_data.py中的EastRandomCropData
+
+
+```python
+class EastRandomCropData(object):
+    def __init__(self,
+                 size=(640, 640),
+                 max_tries=10,
+                 min_crop_side_ratio=0.1,
+                 keep_ratio=True,
+                 num_classes=8,
+                 **kwargs):
+        self.size = size
+        self.max_tries = max_tries
+        self.min_crop_side_ratio = min_crop_side_ratio
+        self.keep_ratio = keep_ratio
+        self.num_classes = num_classes
+
+    def __call__(self, data):
+        img = data['image']
+        text_polys = data['polys']
+        ignore_tags = data['ignore_tags']
+        texts = data['texts']
+        if self.num_classes > 1:
+            classes = data['classes']
+        all_care_polys = [
+            text_polys[i] for i, tag in enumerate(ignore_tags) if not tag
+        ]
+        # 计算crop区域
+        crop_x, crop_y, crop_w, crop_h = crop_area(
+            img, all_care_polys, self.min_crop_side_ratio, self.max_tries)
+        # crop 图片 保持比例填充
+        scale_w = self.size[0] / crop_w
+        scale_h = self.size[1] / crop_h
+        scale = min(scale_w, scale_h)
+        h = int(crop_h * scale)
+        w = int(crop_w * scale)
+        if self.keep_ratio:
+            padimg = np.zeros((self.size[1], self.size[0], img.shape[2]),
+                              img.dtype)
+            padimg[:h, :w] = cv2.resize(
+                img[crop_y:crop_y + crop_h, crop_x:crop_x + crop_w], (w, h))
+            img = padimg
+        else:
+            img = cv2.resize(
+                img[crop_y:crop_y + crop_h, crop_x:crop_x + crop_w],
+                tuple(self.size))
+        # crop 文本框
+        text_polys_crop = []
+        ignore_tags_crop = []
+        texts_crop = []
+        classes_crop = []
+        for poly, text, tag,class_index in zip(text_polys, texts, ignore_tags,classes):
+            poly = ((poly - (crop_x, crop_y)) * scale).tolist()
+            if not is_poly_outside_rect(poly, 0, 0, w, h):
+                text_polys_crop.append(poly)
+                ignore_tags_crop.append(tag)
+                texts_crop.append(text)
+                if self.num_classes > 1:
+                    classes_crop.append(class_index)
+        data['image'] = img
+        data['polys'] = np.array(text_polys_crop)
+        data['ignore_tags'] = ignore_tags_crop
+        data['texts'] = texts_crop
+        if self.num_classes > 1:
+            data['classes'] = classes_crop
+        return data
+```
+
+#### 4.3.2  head修改
+
+
+
+主要修改 ppocr/modeling/heads/det_db_head.py,将Head类中的最后一层的输出修改为实际的分类数,同时在DBHead中新增分类的head。
+
+![](https://ai-studio-static-online.cdn.bcebos.com/0e25da2ccded4af19e95c85c3d3287ab4d53e31a4eed4607b6a4cb637c43f6d3)
+
+
+
+#### 4.3.3 修改loss
+
+
+修改PaddleOCR/ppocr/losses/det_db_loss.py中的DBLoss类,分类采用交叉熵损失函数进行计算。
+
+![](https://ai-studio-static-online.cdn.bcebos.com/dc10a070018d4d27946c26ec24a2a85bc3f16422f4964f72a9b63c6170d954e1)
+
+
+#### 4.3.4 后处理
+
+
+
+由于涉及到eval以及后续推理能否正常使用,我们需要修改后处理的相关代码,修改位置 PaddleOCR/ppocr/postprocess/db_postprocess.py中的DBPostProcess类
+
+
+```python
+class DBPostProcess(object):
+    """
+    The post process for Differentiable Binarization (DB).
+    """
+
+    def __init__(self,
+                 thresh=0.3,
+                 box_thresh=0.7,
+                 max_candidates=1000,
+                 unclip_ratio=2.0,
+                 use_dilation=False,
+                 score_mode="fast",
+                 **kwargs):
+        self.thresh = thresh
+        self.box_thresh = box_thresh
+        self.max_candidates = max_candidates
+        self.unclip_ratio = unclip_ratio
+        self.min_size = 3
+        self.score_mode = score_mode
+        assert score_mode in [
+            "slow", "fast"
+        ], "Score mode must be in [slow, fast] but got: {}".format(score_mode)
+
+        self.dilation_kernel = None if not use_dilation else np.array(
+            [[1, 1], [1, 1]])
+
+    def boxes_from_bitmap(self, pred, _bitmap, classes, dest_width, dest_height):
+        """
+        _bitmap: single map with shape (1, H, W),
+                whose values are binarized as {0, 1}
+        """
+
+        bitmap = _bitmap
+        height, width = bitmap.shape
+
+        outs = cv2.findContours((bitmap * 255).astype(np.uint8), cv2.RETR_LIST,
+                                cv2.CHAIN_APPROX_SIMPLE)
+        if len(outs) == 3:
+            img, contours, _ = outs[0], outs[1], outs[2]
+        elif len(outs) == 2:
+            contours, _ = outs[0], outs[1]
+
+        num_contours = min(len(contours), self.max_candidates)
+
+        boxes = []
+        scores = []
+        class_indexes = []
+        class_scores = []
+        for index in range(num_contours):
+            contour = contours[index]
+            points, sside = self.get_mini_boxes(contour)
+            if sside < self.min_size:
+                continue
+            points = np.array(points)
+            if self.score_mode == "fast":
+                score, class_index, class_score = self.box_score_fast(pred, points.reshape(-1, 2), classes)
+            else:
+                score, class_index, class_score = self.box_score_slow(pred, contour, classes)
+            if self.box_thresh > score:
+                continue
+
+            box = self.unclip(points).reshape(-1, 1, 2)
+            box, sside = self.get_mini_boxes(box)
+            if sside < self.min_size + 2:
+                continue
+            box = np.array(box)
+
+            box[:, 0] = np.clip(
+                np.round(box[:, 0] / width * dest_width), 0, dest_width)
+            box[:, 1] = np.clip(
+                np.round(box[:, 1] / height * dest_height), 0, dest_height)
+
+            boxes.append(box.astype(np.int16))
+            scores.append(score)
+
+            class_indexes.append(class_index)
+            class_scores.append(class_score)
+
+        if classes is None:
+            return np.array(boxes, dtype=np.int16), scores
+        else:
+            return np.array(boxes, dtype=np.int16), scores, class_indexes, class_scores
+
+    def unclip(self, box):
+        unclip_ratio = self.unclip_ratio
+        poly = Polygon(box)
+        distance = poly.area * unclip_ratio / poly.length
+        offset = pyclipper.PyclipperOffset()
+        offset.AddPath(box, pyclipper.JT_ROUND, pyclipper.ET_CLOSEDPOLYGON)
+        expanded = np.array(offset.Execute(distance))
+        return expanded
+
+    def get_mini_boxes(self, contour):
+        bounding_box = cv2.minAreaRect(contour)
+        points = sorted(list(cv2.boxPoints(bounding_box)), key=lambda x: x[0])
+
+        index_1, index_2, index_3, index_4 = 0, 1, 2, 3
+        if points[1][1] > points[0][1]:
+            index_1 = 0
+            index_4 = 1
+        else:
+            index_1 = 1
+            index_4 = 0
+        if points[3][1] > points[2][1]:
+            index_2 = 2
+            index_3 = 3
+        else:
+            index_2 = 3
+            index_3 = 2
+
+        box = [
+            points[index_1], points[index_2], points[index_3], points[index_4]
+        ]
+        return box, min(bounding_box[1])
+
+    def box_score_fast(self, bitmap, _box, classes):
+        '''
+        box_score_fast: use bbox mean score as the mean score
+        '''
+        h, w = bitmap.shape[:2]
+        box = _box.copy()
+        xmin = np.clip(np.floor(box[:, 0].min()).astype(np.int32), 0, w - 1)
+        xmax = np.clip(np.ceil(box[:, 0].max()).astype(np.int32), 0, w - 1)
+        ymin = np.clip(np.floor(box[:, 1].min()).astype(np.int32), 0, h - 1)
+        ymax = np.clip(np.ceil(box[:, 1].max()).astype(np.int32), 0, h - 1)
+
+        mask = np.zeros((ymax - ymin + 1, xmax - xmin + 1), dtype=np.uint8)
+        box[:, 0] = box[:, 0] - xmin
+        box[:, 1] = box[:, 1] - ymin
+        cv2.fillPoly(mask, box.reshape(1, -1, 2).astype(np.int32), 1)
+
+        if classes is None:
+            return cv2.mean(bitmap[ymin:ymax + 1, xmin:xmax + 1], mask)[0], None, None
+        else:
+            k = 999
+            class_mask = np.full((ymax - ymin + 1, xmax - xmin + 1), k, dtype=np.int32)
+
+            cv2.fillPoly(class_mask, box.reshape(1, -1, 2).astype(np.int32), 0)
+            classes = classes[ymin:ymax + 1, xmin:xmax + 1]
+
+            new_classes = classes + class_mask
+            a = new_classes.reshape(-1)
+            b = np.where(a >= k)
+            classes = np.delete(a, b[0].tolist())
+
+            class_index = np.argmax(np.bincount(classes))
+            class_score = np.sum(classes == class_index) / len(classes)
+
+            return cv2.mean(bitmap[ymin:ymax + 1, xmin:xmax + 1], mask)[0], class_index, class_score
+
+    def box_score_slow(self, bitmap, contour, classes):
+        """
+        box_score_slow: use polygon mean score as the mean score
+        """
+        h, w = bitmap.shape[:2]
+        contour = contour.copy()
+        contour = np.reshape(contour, (-1, 2))
+
+        xmin = np.clip(np.min(contour[:, 0]), 0, w - 1)
+        xmax = np.clip(np.max(contour[:, 0]), 0, w - 1)
+        ymin = np.clip(np.min(contour[:, 1]), 0, h - 1)
+        ymax = np.clip(np.max(contour[:, 1]), 0, h - 1)
+
+        mask = np.zeros((ymax - ymin + 1, xmax - xmin + 1), dtype=np.uint8)
+
+        contour[:, 0] = contour[:, 0] - xmin
+        contour[:, 1] = contour[:, 1] - ymin
+
+        cv2.fillPoly(mask, contour.reshape(1, -1, 2).astype(np.int32), 1)
+
+        if classes is None:
+            return cv2.mean(bitmap[ymin:ymax + 1, xmin:xmax + 1], mask)[0], None, None
+        else:
+            k = 999
+            class_mask = np.full((ymax - ymin + 1, xmax - xmin + 1), k, dtype=np.int32)
+
+            cv2.fillPoly(class_mask, contour.reshape(1, -1, 2).astype(np.int32), 0)
+            classes = classes[ymin:ymax + 1, xmin:xmax + 1]
+
+            new_classes = classes + class_mask
+            a = new_classes.reshape(-1)
+            b = np.where(a >= k)
+            classes = np.delete(a, b[0].tolist())
+
+            class_index = np.argmax(np.bincount(classes))
+            class_score = np.sum(classes == class_index) / len(classes)
+
+            return cv2.mean(bitmap[ymin:ymax + 1, xmin:xmax + 1], mask)[0], class_index, class_score
+
+    def __call__(self, outs_dict, shape_list):
+        pred = outs_dict['maps']
+        if isinstance(pred, paddle.Tensor):
+            pred = pred.numpy()
+        pred = pred[:, 0, :, :]
+        segmentation = pred > self.thresh
+
+        if "classes" in outs_dict:
+            classes = outs_dict['classes']
+            if isinstance(classes, paddle.Tensor):
+                classes = classes.numpy()
+            classes = classes[:, 0, :, :]
+
+        else:
+            classes = None
+
+        boxes_batch = []
+        for batch_index in range(pred.shape[0]):
+            src_h, src_w, ratio_h, ratio_w = shape_list[batch_index]
+            if self.dilation_kernel is not None:
+                mask = cv2.dilate(
+                    np.array(segmentation[batch_index]).astype(np.uint8),
+                    self.dilation_kernel)
+            else:
+                mask = segmentation[batch_index]
+
+            if classes is None:
+                boxes, scores = self.boxes_from_bitmap(pred[batch_index], mask, None,
+                                                       src_w, src_h)
+                boxes_batch.append({'points': boxes})
+            else:
+                boxes, scores, class_indexes, class_scores = self.boxes_from_bitmap(pred[batch_index], mask,
+                                                                                      classes[batch_index],
+                                                                                      src_w, src_h)
+                boxes_batch.append({'points': boxes, "classes": class_indexes, "class_scores": class_scores})
+
+        return boxes_batch
+```
+
+### 4.4. 模型启动
+
+在完成上述步骤后我们就可以正常启动训练
+
+```
+!python /home/aistudio/work/PaddleOCR/tools/train.py  -c  /home/aistudio/work/PaddleOCR/configs/det/det_mv3_db.yml
+```
+
+其他命令:
+```
+!python /home/aistudio/work/PaddleOCR/tools/eval.py  -c  /home/aistudio/work/PaddleOCR/configs/det/det_mv3_db.yml
+!python /home/aistudio/work/PaddleOCR/tools/infer_det.py  -c  /home/aistudio/work/PaddleOCR/configs/det/det_mv3_db.yml
+```
+模型推理
+```
+!python /home/aistudio/work/PaddleOCR/tools/infer/predict_det.py --image_dir="/home/aistudio/work/test_img/" --det_model_dir="/home/aistudio/work/PaddleOCR/output/infer"
+```
+
+## 5 总结
+
+1. 分类+检测在一定程度上能够缩短用时,具体的模型选取要根据业务场景恰当选择。
+2. 数据标注需要多次进行测试调整标注方法,一般进行检测模型微调,需要标注至少上百张。
+3. 设置合理的batch_size以及resize大小,同时注意lr设置。
+
+
+##  References
+
+1 https://github.com/PaddlePaddle/PaddleOCR
+
+2 https://github.com/PaddlePaddle/PaddleClas
+
+3 https://blog.csdn.net/YY007H/article/details/124491217
diff --git a/applications/手写文字识别.md b/applications/手写文字识别.md
index b2bfdb3aa..3685b6645 100644
--- a/applications/手写文字识别.md
+++ b/applications/手写文字识别.md
@@ -228,7 +228,7 @@ python tools/infer/predict_rec.py --image_dir="train_data/handwrite/HWDB2.0Test_
 
 ```python
 # 可视化文字识别图片
-from PIL import Image  
+from PIL import Image
 import matplotlib.pyplot as plt
 import numpy as np
 import os
@@ -238,10 +238,10 @@ img_path = 'train_data/handwrite/HWDB2.0Test_images/104-P16_4.jpg'
 
 def vis(img_path):
     plt.figure()
-    image = Image.open(img_path)  
+    image = Image.open(img_path)
     plt.imshow(image)
     plt.show()
-    # image = image.resize([208, 208])  
+    # image = image.resize([208, 208])
 
 
 vis(img_path)
diff --git a/applications/液晶屏读数识别.md b/applications/液晶屏读数识别.md
index 9e11407e0..2e55dbcd9 100644
--- a/applications/液晶屏读数识别.md
+++ b/applications/液晶屏读数识别.md
@@ -64,7 +64,7 @@ unzip icdar2015.zip  -d train_data
 
 ```python
 # 随机查看文字检测数据集图片
-from PIL import Image  
+from PIL import Image
 import matplotlib.pyplot as plt
 import numpy as np
 import os
@@ -77,13 +77,13 @@ def get_one_image(train):
     files = os.listdir(train)
     n = len(files)
     ind = np.random.randint(0,n)
-    img_dir = os.path.join(train,files[ind])  
-    image = Image.open(img_dir)  
+    img_dir = os.path.join(train,files[ind])
+    image = Image.open(img_dir)
     plt.imshow(image)
     plt.show()
-    image = image.resize([208, 208])  
+    image = image.resize([208, 208])
 
-get_one_image(train)  
+get_one_image(train)
 ```
 ![det_png](https://ai-studio-static-online.cdn.bcebos.com/0639da09b774458096ae577e82b2c59e89ced6a00f55458f946997ab7472a4f8)
 
@@ -355,7 +355,7 @@ unzip ic15_data.zip -d train_data
 
 ```python
 # 随机查看文字检测数据集图片
-from PIL import Image  
+from PIL import Image
 import matplotlib.pyplot as plt
 import numpy as np
 import os
@@ -367,11 +367,11 @@ def get_one_image(train):
     files = os.listdir(train)
     n = len(files)
     ind = np.random.randint(0,n)
-    img_dir = os.path.join(train,files[ind])  
-    image = Image.open(img_dir)  
+    img_dir = os.path.join(train,files[ind])
+    image = Image.open(img_dir)
     plt.imshow(image)
     plt.show()
-    image = image.resize([208, 208])  
+    image = image.resize([208, 208])
 
 get_one_image(train)
 ```
diff --git a/benchmark/PaddleOCR_DBNet/.gitattributes b/benchmark/PaddleOCR_DBNet/.gitattributes
index 8543e0a71..b4419d46e 100644
--- a/benchmark/PaddleOCR_DBNet/.gitattributes
+++ b/benchmark/PaddleOCR_DBNet/.gitattributes
@@ -1,2 +1,2 @@
 *.html linguist-language=python
-*.ipynb linguist-language=python
\ No newline at end of file
+*.ipynb linguist-language=python
diff --git a/benchmark/PaddleOCR_DBNet/.gitignore b/benchmark/PaddleOCR_DBNet/.gitignore
index cef1c73b3..f18fe1018 100644
--- a/benchmark/PaddleOCR_DBNet/.gitignore
+++ b/benchmark/PaddleOCR_DBNet/.gitignore
@@ -13,4 +13,4 @@ datasets/
 index/
 train_log/
 log/
-profiling_log/
\ No newline at end of file
+profiling_log/
diff --git a/benchmark/PaddleOCR_DBNet/config/SynthText.yaml b/benchmark/PaddleOCR_DBNet/config/SynthText.yaml
index 61d5da7d3..8fd511c80 100644
--- a/benchmark/PaddleOCR_DBNet/config/SynthText.yaml
+++ b/benchmark/PaddleOCR_DBNet/config/SynthText.yaml
@@ -37,4 +37,4 @@ dataset:
       batch_size: 1
       shuffle: true
       num_workers: 0
-      collate_fn: ''
\ No newline at end of file
+      collate_fn: ''
diff --git a/benchmark/PaddleOCR_DBNet/config/SynthText_resnet18_FPN_DBhead_polyLR.yaml b/benchmark/PaddleOCR_DBNet/config/SynthText_resnet18_FPN_DBhead_polyLR.yaml
index a665e94a7..c285d361b 100644
--- a/benchmark/PaddleOCR_DBNet/config/SynthText_resnet18_FPN_DBhead_polyLR.yaml
+++ b/benchmark/PaddleOCR_DBNet/config/SynthText_resnet18_FPN_DBhead_polyLR.yaml
@@ -62,4 +62,4 @@ dataset:
       batch_size: 2
       shuffle: true
       num_workers: 6
-      collate_fn: ''
\ No newline at end of file
+      collate_fn: ''
diff --git a/benchmark/PaddleOCR_DBNet/config/icdar2015.yaml b/benchmark/PaddleOCR_DBNet/config/icdar2015.yaml
index 4551b14b2..4233d3e2d 100644
--- a/benchmark/PaddleOCR_DBNet/config/icdar2015.yaml
+++ b/benchmark/PaddleOCR_DBNet/config/icdar2015.yaml
@@ -66,4 +66,4 @@ dataset:
       batch_size: 1
       shuffle: true
       num_workers: 0
-      collate_fn: ICDARCollectFN
\ No newline at end of file
+      collate_fn: ICDARCollectFN
diff --git a/benchmark/PaddleOCR_DBNet/config/icdar2015_dcn_resnet18_FPN_DBhead_polyLR.yaml b/benchmark/PaddleOCR_DBNet/config/icdar2015_dcn_resnet18_FPN_DBhead_polyLR.yaml
index 608ef42c1..3e2442838 100644
--- a/benchmark/PaddleOCR_DBNet/config/icdar2015_dcn_resnet18_FPN_DBhead_polyLR.yaml
+++ b/benchmark/PaddleOCR_DBNet/config/icdar2015_dcn_resnet18_FPN_DBhead_polyLR.yaml
@@ -79,4 +79,4 @@ dataset:
       batch_size: 1
       shuffle: true
       num_workers: 6
-      collate_fn: ICDARCollectFN
\ No newline at end of file
+      collate_fn: ICDARCollectFN
diff --git a/benchmark/PaddleOCR_DBNet/config/open_dataset.yaml b/benchmark/PaddleOCR_DBNet/config/open_dataset.yaml
index 97267586c..05ece6e91 100644
--- a/benchmark/PaddleOCR_DBNet/config/open_dataset.yaml
+++ b/benchmark/PaddleOCR_DBNet/config/open_dataset.yaml
@@ -70,4 +70,4 @@ dataset:
       batch_size: 1
       shuffle: true
       num_workers: 0
-      collate_fn: ICDARCollectFN
\ No newline at end of file
+      collate_fn: ICDARCollectFN
diff --git a/benchmark/PaddleOCR_DBNet/eval.sh b/benchmark/PaddleOCR_DBNet/eval.sh
index b3bf46818..7520a73cf 100644
--- a/benchmark/PaddleOCR_DBNet/eval.sh
+++ b/benchmark/PaddleOCR_DBNet/eval.sh
@@ -1 +1 @@
-CUDA_VISIBLE_DEVICES=0 python3 tools/eval.py --model_path ''
\ No newline at end of file
+CUDA_VISIBLE_DEVICES=0 python3 tools/eval.py --model_path ''
diff --git a/benchmark/PaddleOCR_DBNet/multi_gpu_train.sh b/benchmark/PaddleOCR_DBNet/multi_gpu_train.sh
index b49a73f15..4b9a9158c 100644
--- a/benchmark/PaddleOCR_DBNet/multi_gpu_train.sh
+++ b/benchmark/PaddleOCR_DBNet/multi_gpu_train.sh
@@ -1,2 +1,2 @@
 # export NCCL_P2P_DISABLE=1
-CUDA_VISIBLE_DEVICES=0,1,2,3 python3 -m paddle.distributed.launch tools/train.py --config_file "config/icdar2015_resnet50_FPN_DBhead_polyLR.yaml"
\ No newline at end of file
+CUDA_VISIBLE_DEVICES=0,1,2,3 python3 -m paddle.distributed.launch tools/train.py --config_file "config/icdar2015_resnet50_FPN_DBhead_polyLR.yaml"
diff --git a/benchmark/PaddleOCR_DBNet/predict.sh b/benchmark/PaddleOCR_DBNet/predict.sh
index 37ab14828..a4b9bfa4f 100644
--- a/benchmark/PaddleOCR_DBNet/predict.sh
+++ b/benchmark/PaddleOCR_DBNet/predict.sh
@@ -1 +1 @@
-CUDA_VISIBLE_DEVICES=0 python tools/predict.py --model_path model_best.pth --input_folder ./input --output_folder ./output --thre 0.7 --polygon --show --save_result
\ No newline at end of file
+CUDA_VISIBLE_DEVICES=0 python tools/predict.py --model_path model_best.pth --input_folder ./input --output_folder ./output --thre 0.7 --polygon --show --save_result
diff --git a/benchmark/PaddleOCR_DBNet/singlel_gpu_train.sh b/benchmark/PaddleOCR_DBNet/singlel_gpu_train.sh
index f8b9f0e89..6803201fc 100644
--- a/benchmark/PaddleOCR_DBNet/singlel_gpu_train.sh
+++ b/benchmark/PaddleOCR_DBNet/singlel_gpu_train.sh
@@ -1 +1 @@
-CUDA_VISIBLE_DEVICES=0 python3 tools/train.py --config_file "config/icdar2015_resnet50_FPN_DBhead_polyLR.yaml" 
\ No newline at end of file
+CUDA_VISIBLE_DEVICES=0 python3 tools/train.py --config_file "config/icdar2015_resnet50_FPN_DBhead_polyLR.yaml" 
diff --git a/benchmark/PaddleOCR_DBNet/test/README.MD b/benchmark/PaddleOCR_DBNet/test/README.MD
index b43c6e9a1..3e2bb1a78 100644
--- a/benchmark/PaddleOCR_DBNet/test/README.MD
+++ b/benchmark/PaddleOCR_DBNet/test/README.MD
@@ -5,4 +5,4 @@ img_{img_id}.jpg
 
 For predicting single images, you can change the `img_path` in the `/tools/predict.py` to your image number.
 
-The result will be saved in the output_folder(default is test/output) you give in predict.sh
\ No newline at end of file
+The result will be saved in the output_folder(default is test/output) you give in predict.sh
diff --git a/benchmark/PaddleOCR_DBNet/test_tipc/common_func.sh b/benchmark/PaddleOCR_DBNet/test_tipc/common_func.sh
index c123d3cf6..9ec22f03a 100644
--- a/benchmark/PaddleOCR_DBNet/test_tipc/common_func.sh
+++ b/benchmark/PaddleOCR_DBNet/test_tipc/common_func.sh
@@ -64,4 +64,4 @@ function status_check(){
     else
         echo -e "\033[33m Run failed with command - ${model_name} - ${run_command} - ${log_path} \033[0m" | tee -a ${run_log}
     fi
-}
\ No newline at end of file
+}
diff --git a/benchmark/PaddleOCR_DBNet/test_tipc/prepare.sh b/benchmark/PaddleOCR_DBNet/test_tipc/prepare.sh
index cd8f56fd7..a9616032b 100644
--- a/benchmark/PaddleOCR_DBNet/test_tipc/prepare.sh
+++ b/benchmark/PaddleOCR_DBNet/test_tipc/prepare.sh
@@ -51,4 +51,4 @@ elif [ ${MODE} = "benchmark_train" ];then
         # cat dup* > train_icdar2015_label.txt && rm -rf dup*
         # cd ../../../
     fi
-fi
\ No newline at end of file
+fi
diff --git a/benchmark/PaddleOCR_DBNet/test_tipc/test_train_inference_python.sh b/benchmark/PaddleOCR_DBNet/test_tipc/test_train_inference_python.sh
index a54591a60..33c81e1c3 100644
--- a/benchmark/PaddleOCR_DBNet/test_tipc/test_train_inference_python.sh
+++ b/benchmark/PaddleOCR_DBNet/test_tipc/test_train_inference_python.sh
@@ -340,4 +340,4 @@ else
             done  # done with:    for trainer in ${trainer_list[*]}; do 
         done      # done with:    for autocast in ${autocast_list[*]}; do 
     done          # done with:    for gpu in ${gpu_list[*]}; do
-fi  # end if [ ${MODE} = "infer" ]; then
\ No newline at end of file
+fi  # end if [ ${MODE} = "infer" ]; then
diff --git a/benchmark/PaddleOCR_DBNet/tools/infer.py b/benchmark/PaddleOCR_DBNet/tools/infer.py
index 5ed4b8e94..1ca4bef80 100644
--- a/benchmark/PaddleOCR_DBNet/tools/infer.py
+++ b/benchmark/PaddleOCR_DBNet/tools/infer.py
@@ -194,7 +194,9 @@ class InferenceEngine(object):
                 box_list = [box_list[i] for i, v in enumerate(idx) if v]
                 score_list = [score_list[i] for i, v in enumerate(idx) if v]
             else:
-                idx = box_list.reshape(box_list.shape[0], -1).sum(axis=1) > 0  # 去掉全为0的框
+                idx = (
+                    box_list.reshape(box_list.shape[0], -1).sum(axis=1) > 0
+                )  # 去掉全为0的框
                 box_list, score_list = box_list[idx], score_list[idx]
         else:
             box_list, score_list = [], []
diff --git a/benchmark/run_benchmark_det.sh b/benchmark/run_benchmark_det.sh
index 9f5b46cde..125d8743b 100644
--- a/benchmark/run_benchmark_det.sh
+++ b/benchmark/run_benchmark_det.sh
@@ -59,4 +59,3 @@ source ${BENCHMARK_ROOT}/scripts/run_model.sh      # 在该脚本中会对符合
 _set_params $@
 #_train      # 如果只想产出训练log,不解析,可取消注释
 _run         # 该函数在run_model.sh中,执行时会调用_train; 如果不联调只想要产出训练log可以注掉本行,提交时需打开
-
diff --git a/benchmark/run_det.sh b/benchmark/run_det.sh
index 981510c9a..75b0f17f8 100644
--- a/benchmark/run_det.sh
+++ b/benchmark/run_det.sh
@@ -34,6 +34,3 @@ for model_mode in ${model_mode_list[@]}; do
             done
       done
 done
-
-
-
diff --git a/configs/det/det_mv3_east.yml b/configs/det/det_mv3_east.yml
index 4ae32ab00..461179e4e 100644
--- a/configs/det/det_mv3_east.yml
+++ b/configs/det/det_mv3_east.yml
@@ -106,4 +106,4 @@ Eval:
     shuffle: False
     drop_last: False
     batch_size_per_card: 1 # must be 1
-    num_workers: 2
\ No newline at end of file
+    num_workers: 2
diff --git a/configs/det/det_mv3_pse.yml b/configs/det/det_mv3_pse.yml
index f80180ce7..4b8c4be2f 100644
--- a/configs/det/det_mv3_pse.yml
+++ b/configs/det/det_mv3_pse.yml
@@ -132,4 +132,4 @@ Eval:
     shuffle: False
     drop_last: False
     batch_size_per_card: 1 # must be 1
-    num_workers: 8
\ No newline at end of file
+    num_workers: 8
diff --git a/configs/det/det_r50_drrg_ctw.yml b/configs/det/det_r50_drrg_ctw.yml
index f67c926f3..f56ac3963 100755
--- a/configs/det/det_r50_drrg_ctw.yml
+++ b/configs/det/det_r50_drrg_ctw.yml
@@ -130,4 +130,4 @@ Eval:
     shuffle: False
     drop_last: False
     batch_size_per_card: 1 # must be 1
-    num_workers: 2
\ No newline at end of file
+    num_workers: 2
diff --git a/configs/det/det_r50_vd_dcn_fce_ctw.yml b/configs/det/det_r50_vd_dcn_fce_ctw.yml
index 3a4075b32..5e851d1ac 100755
--- a/configs/det/det_r50_vd_dcn_fce_ctw.yml
+++ b/configs/det/det_r50_vd_dcn_fce_ctw.yml
@@ -136,4 +136,4 @@ Eval:
     shuffle: False
     drop_last: False
     batch_size_per_card: 1 # must be 1
-    num_workers: 2
\ No newline at end of file
+    num_workers: 2
diff --git a/configs/det/det_r50_vd_east.yml b/configs/det/det_r50_vd_east.yml
index af90ef0ad..5a488ddb0 100644
--- a/configs/det/det_r50_vd_east.yml
+++ b/configs/det/det_r50_vd_east.yml
@@ -105,4 +105,4 @@ Eval:
     shuffle: False
     drop_last: False
     batch_size_per_card: 1 # must be 1
-    num_workers: 2
\ No newline at end of file
+    num_workers: 2
diff --git a/configs/det/det_r50_vd_pse.yml b/configs/det/det_r50_vd_pse.yml
index 1a971564f..408e16d11 100644
--- a/configs/det/det_r50_vd_pse.yml
+++ b/configs/det/det_r50_vd_pse.yml
@@ -131,4 +131,4 @@ Eval:
     shuffle: False
     drop_last: False
     batch_size_per_card: 1 # must be 1
-    num_workers: 8
\ No newline at end of file
+    num_workers: 8
diff --git a/configs/det/det_r50_vd_sast_totaltext.yml b/configs/det/det_r50_vd_sast_totaltext.yml
index 44a0766b1..557ff8bf0 100755
--- a/configs/det/det_r50_vd_sast_totaltext.yml
+++ b/configs/det/det_r50_vd_sast_totaltext.yml
@@ -105,4 +105,4 @@ Eval:
     shuffle: False
     drop_last: False
     batch_size_per_card: 1 # must be 1
-    num_workers: 2
\ No newline at end of file
+    num_workers: 2
diff --git a/configs/kie/vi_layoutxlm/re_vi_layoutxlm_xfund_zh_udml.yml b/configs/kie/vi_layoutxlm/re_vi_layoutxlm_xfund_zh_udml.yml
index eda9fcddb..eda3a2b97 100644
--- a/configs/kie/vi_layoutxlm/re_vi_layoutxlm_xfund_zh_udml.yml
+++ b/configs/kie/vi_layoutxlm/re_vi_layoutxlm_xfund_zh_udml.yml
@@ -173,5 +173,3 @@ Eval:
     drop_last: False
     batch_size_per_card: 8
     num_workers: 8
-
-
diff --git a/configs/rec/multi_language/generate_multi_language_configs.py b/configs/rec/multi_language/generate_multi_language_configs.py
index e41be8f77..d4b0b5131 100644
--- a/configs/rec/multi_language/generate_multi_language_configs.py
+++ b/configs/rec/multi_language/generate_multi_language_configs.py
@@ -198,9 +198,9 @@ class ArgsParser(ArgumentParser):
             lang = "cyrillic"
         elif lang in devanagari_lang:
             lang = "devanagari"
-        global_config["Global"][
-            "character_dict_path"
-        ] = "ppocr/utils/dict/{}_dict.txt".format(lang)
+        global_config["Global"]["character_dict_path"] = (
+            "ppocr/utils/dict/{}_dict.txt".format(lang)
+        )
         global_config["Global"]["save_model_dir"] = "./output/rec_{}_lite".format(lang)
         global_config["Train"]["dataset"]["label_file_list"] = [
             "train_data/{}_train.txt".format(lang)
diff --git a/configs/rec/rec_satrn.yml b/configs/rec/rec_satrn.yml
index 8ed688b65..376c2ccfe 100644
--- a/configs/rec/rec_satrn.yml
+++ b/configs/rec/rec_satrn.yml
@@ -114,4 +114,3 @@ Eval:
     batch_size_per_card: 128 
     num_workers: 4
     use_shared_memory: False
-
diff --git a/configs/sr/sr_telescope.yml b/configs/sr/sr_telescope.yml
index 33d07e8f2..ed257251c 100644
--- a/configs/sr/sr_telescope.yml
+++ b/configs/sr/sr_telescope.yml
@@ -81,4 +81,3 @@ Eval:
     drop_last: False
     batch_size_per_card: 16
     num_workers: 4
-
diff --git a/configs/sr/sr_tsrn_transformer_strock.yml b/configs/sr/sr_tsrn_transformer_strock.yml
index c8c308c43..627bf24a4 100644
--- a/configs/sr/sr_tsrn_transformer_strock.yml
+++ b/configs/sr/sr_tsrn_transformer_strock.yml
@@ -82,4 +82,3 @@ Eval:
     drop_last: False
     batch_size_per_card: 16
     num_workers: 4
-
diff --git a/configs/table/table_master.yml b/configs/table/table_master.yml
index 125162f18..37cf44dec 100755
--- a/configs/table/table_master.yml
+++ b/configs/table/table_master.yml
@@ -142,4 +142,4 @@ Eval:
     shuffle: False
     drop_last: False
     batch_size_per_card: 10
-    num_workers: 8
\ No newline at end of file
+    num_workers: 8
diff --git a/deploy/android_demo/.gitignore b/deploy/android_demo/.gitignore
index 93dcb2935..d77f574d8 100644
--- a/deploy/android_demo/.gitignore
+++ b/deploy/android_demo/.gitignore
@@ -6,4 +6,3 @@
 /build
 /captures
 .externalNativeBuild
-
diff --git a/deploy/android_demo/app/build.gradle b/deploy/android_demo/app/build.gradle
index 2607f32ec..00fae70d2 100644
--- a/deploy/android_demo/app/build.gradle
+++ b/deploy/android_demo/app/build.gradle
@@ -90,4 +90,4 @@ task downloadAndExtractArchives(type: DefaultTask) {
         }
     }
 }
-preBuild.dependsOn downloadAndExtractArchives
\ No newline at end of file
+preBuild.dependsOn downloadAndExtractArchives
diff --git a/deploy/android_demo/app/src/main/AndroidManifest.xml b/deploy/android_demo/app/src/main/AndroidManifest.xml
index 133f35703..fef3a396f 100644
--- a/deploy/android_demo/app/src/main/AndroidManifest.xml
+++ b/deploy/android_demo/app/src/main/AndroidManifest.xml
@@ -35,4 +35,4 @@
         </provider>
     </application>
 
-</manifest>
\ No newline at end of file
+</manifest>
diff --git a/deploy/android_demo/app/src/main/cpp/CMakeLists.txt b/deploy/android_demo/app/src/main/cpp/CMakeLists.txt
index 742786ad6..39b710262 100644
--- a/deploy/android_demo/app/src/main/cpp/CMakeLists.txt
+++ b/deploy/android_demo/app/src/main/cpp/CMakeLists.txt
@@ -114,4 +114,4 @@ add_custom_command(
         COMMAND
         ${CMAKE_COMMAND} -E copy
         ${PaddleLite_DIR}/cxx/libs/${ANDROID_ABI}/libhiai_ir_build.so
-        ${CMAKE_LIBRARY_OUTPUT_DIRECTORY}/libhiai_ir_build.so)
\ No newline at end of file
+        ${CMAKE_LIBRARY_OUTPUT_DIRECTORY}/libhiai_ir_build.so)
diff --git a/deploy/android_demo/app/src/main/cpp/ocr_cls_process.cpp b/deploy/android_demo/app/src/main/cpp/ocr_cls_process.cpp
index 141b5157a..c3434d9cf 100644
--- a/deploy/android_demo/app/src/main/cpp/ocr_cls_process.cpp
+++ b/deploy/android_demo/app/src/main/cpp/ocr_cls_process.cpp
@@ -42,4 +42,4 @@ cv::Mat cls_resize_img(const cv::Mat &img) {
                        cv::BORDER_CONSTANT, {0, 0, 0});
   }
   return resize_img;
-}
\ No newline at end of file
+}
diff --git a/deploy/android_demo/app/src/main/cpp/ocr_cls_process.h b/deploy/android_demo/app/src/main/cpp/ocr_cls_process.h
index 1c30ee107..17480a92d 100644
--- a/deploy/android_demo/app/src/main/cpp/ocr_cls_process.h
+++ b/deploy/android_demo/app/src/main/cpp/ocr_cls_process.h
@@ -20,4 +20,4 @@
 
 extern const std::vector<int> CLS_IMAGE_SHAPE;
 
-cv::Mat cls_resize_img(const cv::Mat &img);
\ No newline at end of file
+cv::Mat cls_resize_img(const cv::Mat &img);
diff --git a/deploy/android_demo/app/src/main/cpp/ocr_crnn_process.h b/deploy/android_demo/app/src/main/cpp/ocr_crnn_process.h
index 0346afe45..6ce812805 100644
--- a/deploy/android_demo/app/src/main/cpp/ocr_crnn_process.h
+++ b/deploy/android_demo/app/src/main/cpp/ocr_crnn_process.h
@@ -17,4 +17,4 @@ cv::Mat crnn_resize_img(const cv::Mat &img, float wh_ratio);
 template <class ForwardIterator>
 inline size_t argmax(ForwardIterator first, ForwardIterator last) {
   return std::distance(first, std::max_element(first, last));
-}
\ No newline at end of file
+}
diff --git a/deploy/android_demo/app/src/main/cpp/ocr_db_post_process.cpp b/deploy/android_demo/app/src/main/cpp/ocr_db_post_process.cpp
index 9816ea4ac..ec731dd02 100644
--- a/deploy/android_demo/app/src/main/cpp/ocr_db_post_process.cpp
+++ b/deploy/android_demo/app/src/main/cpp/ocr_db_post_process.cpp
@@ -339,4 +339,4 @@ filter_tag_det_res(const std::vector<std::vector<std::vector<int>>> &o_boxes,
     root_points.push_back(boxes[n]);
   }
   return root_points;
-}
\ No newline at end of file
+}
diff --git a/deploy/android_demo/app/src/main/cpp/ocr_db_post_process.h b/deploy/android_demo/app/src/main/cpp/ocr_db_post_process.h
index 327da36ce..6d847627a 100644
--- a/deploy/android_demo/app/src/main/cpp/ocr_db_post_process.h
+++ b/deploy/android_demo/app/src/main/cpp/ocr_db_post_process.h
@@ -10,4 +10,4 @@ boxes_from_bitmap(const cv::Mat &pred, const cv::Mat &bitmap);
 
 std::vector<std::vector<std::vector<int>>>
 filter_tag_det_res(const std::vector<std::vector<std::vector<int>>> &o_boxes,
-                   float ratio_h, float ratio_w, const cv::Mat &srcimg);
\ No newline at end of file
+                   float ratio_h, float ratio_w, const cv::Mat &srcimg);
diff --git a/deploy/android_demo/app/src/main/cpp/ocr_ppredictor.cpp b/deploy/android_demo/app/src/main/cpp/ocr_ppredictor.cpp
index 277ec80f1..95b86a212 100644
--- a/deploy/android_demo/app/src/main/cpp/ocr_ppredictor.cpp
+++ b/deploy/android_demo/app/src/main/cpp/ocr_ppredictor.cpp
@@ -351,4 +351,4 @@ float OCR_PPredictor::postprocess_rec_score(const PredictorOutput &res) {
 }
 
 NET_TYPE OCR_PPredictor::get_net_flag() const { return NET_OCR; }
-} // namespace ppredictor
\ No newline at end of file
+} // namespace ppredictor
diff --git a/deploy/android_demo/app/src/main/cpp/ppredictor.cpp b/deploy/android_demo/app/src/main/cpp/ppredictor.cpp
index a4725017b..10b8d7e96 100644
--- a/deploy/android_demo/app/src/main/cpp/ppredictor.cpp
+++ b/deploy/android_demo/app/src/main/cpp/ppredictor.cpp
@@ -96,4 +96,4 @@ std::vector<PredictorOutput> PPredictor::infer() {
 }
 
 NET_TYPE PPredictor::get_net_flag() const { return (NET_TYPE)_net_flag; }
-} // namespace ppredictor
\ No newline at end of file
+} // namespace ppredictor
diff --git a/deploy/android_demo/app/src/main/cpp/predictor_input.cpp b/deploy/android_demo/app/src/main/cpp/predictor_input.cpp
index 035be764d..2d5666f7c 100644
--- a/deploy/android_demo/app/src/main/cpp/predictor_input.cpp
+++ b/deploy/android_demo/app/src/main/cpp/predictor_input.cpp
@@ -25,4 +25,4 @@ void PredictorInput::set_data(const float *input_data, int input_float_len) {
   float *input_raw_data = get_mutable_float_data();
   memcpy(input_raw_data, input_data, input_float_len * sizeof(float));
 }
-} // namespace ppredictor
\ No newline at end of file
+} // namespace ppredictor
diff --git a/deploy/android_demo/app/src/main/cpp/predictor_output.cpp b/deploy/android_demo/app/src/main/cpp/predictor_output.cpp
index 43ef68931..2176ef502 100644
--- a/deploy/android_demo/app/src/main/cpp/predictor_output.cpp
+++ b/deploy/android_demo/app/src/main/cpp/predictor_output.cpp
@@ -23,4 +23,4 @@ int64_t PredictorOutput::get_size() const {
 const std::vector<int64_t> PredictorOutput::get_shape() const {
   return _tensor->shape();
 }
-} // namespace ppredictor
\ No newline at end of file
+} // namespace ppredictor
diff --git a/deploy/android_demo/app/src/main/cpp/preprocess.cpp b/deploy/android_demo/app/src/main/cpp/preprocess.cpp
index e99b2cd18..f3b29ecde 100644
--- a/deploy/android_demo/app/src/main/cpp/preprocess.cpp
+++ b/deploy/android_demo/app/src/main/cpp/preprocess.cpp
@@ -79,4 +79,4 @@ void neon_mean_scale(const float *din, float *dout, int size,
     *(dout_c1++) = (*(din++) - mean[1]) * scale[1];
     *(dout_c2++) = (*(din++) - mean[2]) * scale[2];
   }
-}
\ No newline at end of file
+}
diff --git a/deploy/android_demo/app/src/main/res/mipmap-anydpi-v26/ic_launcher.xml b/deploy/android_demo/app/src/main/res/mipmap-anydpi-v26/ic_launcher.xml
index eca70cfe5..6b78462d6 100644
--- a/deploy/android_demo/app/src/main/res/mipmap-anydpi-v26/ic_launcher.xml
+++ b/deploy/android_demo/app/src/main/res/mipmap-anydpi-v26/ic_launcher.xml
@@ -2,4 +2,4 @@
 <adaptive-icon xmlns:android="http://schemas.android.com/apk/res/android">
     <background android:drawable="@drawable/ic_launcher_background" />
     <foreground android:drawable="@drawable/ic_launcher_foreground" />
-</adaptive-icon>
\ No newline at end of file
+</adaptive-icon>
diff --git a/deploy/android_demo/app/src/main/res/mipmap-anydpi-v26/ic_launcher_round.xml b/deploy/android_demo/app/src/main/res/mipmap-anydpi-v26/ic_launcher_round.xml
index eca70cfe5..6b78462d6 100644
--- a/deploy/android_demo/app/src/main/res/mipmap-anydpi-v26/ic_launcher_round.xml
+++ b/deploy/android_demo/app/src/main/res/mipmap-anydpi-v26/ic_launcher_round.xml
@@ -2,4 +2,4 @@
 <adaptive-icon xmlns:android="http://schemas.android.com/apk/res/android">
     <background android:drawable="@drawable/ic_launcher_background" />
     <foreground android:drawable="@drawable/ic_launcher_foreground" />
-</adaptive-icon>
\ No newline at end of file
+</adaptive-icon>
diff --git a/deploy/android_demo/app/src/main/res/values/arrays.xml b/deploy/android_demo/app/src/main/res/values/arrays.xml
index 54bb6e28d..9805070a4 100644
--- a/deploy/android_demo/app/src/main/res/values/arrays.xml
+++ b/deploy/android_demo/app/src/main/res/values/arrays.xml
@@ -64,4 +64,4 @@
         <item>识别</item>
         <item>分类</item>
     </string-array>
-</resources>
\ No newline at end of file
+</resources>
diff --git a/deploy/android_demo/app/src/main/res/values/strings.xml b/deploy/android_demo/app/src/main/res/values/strings.xml
index 6ee1f30f3..38b9a78b6 100644
--- a/deploy/android_demo/app/src/main/res/values/strings.xml
+++ b/deploy/android_demo/app/src/main/res/values/strings.xml
@@ -17,4 +17,3 @@
     <string name="DET_LONG_SIZE_DEFAULT">960</string>
     <string name="SCORE_THRESHOLD_DEFAULT">0.1</string>
 </resources>
-
diff --git a/deploy/android_demo/app/src/main/res/xml/file_paths.xml b/deploy/android_demo/app/src/main/res/xml/file_paths.xml
index 3d985443b..06482689d 100644
--- a/deploy/android_demo/app/src/main/res/xml/file_paths.xml
+++ b/deploy/android_demo/app/src/main/res/xml/file_paths.xml
@@ -1,4 +1,4 @@
 <?xml version="1.0" encoding="utf-8"?>
 <paths xmlns:android="http://schemas.android.com/apk/res/android">
     <external-files-path name="my_images" path="Pictures" />
-</paths>
\ No newline at end of file
+</paths>
diff --git a/deploy/android_demo/app/src/test/java/com/baidu/paddle/lite/demo/ocr/ExampleUnitTest.java b/deploy/android_demo/app/src/test/java/com/baidu/paddle/lite/demo/ocr/ExampleUnitTest.java
index d523a9a70..ea4d51f5c 100644
--- a/deploy/android_demo/app/src/test/java/com/baidu/paddle/lite/demo/ocr/ExampleUnitTest.java
+++ b/deploy/android_demo/app/src/test/java/com/baidu/paddle/lite/demo/ocr/ExampleUnitTest.java
@@ -14,4 +14,4 @@ public class ExampleUnitTest {
     public void addition_isCorrect() {
         assertEquals(4, 2 + 2);
     }
-}
\ No newline at end of file
+}
diff --git a/deploy/avh/.gitignore b/deploy/avh/.gitignore
index faeba235a..a2f78a2b9 100644
--- a/deploy/avh/.gitignore
+++ b/deploy/avh/.gitignore
@@ -2,4 +2,4 @@ include/inputs.h
 include/outputs.h
 
 __pycache__/
-build/
\ No newline at end of file
+build/
diff --git a/deploy/avh/README.md b/deploy/avh/README.md
index b40933ecf..463861f4c 100644
--- a/deploy/avh/README.md
+++ b/deploy/avh/README.md
@@ -44,7 +44,7 @@ Case 3: If the demo is not run in the ci_cpu Docker container, then you will nee
     pip install -r ./requirements.txt
     ```
 
-In case2 and case3:  
+In case2 and case3:
 
 You will need to update your PATH environment variable to include the path to cmake 3.19.5 and the FVP.
 For example if you've installed these in ```/opt/arm``` , then you would do the following:
diff --git a/deploy/avh/configure_avh.sh b/deploy/avh/configure_avh.sh
index 8042fd81d..478927170 100755
--- a/deploy/avh/configure_avh.sh
+++ b/deploy/avh/configure_avh.sh
@@ -76,4 +76,4 @@ echo -e "\e[36mArm(R) Arm(R) GNU Toolchain Installation SUCCESS\e[0m"
 # Install TVM from TLCPack
 echo -e "\e[36mStart installing TVM\e[0m"
 pip install tlcpack-nightly -f https://tlcpack.ai/wheels
-echo -e "\e[36mTVM Installation SUCCESS\e[0m"
\ No newline at end of file
+echo -e "\e[36mTVM Installation SUCCESS\e[0m"
diff --git a/deploy/cpp_infer/docs/windows_vs2019_build.md b/deploy/cpp_infer/docs/windows_vs2019_build.md
index 5a27d7990..94205d9a0 100644
--- a/deploy/cpp_infer/docs/windows_vs2019_build.md
+++ b/deploy/cpp_infer/docs/windows_vs2019_build.md
@@ -45,7 +45,7 @@ paddle_inference
 
 #### 1.2.2 安装配置OpenCV
 
-1. 在OpenCV官网下载适用于Windows平台的Opencv, [下载地址](https://github.com/opencv/opencv/releases)  
+1. 在OpenCV官网下载适用于Windows平台的Opencv, [下载地址](https://github.com/opencv/opencv/releases)
 2. 运行下载的可执行文件,将OpenCV解压至指定目录,如`D:\projects\cpp\opencv`
 
 #### 1.2.3 下载PaddleOCR代码
diff --git a/deploy/cpp_infer/external-cmake/auto-log.cmake b/deploy/cpp_infer/external-cmake/auto-log.cmake
index c998b3b14..7201d85cd 100644
--- a/deploy/cpp_infer/external-cmake/auto-log.cmake
+++ b/deploy/cpp_infer/external-cmake/auto-log.cmake
@@ -11,4 +11,3 @@ FetchContent_Declare(
   GIT_TAG        main
 )
 FetchContent_MakeAvailable(extern_Autolog)
-
diff --git a/deploy/cpp_infer/include/args.h b/deploy/cpp_infer/include/args.h
index e6e76ef92..31c8c26cc 100644
--- a/deploy/cpp_infer/include/args.h
+++ b/deploy/cpp_infer/include/args.h
@@ -65,4 +65,4 @@ DECLARE_bool(det);
 DECLARE_bool(rec);
 DECLARE_bool(cls);
 DECLARE_bool(table);
-DECLARE_bool(layout);
\ No newline at end of file
+DECLARE_bool(layout);
diff --git a/deploy/cpp_infer/include/ocr_det.h b/deploy/cpp_infer/include/ocr_det.h
index 9f6f25205..ca069c5ad 100644
--- a/deploy/cpp_infer/include/ocr_det.h
+++ b/deploy/cpp_infer/include/ocr_det.h
@@ -97,4 +97,4 @@ private:
   DBPostProcessor post_processor_;
 };
 
-} // namespace PaddleOCR
\ No newline at end of file
+} // namespace PaddleOCR
diff --git a/deploy/cpp_infer/include/paddlestructure.h b/deploy/cpp_infer/include/paddlestructure.h
index 9ae54f48f..6377f5b74 100644
--- a/deploy/cpp_infer/include/paddlestructure.h
+++ b/deploy/cpp_infer/include/paddlestructure.h
@@ -63,4 +63,4 @@ private:
   }
 };
 
-} // namespace PaddleOCR
\ No newline at end of file
+} // namespace PaddleOCR
diff --git a/deploy/cpp_infer/include/preprocess_op.h b/deploy/cpp_infer/include/preprocess_op.h
index 0b2e18330..7f66a5c45 100644
--- a/deploy/cpp_infer/include/preprocess_op.h
+++ b/deploy/cpp_infer/include/preprocess_op.h
@@ -79,4 +79,4 @@ public:
                    const int w);
 };
 
-} // namespace PaddleOCR
\ No newline at end of file
+} // namespace PaddleOCR
diff --git a/deploy/cpp_infer/include/structure_layout.h b/deploy/cpp_infer/include/structure_layout.h
index 3dd605720..d1f488861 100644
--- a/deploy/cpp_infer/include/structure_layout.h
+++ b/deploy/cpp_infer/include/structure_layout.h
@@ -75,4 +75,4 @@ private:
   PicodetPostProcessor post_processor_;
 };
 
-} // namespace PaddleOCR
\ No newline at end of file
+} // namespace PaddleOCR
diff --git a/deploy/cpp_infer/include/structure_table.h b/deploy/cpp_infer/include/structure_table.h
index 616e95d21..7664ec52a 100644
--- a/deploy/cpp_infer/include/structure_table.h
+++ b/deploy/cpp_infer/include/structure_table.h
@@ -83,4 +83,4 @@ private:
 
 }; // class StructureTableRecognizer
 
-} // namespace PaddleOCR
\ No newline at end of file
+} // namespace PaddleOCR
diff --git a/deploy/cpp_infer/include/utility.h b/deploy/cpp_infer/include/utility.h
index 7dfe03dd6..b2198f957 100644
--- a/deploy/cpp_infer/include/utility.h
+++ b/deploy/cpp_infer/include/utility.h
@@ -110,4 +110,4 @@ private:
   }
 };
 
-} // namespace PaddleOCR
\ No newline at end of file
+} // namespace PaddleOCR
diff --git a/deploy/cpp_infer/readme_ch.md b/deploy/cpp_infer/readme_ch.md
index 8d6669d03..838966b71 100644
--- a/deploy/cpp_infer/readme_ch.md
+++ b/deploy/cpp_infer/readme_ch.md
@@ -222,7 +222,7 @@ CUDNN_LIB_DIR=/your_cudnn_lib_dir
 **注意** ppocr默认使用`PP-OCRv3`模型,识别模型使用的输入shape为`3,48,320`, 如需使用旧版本的PP-OCR模型,则需要设置参数`--rec_img_h=32`。
 
 
-运行方式:  
+运行方式:
 ```shell
 ./build/ppocr [--param1] [--param2] [...]
 ```
diff --git a/deploy/cpp_infer/src/args.cpp b/deploy/cpp_infer/src/args.cpp
index 28066f0b2..42ef4cea6 100644
--- a/deploy/cpp_infer/src/args.cpp
+++ b/deploy/cpp_infer/src/args.cpp
@@ -73,4 +73,4 @@ DEFINE_bool(det, true, "Whether use det in forward.");
 DEFINE_bool(rec, true, "Whether use rec in forward.");
 DEFINE_bool(cls, false, "Whether use cls in forward.");
 DEFINE_bool(table, false, "Whether use table structure in forward.");
-DEFINE_bool(layout, false, "Whether use layout analysis in forward.");
\ No newline at end of file
+DEFINE_bool(layout, false, "Whether use layout analysis in forward.");
diff --git a/deploy/cpp_infer/src/utility.cpp b/deploy/cpp_infer/src/utility.cpp
index ea5ef1e4a..32643e620 100644
--- a/deploy/cpp_infer/src/utility.cpp
+++ b/deploy/cpp_infer/src/utility.cpp
@@ -422,4 +422,4 @@ float Utility::iou(std::vector<float> &box1, std::vector<float> &box2) {
   }
 }
 
-} // namespace PaddleOCR
\ No newline at end of file
+} // namespace PaddleOCR
diff --git a/deploy/docker/hubserving/cpu/Dockerfile b/deploy/docker/hubserving/cpu/Dockerfile
index ef1a7b7b1..030ce24fe 100644
--- a/deploy/docker/hubserving/cpu/Dockerfile
+++ b/deploy/docker/hubserving/cpu/Dockerfile
@@ -27,4 +27,4 @@ RUN tar xf /PaddleOCR/inference/{file}.tar -C /PaddleOCR/inference/
 
 EXPOSE 8868
 
-CMD ["/bin/bash","-c","hub install deploy/hubserving/ocr_system/ && hub serving start -m ocr_system"]
\ No newline at end of file
+CMD ["/bin/bash","-c","hub install deploy/hubserving/ocr_system/ && hub serving start -m ocr_system"]
diff --git a/deploy/docker/hubserving/sample_request.txt b/deploy/docker/hubserving/sample_request.txt
index ec2b25b1f..0b9ea369a 100644
--- a/deploy/docker/hubserving/sample_request.txt
+++ b/deploy/docker/hubserving/sample_request.txt
@@ -1 +1 @@
-curl -H "Content-Type:application/json" -X POST --data "{\"images\": [\"/9j/4AAQSkZJRgABAQEASABIAAD/2wBDAA4KCwwLCQ4MCwwQDw4RFSMXFRMTFSsfIRojMy02NTItMTA4P1FFODxNPTAxRmBHTVRWW1xbN0RjamNYalFZW1f/2wBDAQ8QEBUSFSkXFylXOjE6V1dXV1dXV1dXV1dXV1dXV1dXV1dXV1dXV1dXV1dXV1dXV1dXV1dXV1dXV1dXV1dXV1f/wAARCAQABAADASIAAhEBAxEB/8QAGwAAAgMBAQEAAAAAAAAAAAAAAgMBBAUABgf/xABQEAACAgEDAwIDBAUGDAQEBQUAAQIRAwQhMQUSQVFhEyJxBoGR0RQyobHBIzNykpPhBxUkNEJERVJTVHODFkNV8CZWYoIlNTZjZEZ0hKPx/8QAGgEAAwEBAQEAAAAAAAAAAAAAAAECAwQFBv/EACkRAQEBAQADAQACAwEAAQUBAQABAhEDITESBEEiMlETYRQjQlJxsTP/2gAMAwEAAhEDEQA/AKOnzcbmlgzXW55zT5uNzT0+bjc83U4hvY8nuW8c0zHw5rS3L2HJujMNFPYmxMJWg7GR0HvuVNRDdquCxF7kZ4XuvKAMLVY7tUVI4anwa+fFu9iusSvgV+KiMa7YGL13NbhhT5ds3pqo36HktbN5tZklyk6X3D8WffT17VaaYaQxwphKGxv0i2rVCpfK6Za7Cvq40k/uHAX3EOfuKv3Och8A2wHIFyAlIqQddOQlu2S22yC4TjjjhhJyOOEEnEIkCSiUiEEhBKCQKQSJCUSuSCUInN0mLDm96AGbjjjgDgWERW4whIOKshIZBbitB2NFjGIgixAw0Z0CxDgrwHxexlTOTCsUmc5UQDHPbkVkyKEW2wZTSKOpzW+1P6l5z2laGWRzm5NhwnXkrJhpm1iV/HPjct4sleTMxzqi1jmZ6NpwmmWIOzNxz9y5jyGZrkRqK8JpjUyaqHI6iEyb2M6bm6CixTZ0WR0lzG7QyyrCVDVO/I5oLMJbDk7KkZUx8JJh01iA5MTDgaiouCTObOvYBvcKBWdYFnNkmJvYVJhNipy2JpFZJbFDNO5clnNOkzOnO5vc59+6ztOvYBsFSBcgxB0TYtshsBs3kHXSdiJ7ByYmcjWAExE2HNipM1kJXzCo8jcu6FpUyiXMD3NTA9kZGF7mnp3sjm8kONPC+C5BlDC+C9jdnHqNIswQ6IqHCHx4M+KEkEkciUPgcQ+AgWIAYDGMBoCA42LcRr2OaTHKSu0RQ5oGhkXRFDaIaKlBbiA4jqIaNpSIaIoa0DRtmpsDVHJBUC0dGamxxBJ1GvSC0QG0RQupRRFBUTQugNEUHR1FSgANBNUQaQnEM5ugGyglshsFshsmnwae4aYhS3GRkZbpmp7HNgp7HNnNrS3zrDkprc0NPm43MeEuC1hyNNbno6gehwZeNzQwZLrcwdPlutzS0+Tjc57A9BhlaHWUNLktLc0YpNCgRF7jm7iV5pwe/AUZ3tYEHLjTXAhYty09yEtxU4z9fF49NOS5rY8qsLTuS3PZa+HdBQ/Ex8mmp8F59KY6wtu2F8L2NB4WvBHwt+CujjPljrwUteqw37m1PFtwZHWEowjC927Kz7pa+MruIciGjjckNgu2FR1MACjqD7SaDoLo6hnbsQ4h0AOCcQaoA5EojclASUEgUEhBKCQKCEEkohHN0mIAbt2cccMOOOOAOOOCSAOirHRREIjkiLQ6KHwQuKGpGVMxDE6FoKzOgxPYiU6QtypCcmQJB12bLSdMpttu35CyTbYKN8ziUoJEIJIAOLLGOZWQyLoihexzLMJ0Z8JlnHMzsNoY8nuWYTvyZ2OZYx5K8kVXV+MrDcqRTWWhinfky0Omt2TF0wE7CT3MdULC4sJMVB7DER04YmPxt2IXI/Gi801uD2GpiYvYNM2izGwWyLIbGYrIbBsFsQS
2JyT2JnKkVs2Sk9yLE1X1WSk9zP77Yerzcqyl37mX56ytXFP3Ocr8laOQPv2CZ9jprkC2LcyHI1kHUylsJkwmxTZrIYJMU2HJsVJmkIufIJ0mRYyNxumaGnnwZcXTLmDJvTMtzo62sMuC/hfBlaed0aWF7I49Z9tJV/Gx8HsVcbHwZncrPiEAmEmLhpOOIYcJDBYTBZNgC0DQTIognNWC4hE2PoKaOSGtENDlBbiC0NohqzaaIhoBrcc0BJGkpF0QyXsC2b50moOObORvKTqJolIJIOp4XRNDO06hdHAVQLCYLdFSjgGwWyWxbkayk5sW5EOQqU9y+gbkC5iXP3AeT3JtCx37hxmUXk9w8eUy2caCmS5bFWOT3GKVo4t3i4+dRssYk7FY4Nst4sb2PZ1SPwtpF/BNpoq4sbos44NM57TbWlyVW5r6fKmt2edwTUUrdF/DnT4ZAbj7ZR5TF9lPZmf8RuqZYx5pPZsOhYpolbOzoST/AFnRLj8radgSvlanJ7rbaivPGmMyY3bdNCX3xezH05S5YUxcsKQyWSa5Qmed8dodPpc4KMW3skjzHUJvPncktlsjc1eTJki48LyZk9P7DzvhX3WRLG0LcaNPJgrwVZ46fBtN9CrQSQbhTOUSukFRJURiiEok9BPac4FhQvwc4C/QVnGgHEtOApwKmgrtHUNcfYGi+gCCOSJoCcggUEIOOk9qOIfIBBxxwBxx1EgHJWMhHyRFWx8I7EWhKQaRyQxRM7TdFDEjkqCSJod4OJIeyJBeRlfJLahuR0ys3bNMxKDkckEiw5INApBIkJRKZCJQgbGVDYzEIJWibCW45GPhkooRkx8JEWGvQnbLOOZn457lrHKzHRrkXYxMrxkNi7ObRn43vQ5FeDpliLtEHDI8lrEitBblrHwaZVDkGmAgrNosVnNkENjCGwG6Jk6FTlQAE5bFDU5KTLU5bGZq50mHEarN1Oe5tWKWS/InK7yP6nJOx/mcYrSmMUyvBMakyOGb3HNgKwqsqKQ3YErGNASRcBMuBU2OkhMlsUCWwLDkqFspJkWOg2mmV0w1ImwNbS5baRsad2jzukb70b+nfyHNrPteV+DLEGU8b3LEGZ3LSLSYaYiLGJkfkzbOsFM6xWBzIfBNkMz1CCzjmcY0Oo6jiaEEJBpJnJBJDgKaBdoe0hbRYLaT4FyQxqhba8mmdETJC2Pkr4EyVG2dEg5AhI3mk0cdw6Big0i+k6iGgqIfAdBbQqew6QmfDDoJlITKZOR0ytkyV5LmiFKdCMmRIDJkq9ynky+5pNEfLN7inm9yrLMr5FPMvUq0Ljy7hwy7rcz/AIy8BwyNmej418eS+CxjkZeHIXMU7OLyQ48xjw+xdw4XtsOw6e62LsMKiraPSulEY8Xqthk6gtuQsk1BUt2Iacnb3IItzld2WdPntpN0xLx2gFFxewybunm3yy5H1T3MrSTlSVmhCTrcztNa76QKyu9mIlOlyBHKm+SoVi58RtU3YLVgQnY1NMVIl478CZ4b8F5RTBeOybQy8mBVwVMmn9jani9ivkw+xn+jYWTD7FLNhq9jdzYavYz80N2VnZsbJjp8AUXs2PkquNM6c67ABIfhwTyzUYpts7DieSSSVtnrugdH3U8kd36i1r/hydVNB9nZZIJuLbZQ6z02WgyxjKLXdxa5PoGs1uj6RpO/PNR22S3cn6JeWfPuqdRy9T1Us+VNK6hC/wBVen1J+fT1yeoy3ABx9iw4guJUqFSUPYW4FxxFygXNEquNHUOcAXD2L6CqOoY4kND6AENbhtEND6A0cTRNAAhJHJDIRthaE44eSwokQjSGxRjablEYokqIxIi0wpHUGkdRIA1QEnsMkqFTajFtjhKuaW9IUiW3KTbOSN56iXJEpHJBJCDkgkiEgiScSkQkGkAckEkcgkiQlIbFC0hsUTQbj8FrGV4ItY0c+zh8EPiheNFiK2ObSkpDoOgEglsR01jG9y1B7FLGyzGZrk4sJhJ
iVIJM2ipTbObAshsqKRNiJsKctxWR7BwrSsktmZOtns0aOSWzMnVu20NnpnqLbHQh7BQx+xYhAKgEMfsNWO/A7Hj24Hxxkq4qrES8deC4sfsc8fsB8UXBguBeeP2FSx+xQ4oyh7CJw5L84CZwKg4zskaENUy/khyUsiouJpbdDIO2Ka3DxrcdhNHRr5l9Tf06+RGFol8yN/AqgjC/VZPWzHQYpKxiTTJuWkWIsNMTGxqIuTNTJTATCRnYBWcyES+DHUAWcjmckc1CUgkjkgkqAOSDRCRI4ESFy4GNC5IsEzFSHSFSHAW20C6YTFsvN4kLjTOQSd7M6lyjaaI2C2CoHGxlGsoRQLQdENFygiXInI+R0+RE+BwKefhmdmyNNps0c3DMjVurKiScuar3KWbNV7gZsrbaRWdt22b5z/0CllbewKbb3ZFEpGhGxY6DqhER0DPQW8bplzDIoY2W8T2OXcNchiUFwBkl4Q+bb2SpCnD2N+qV+ze2d2V4LHaC4h0EqByxptUhqQ3DFNitIenx9tbF1R22QuMapFrHC0QqRnarI4Qe+7K2PM7VssdUxytNLZclCCaNp8TfrUxZdluWoZLMrHJplrHNqiaTShJMYnZTxz9yxCRjSMaTQrJBU9hqdoGStGdps7PDnYzNRjpvY280dmZuqhswl9mxMqqys4OU6SLmog+50i70npstTlTadWdUvIa10HpTyTWSa29z1up1Gm6RoJZ8u0YrZLlvwl7k6XT4tJpnKbUYwVtvZJep4XrvVp9U1dptafG2sSfn1bXv49hyf2q38xV1+tz9S1ctTqXcntGN2oL0RXSOSCSFagNENDKIaDoJaAaHtAtFSkQ4gOJYcQXEqaCu4kND3EFoqaBDQDQ9oBouUi69jqDaOSH00KNsdCFeDoQ3Hxj7Ea0ERiOiiFEYkZ2mlIYlsckFRBho5oLgGTSAiplLUZLfYnsuR2py9qaT3KSbbtm2J/abXIJI5IJFUnJEpHJEiCTkcgkhE5IJIhIJLYQSluEkckEkSEpDIIFIbBbk0HQRaxoTjRaxo59qh+NbFiKFQWw6K2OeriUgqJSJog3R2Y5MUluNRpgjExiYpOkSmbwzb2BlLYGwZPYo+hbsXke24disjVDJVzSqzOyK5F/Km2yp23IEUMIew+MPYmER0YioTCGy2LEYX4OxxVD4QEotY/Y5wvwWVDbg5wvwUao8fsKlD2Lzxi5Y/YAz54yvOHsaM8fOxXyQ9ioGZmhSexn5IbtmxmhaM/JCmXEVTcfY6Cpj5QoFKmFqWhoo7o3cK2Rj6GPBtYVwjFeViEdhiR0VsGluNaUg0iIxGpE0wpBJBUdRjonENhEMw3QElIitw4o5DEkEkckShwOo444oOfAuQbYtjBUhMuR0hUg6CmwGrDaA4H1IeAkyaOoP1wCT3Hp2hCDg6Ztjy/8ASMpkNDFUlaOaOrNlNWmitkVJl2aKuWDa4NIGbn4dGXqYOVm3lx87FLNiu9jSJ485nxU2ys0bWow2nsZmSDjJqjXNIijg2qBrcoCiNiLSoZEikfB0yzjfBUgyzjfBhuBuLH7AyxpIa3QuUti1lNJITOhk5CXK2BO8jcTp2KW7HQVIVC3jkm0XcdJXZmwaTsa8m1W0iOrl4nWtTtLcqw06rdBzyJKxa1K8sf6TRPAqOimmHHLGXkaqfoy+hEG0yxjlYtY74DUWmTqdLiwmSxaYzwZWEVNJplDUwtOjRkrRXyY3J8DmVRkYdI8+dRSu2ey6ZoIabCtt/JW6ToFB/Ekt3x7Iq/avrMtLheh0cqzzXzzX/lxfp7v9i39DfMV89sv7VdcWpnLp+kn/ACMZVlkntNrwvZPn1Z5pMDjaqoJMqo6NBoWmEmSBHHJnEhzQLQdHUPoLohxG0dQdBLiA4lhxAcSpolZxBase4gOPsXKCGgoxsY4B44Dug6EKSHKHsFGA1RM7TAoBpUEokpC6YUibCoFoAGToTOTpjZIp6vJ2R7Vyysz
pX0q5p903T2QKQKDRv8QlBpbkJBJEhKRyRKQSQiQkciUgkhdDkgkjkg4xJDkhiiSojIxEaFGxuOBMYDYImgcIljGhcEWIIx0o2CHRFRQ6JhVQaCohBGdNCW4aBC4ReQmzkwG9zrNoRtgzlQNsCVtlw0uQqbbGJAzVIoqrSVpi1DexvNhRhYkgjEZGIyOMaoewK47Ei1COwmEaZaxoUNyjvwF2hpBKIzIcAJQLTj7C5RHw1HJArTgaGSHsVskByEoZIWihkx7vY1skCrlx+xSaypwoUo7lzNDfgRGHzfeTWdaWijsjVxqqM/RRqjSiuCGmfi5CmhiW4nG6SLEGnVk3ShRiNUSYpDEkjK6kBbVA0MaIaMtb6AUQwmRRhrXQFLcNI5INIyNxJBIwg5nENgAtgSYTYEgAHyLkMYth0i2gaGtEULpBUSVENIlInpg7DlEakT2ocoBBtMekpK0LqgoumdHi8nPRBlATOBbaTVoVKJ35oUMmO72KeXHzsauSBUyw5NYGPnxWnsZGrw1bSPR5cfsZerxWnsXKmsJrwCkOyxqbQuty5UuSDRCVEoVBkfBYxvgrofj8GOibkpipz2Ankor5MteTSQ+jyZAE7YjubZYwxsL6B+ONjkjscNixHGZWrhShsc00h7SSM3W61Y7hDd/uCZ6fqJ1GRQg02rZQedXyVc+bJkk3Jv2RWlOSNJ4kVqLVKPL/AGljDrk3s9jzk5Sb3ZOPI4PZtF/+U4HssGrT8l3HkjI8jg1UlW5qafWN7N0Z3FhvQpJ8M5ppbFDDqU1yWo5b4ZHDFfhj9Ph75q1e4uEPiSSSNTDCGDE8mRpJK234KzBBarOtDpHKKTyNVBer9/ZHjdRpp5JynNuc5tuUny2/J6DJqFrMrk9vCXohctKpb0jVX15LUaBrdL9hmzjLHJpqj3GXRpqqMbqHTbi2luLvEWMBMNMVkjLFNwlymdGVjsI9MNMUmMTIpiRKR0QkRQ5IlIlImhdAaAcR1ENB0KziA4lhxBcC5QSoWx0YUgoQGKIroBURiRKiEkLphSCoJI5oYAyJNUS9hc2OArNNRi2/Bk5JvJNt/cWNblbfYn9SqkdGJydRUxDSBSGJFUhJBJEJDEiLQlIlRCSCSI6AqJKiFQSQugKiMSOSGxjYrQiMR0YnRikNSEaEhkUcosbGJNppgixBC4RHRRnQYkGgY8BrkyqhoIFMkzsNzYSewts5PcIQvJJyVnUbQOOaOONIEeAJvZjKsCadDCuluPxrgXGLssQg9mMGxhaDUAsK3pof2Kg4ZKhTHwQPbvY3Gtw4BVuHGNnVuMithqLa2FSiWGgGhhVnErzgXZRETiPhKU4lXLDk0JoqZY8gTLzQ3ERh8xeywFQhvwRUVc0kWki9FcFbTxpFuKEqH4VaofFOxGLZotRRl5J1Y436jVwBFDEji1CdRDCBlwSAs6iLCTFZ0nJEnHEqSccQ2qEHNgNnNgtgENgtnNgtkkhgslsgXQiiUjqCS3AnJEpHJBJAaEiUjkgkgAWiBlbANDl4B434YUoilsyxH5o+56Hh32cJWyQK2SBflEr5IbHXAzMsPYz9RjtPY2MkFuUc0Nmi4VeZ1mOm3RUo2dbj2exjS2bTKiHJEpEJhJhQKI7EITHY3uZ6JbyZBDk2yJO2TCLb4NgZii2y/gxitPiutjSw4qXBlqrkFjhQ9RSW4UIJIr6vMscaT3fCI4v4r67UKEe2D3f7DFnBuTb3b5Ls25ybe7YLha4NZ6SzsmPbgrzh7GpPHsynkh7FSkz5woU0XMkBEo+xpKSMWRp1Zfw5OChDDOc6Sb+42tB0rPmaSg37JWTvUh8Nw5Xao1dI5zaVb2O03Qpxp5HGH1e/4I19P07HiaeOSk63/uMf1KfB6PAopNoo9S16yZXgxv8Ak4um15fp9w/q2remwLBhdZJrleF5ZhQT2K+CrsObWxpaWamkmZmHwX8Kppi/RxfenTVoo6rSJp7GrgdxSYWTEpLgOr5
18765oGk5xVNbnn4y9z6P1bSKWKSrlM+cZ4PHnnGuJNF4vfTKzlNjIbGRVixsZDuSWkw0yvGQ1MysM9MJCkw0zOwDpENHJnNiAWiErZLDih9DlEJRCSCSF00JBJEpBUUYKIaDaAeyHATIq6jIoQbfJan5MnWZHOdJ7I0xO1NV23KTb5ZyRyQSR0dJyQxEJBpEkJIZFAxQ2KM7QlINIlIJIkBo6hiRPaLpIih0UDBDlERuihkYnRiNSJ6aYwGxidBDEhG5INIhIYkTQlIk6jjOmlOie8GjqM6QnNe5yasVLYFSpkdLq0mSwIu0d3GuaYyVuL7g47s1hjSO7LDhG1wNjEuAhYqd0OjBDFAJRrwUYVGmPS2AS3GpbIfACgocktHRVMDMrcYlsAuQ0hhzQLQxoFoZkyQjIi1JCJoCVZorZY3ZelEr5IipM3JACEN+C1khuRjhuRU03FGkPigYRpDYoSoKOzsuY90VUixifCFqdij0g0tgUrDOTcCHwC+AmC+DCkU3TJTIkAnuIj07JATDsVhuIbJbAbIMLYDYTYDYqSG6BbObIJDjkcSlYglINI5IJIAGgkiaOGEUSjjkhhKRDVhImrQGVVMZjdMFomDpm3i1ylTpRtCMkS0t4iprk9TJKGSPJTzQNPJHkp5YqmawMPW4009jz2ojU2er1ULTR5vX46k2kOfWdUkwkxaYSdDpGJ2NxvdCUxkHuiNA9Jt0XMGFunQOHDbto0sOKkth60cgsGKq2L2KFLgHFj9hzahG2ZNJAZprHDky815JNssZZvLPnZcAONlT0FXs9jnDYsOCSAmkkMKs1syllStl3I+SnNNtlJqrODb2LGj6bk1M0oxbv2Lmg0Pxp3LaK5PTabHDDjUMcUvV+WZeTzfn1DkVND0LBp0pZvml6J/vZqRrGuzHFQj6LYNJqF+oF03fhHHrd0sUWlswlJpqmKW6JttilAs2n0+pd5ItT47k939RD6ZhrbI0/eP5D1LcNyvdfgbTdKqa0Tg9pRa9nX7xuODhVtfiFJ7i75L/AHSXMWaMOWizHVYmqt/gZKYakH7sOVc1EIaiDjFpv04PAdZ6NnwanLN432ttp0e5g73sbUckHjyRU4PamrQTzXNFnb18kljcHTRKdHuOtfZmM4yz6JXStw8/3nismN48jg0006aZ153Nz0mzgosbFiExkWFiVhMNMQmGmZWGcmTYtMJOyeAa3YyKBghqWxFMSQSWxCQSQzckTRKRzHAFi5cDGLlsrKgVdTPsg65ZlSTbt+S5qJOc36IS4extn0XCVE5RGqBKgV+i4BIYkSo+wxRJ6XERQ2KISGJEhKQSRyQaQg6jq2CSCS9hEGCpliC2FxjuOhsIxpBpWQqYaW5IMihiQMEMSBUcluGkQkFZNCTkjluSkkjOmitgWE2A2Z1IZMU3TDkwGZkfjlaDe6K+N0x1muKHDMbdi7GQdM2hruGmhqjuV8Mi1FplxUSlsSkEcluWHKNsZVI6KCZRhaQKW4TOXIASW41IBLcYhhzRDQdEUBlSQqSLDQqSAK0kIyRLbQmcdhFVOcPYiENx8obnRgTYTlGkGkEkSkTwOSHQ5QpKhsOSapZg9ggIMMw3AhgvgIFo5bCLmthfDGtCmqYiHFhp7C4sO0KhLYDZNgtmdMLYDZLYLZJIbOOOSJDkhqQKQ1IIbktiaJomigGjqCaIAIomjqOoDcEuAUShwOaBWzDkC+TTP0U7G9iZoDGxr3R6XjvYlVyRKmSHOxoSiV8kPY3gZOox2mef6lg2bSZ6vNju9jI1uDuTVchU2PIuDTJpl3NgcZvYV8Pfgf6QQkxsE7C7A1CmTaG7hw1Tou48fGx2LHxsWoQSVsn61kcoqCtlLUZXOTiuEx2pzf6EXv5KqjuMV0IBNJBpUrBkAKkV8jHzYie7oCtV5pt0kN0+geRqUlS9SxpdL3y7pLZGpCKpJKktkjPfk58JXw4VBUlVcGhhg2k3x6iEqkki3Dwlx4OXV6q
Gtqkk7oTJ0n6tpf8Av8B3lX4FZIUt/WyDCpUqOnJKKpbt8+iBStr08nNqTTTHPQFB7DE1W7S+oreqRL3W/gqUCat7CpNXQabi7TByKm2zWeyAEmqpANtkwTKI/FzbZZTpor49hyd72RqdUen9x8++12mjh65kcIpLJCOSlxbVP9qPoFppU79TxX2wTfVoeawR/ezX+P6tgvx5dbMNM6caYKdHX9ZnJhpiE6GKRNhnJjI8iIvcfBbGd9A6I5IVAcjPhjQUQUEhmkhknMDAyvqZ1ClyyzLZNlDK3ObfjwOBX7LOcByj7EuGxX6HFfsJUBvYEoh+iLUAuwYo7E9vsL9ClJBpE9pKVDlQJINIhDEgDlENQ2Jig0g6ApUSlRLVHAQkxikJQaewjWISHJlSLpjovYRnWSmLTCuvJJmJktinM5zIpdE2C2C3ZDZnSQ2QzmyGZ0kxe41OxCdBplZoOTCT3Fp2EnubQ1nFOmi7CdpGdBlnHOjTI7xcTDiyvGVjlI0V05NBXYpMK9ijSyUgVuw4oOgcUNSASDQ1RNHUSiSjLaFtD2hbQAhoVJFloVNCKqrRKiMcdzkiEho6g6IoQQFF7kM5OmTVHxYxMTB7IamYahDIfBKOZz6gKaFyW45oXJGJAiFYLRAf0BNgtnWC2Z00NkEPkkikhchpEJBpCMUUGlsQkGijdRKRxwwhkBUdQwA4JgsOE45MiyU7HIBWC3ucCzSQxRdMsxdopp0yzhdqjr8N/pImthU4WWKBcDthqGSHsUNRhtPY2Z47KmXFa4Ck8zqtMm26KE8FeD0uowXexm5sG/BlfSLGO8dHLHuXp4a8AfC9iLpPHosUElYGoy9ipcsLLkWODKVucm2ateoSbdvew0g4w2CcaAAapCpDZCpACp8k4cDm7a2XLCjBzkkkXsUKSgl9fcz3vkLjseNNJJUlwOhBxW/kOMFBK02wlb3s5e9+mrJNNvyWcLSSTFZF89LikxsE2tgohydsjLODpKS2VMVKV7J7fvBeyF8PostRikmnau16C4LYnLVQS5r+IMW6YX6Q7aYSdqxSm3yEpMQG3aoGdtv8Ak4xdy2fhLkU8iT4b+81yEqL9AorcLFOE0+U0cpK9lRqRidLZBJ3yAmgkrYudM2LPLfaPF8fqM5x3UYRh+C/vPTO4Qb8+DD1OFubb3bbdmmJ+Q8nmwSTexVnBp8Hpc2nTvayll0ifKN5SYlkpl7Jo0m6Ql6emV2JRj3LURUcbQ2KoyvszoDkKhwNiSoaCQKJEBHAnN0m34BUJ1E6VJ7srpBSl3zbJSEEKJPaMSJUbJtMnsCUB3Z7Hdov0RXaT2jVAJQCaKq7iC1Q9xFtFSooE9xsGBQUUaEfFoZETENMXAN8AnWcMOCTAs6wI1NBqVFbuZEs3Zt5FwdXHkSRHxGygszb3YxZLDhdXFMLvKin7jFMzsPqx3HWKUgk7MaBnHI4zoQ0ctmS0dQ8gSYabYuIcTbIMi6HQl7iIhxZtIS1Gb9R8JtlOLofBmkHVqMg+5sRGXgbHkVq4bEbFiUw4sjqj0GhSYSkaSnDVySAmGmaRSGrBaGAtACmhc0OaAkhAhrclLYOiKEktoig2gGqJoCwWwpAMimbCXA6LKkHuWIOzPRVYRzIjwSc2wFoBoYwWjnoKaoAZJbC2IkMFslsBtEUJOBslMgDSGRQtMYnuBmJBIFMIo3HEnDCDiSG6AIYDZLe4LY4SLJTBIsuEbdgs5M5lHAN7ljC9xD5GYnTRv4rypq6laJ7SIMNLY9HM7FwtwEZMfOxcaAlHYdgZWbF7Gfnw87G5lgmUs2PnYy1E1h5MVXsV3jp8GtmxV4Ks8e5z6RSp5Hlnb48DMa3EQRZgjoVD4LYlrY6DpHSaAyZIU4tvYbJ2M08E5W+EK3kBunwqMG3y/2B6dVl39Tm6V8K6OTcZJ+5y6ttB7VvcjjwMcGla45FN2zO+jBNXNP1SGNqEK8vn6HOOyk/Gwmc23uvvKpJbV7uvY5
StpeoMVb3Z04yim0t3shSdI2MHkba8C5PtlXk7DknFdtk0q726Xq/I+AuMXJ7Ic6ikk7fqLU1dJUv3k3u0KhNb292DNWvcN290RV7tbFZFRh+WL9WGuQU/YZFbbmsAluPhG2LgrfFDmnBG2MdMGZOrW6Xgz9RFNX5RpKaunwVtVhTTcXya3IZMsalwJyae72Cc5Y5tNPZj8eSMlyhQRl5tM6exUlgab2N+UItFXNiVPgo+MSWOmQolvNBJiWiKkMUNQCW4aJMaOORDaFwJvcRqMlLsXnkPJNQi23wUXNzm2/IcB0ENSFwHRVkWrElYxROhEcomN0C1AnsHKJ3YR+gUoBdtIYoexLjsOUK00qEtIsTRXa3Nc1nQ0ckjmBbTNoR8Q1wIjJjFMZDbIbBsFtoAO9iLBTsDJkUFtyPhW8FkyKCpbsrNtu2C5Nu3ySh8SKwlJgpBJADYzGxkISGRM9GsRY2IiDHxZhRDEEgUGkZm6jqCo6ghhSDRFHV4N8lRpoJMWkMijaQjFwNi6FKkglIslrG99xyZShPcepoy3VSrHfQSmVXMF5kvJh+l9X1k2CU7ZmrUJurHQzJ+TfOhLGlCVjYso48ifks452kbSrlWCHuCmTZZoaAaDbRD3EC6IaDoholJTQDGtC2hUFSFsa0LaM6YU6Y/Gyve47GzPRLUWGKg9hlnLuhwLCIZhQW/IqWw1oTk2ERcnQDkROW4mUyKDu73OUiv8QlZERyl1bUhiZUjMbGYlLaYaZWUw1MfTPtHX7iu8jvK6DWwXIW5kOQdA2wWyLOsqJcccSkXA5BN7EJEtbC6YXyFB00C0FDZm3jvtK7B7IchGN7Icmer4/ioIhok42s9Gr5EVcsbTZemtirkXJjqCs7MuSpNJ8ov5o8lLIqbObeWdZ0Gh8GinjmPjP3NuHKtKdIBzFd69QXPfkAcnbSLeOPbD3Ken+af0LabSZj5aYnNdiTfDGpwcLvcqZG+2l6jdO22kzEursZpQ7WrdKmKa+ZUdGXdJvx4JnLsV+Xt9xP2mjJNJJK6X7xE5Jpvcmc2luhE8ndshptNhk4SSLGO8kHa4K2KLpbDpzah2wdp8jgA3HG3St/sQuTc2m3Zz7m9w4xbVJWxhCTfgNQk+FwNjCKVPd+gxb7cL0QvUPhaSiqk69lucnF7U6GSSfhEpKg6YKjXyp37kxg29wkqdoKN3uP9A1VDC37pHKSkqFa2fZom/LaX7Srh1F+Tq8e5JylVjIux+xDfdBq/ATmpwabKiy9mVwb54NrofWNqpzx6ia5V7CPjtO0qZe1+LubaW5nVZERPXo9a2aW6sDJre5U4NA9guWL2BcJyZO5tgW2xjx0R2UKmhINIhIIkIbpC5S3CkyvmyKEGx8K0jV5rfYn9ReMQm5zbfksY1wVZyFKt41wWYIrYy3i8HLtcNhEcogwQ+KOfVUFRontQyiKI6ApAyVINgT4KlKq8xEkPn5ETNsopTAb3DkKkdGUiTDTFRCTKI1MGb2ITsHNNRj7gKGeRQW27ENtu3yA2222SmXEDXIxICCsfCNgblEJQYyMBij7CMlRDSGdm3ALVE0xQHQER5Hw8GGoDojEgIIdFbGVORCRKQVAt0gydgXSIvc57s7g6MoqUEmwUworc3hDTbCSbOirGRiTdAKTRNtDO0hxMdU5CpZJJFfJkk/JZnEr5ImR8VpZJJ2mNw6tppNiZoS072LiL6b+n1CaW5fxZb8nmNPmlBpXsa+n1KaW5tnTTOmzGdh91+SljyJpbjlkXqbdayn2dYrvXqSpr1AdMRDYKkc2InMBom7OfBNoKkqEyHyEyMrQW9gsbpgMhSp0ZapLsGNTsq45jlKzk3ozbIZydokzMEivk8lmRWy+Q4lUyvkp5J0WMzasoZ5tDmb1Fovjb8hLKn5M95KfBHx2vBp/5o/TWjl9x0cl+TGjqq5Y6GqT8oz14ac02Fl9wllRlx1Ka5GLUJ+TK+Ormml8U74vuZ36QvU741+
Q/FP9NFZPclTvyUI5vcbCdl4wOrilYaZXhKx8WVYY0GlYMRkUIOSJa2CS2IfAqotkLZhSIXJt4/qKuY+ENTFYv1UG9j1cfDhqOATCs36oM+CtNFmb2K8zHVCplXJRzR3NDIU80bsx37RXmoTXqMWTbkpKYXxPc34jq58S/J3xPcqfE9zlk9xcHWxoZKWSvVFzJ8sbMfQ5Wsya9TXlNThsc3lnKuUt7pb2m2PxxqDa54FY43Gq3THxi6S8WZUjcKVW/HIOR97brYJukor7xbe/JBgcbdNELCk+5ukOSSVtANPI+H9C4XC8k7+WCpefcLGm1XqMjihHebS9vIfeo/qKvd8itg46GBtW9l7hNKCqK29Xyxbm5Ldslzn4b+/cP1DTzvwSpNHQkmt4/hsEpwXMH+IumG29mHBMJODWyd+50Y27YreBMUm95V9UM7Uqpp36Edqex0mscHKTSS5HL01LqmbaGBPj5n/AoY5tPkXqM/xs8p3s3t9BayUzPW7dItaWPI/UVqotpTXKEY83uP8AiKSps68atg6VKay4/fyUMmOpMtbwyNeGdkgmrRvCVlElwtBpBUBxWljFOHsXXEXKHsKqUmqBeyH5IUV5sQLk9jN1eXul2J7LkuajIscG2/BlW5Sbfk0zEWmY0WcYjGixBcE6Czj8FvG+Cpj8FrFycu1SrcCxErY2WE9jl0rojjrOJHUMXMY+BOQrJEzYie46YmR05iCZeRbe4cxfk6ISbOsglIYSnSsTmlfLGTdIrZHbCFQ2EmAuQ48lkdBF3FC0injW6L+DdCpmKG3BPb7FiMLWxPwn6CCuoWBNUy44Utyrme7QqZS5LGNW0V4clrEuDHYWMa2G8ICOyCbMFxLYt7snlnNUXCqGqIe5zYFm+YimJbbhxpCE7Y2JpSPh4HREwGJmWqZiRzSoHuIcjOyqRNIrzSGzkV5uyplJORFdofJ2A0XMkUlTH48jg9mLogfA1cGrTpN7lyGdNco8+p0xsNRJcMcqpqt9Zk/ISyr1MaGrfD3Gx1N+R/o/02Fk25J77fJn49Qmqscsl+R/pUvVxS9znIrqfuGp2TaYmxUmE5C5SMtUAkxMpU7CnIr5J+5z60m1ahkXqWYTvyY8c1Sqy7gyX5OTdomutOLtBplfHOxyYSrE+CvlWzLArIrNsTpM3NHkoZ4WmauWF2Us0NnsdU8fpnWNmi0yu3TL+ojyUMipsJOemYXIHva8gNguRfAcs04vZhLVzXLKzZF2P8yhdjq29nY6OpbM+BZxRtmes5gX8eZtF3DJumUcMN1saWGGyMbJGkWcSbRbxoRjjSRZgtjKtDIoYlRCQRNNBzOIkQASIXJz5Jitzo8c9pq3iWyDa2BxLZBvg9THw58CnTDTFPk5MfeGObK82MnIS3ZnqmXk4ZWmrLM+CvNGeql4VSIeQU3SFObvk7eMVr4hPxPcpPId8Rsf5HW50rU48erg8iTVq79D0HwXCbS3i+GjyOgwzzZV23zyez0EksUMWR2kqTZy+bHv00z79Bxwp+eS04dqT4LkNPBNNJOwdRicpUuEYaxyL4zZRbfqMhh+W5NL6j2oY1vuytklOb8v0SRl8J0pwi6tt+iQt5W3SVL2OcG93F/gCoO6apB2knu3DW68L6sBppktNonhOU1foNSTSdiXB1xuMhBpchw4NUkSqYNV7nbv1DhmJK9hkbSFQtPcct1uAEm6syOr6pxrAnu95eyNrHDudM8nrJvJrs8m7fe19ydFc9dK30FPY5tkRCMkOUqY7HPcrvZhQnub+LXBVxx7o2uTou1T5RGKVqiZLtla4Z1w4XKNM5IdJWrAoZgohxsZRzQlKmSGxRzKmzUyRtGZrWscG/IFWLrcnfPtXC5ExQU4tybfkmKNfkSOCHwQqCHwRlqg7Hwi1jK8CxA5tqixB0PTK8WOjwc+jNTslAomzMJb2E5BjFyNMxNJkhM0OkKmdOSV5q2La3HtJsXNpG0AEqOcq4AcgWxl1E3
Yp7sJsFjhBGRW4KQcSiNhs0XsLqilj5RdwqyTXsc9h3ekitjdIJypCNOTJeyKeR2xrbK83vQGPHyWYNIqRdDoS9DLUJbUwlbFQtvcamkZ2GYlSBbIcgWy85K0LYDZLYDds6c5RRpjYsQnTGKQWCLEXQ1S2KykF3UZWKPcwHMU5sFzDgHKd+RUnZDkA2VISWQyGyLGHNAs5sCTF0wydAfEp8gzkJnKiQtxye46GT3MxZGnyWIZL8kk0YZK3TLWPPeze5lQyDoz9wDXjlvyNWTbkyoZmvcsRzprncm1U0vPJtyLlMR8VeouWVGej6bkmknuVcmS/JGTJfkrymYWdqbXSm07TLemzXSvcz2zoZHCSaew7j9RPXpMOS1yXIStGNpMykk7NPFO0jl+XjbN6tp2RNAxYxUzq8HumqZIFPNDZmnkgU88Nj0s59M6xtRDkzM8KZs6iG7M3PC7M9ZZ1mTVMCx2WNMQ1uTA6zkcluMjBtjoTji20i/hhwKw468F/DDg597VIbhhwX8MdkIxQ4LuKJhdNIdjWxYghUEPiqM+qGkSQiRKQc+CQZDzCoGtyYLc5h41bOnxz2mrONUkGwYbII9HPxU+FSAbYcxUnsTokSdoW3sQ5bkXZyXfsOk9hTQxgtWRdh83nKlQlsObti2exHOgbp8TyTSSu2BFOTpG90rSKEFmkvoLV5Di7o8MdNiSpdzW7LX6Q4JteCtOV8C2273Oa6n1rn60undWlPIoNNb+Xf3G3k1D7VS3Z4jHHLi1iml8vLbeyL8PtFppNQbbadW1S/EjXdT/ABdHk3nk43ZOc23bX0R3w3VuTf3mbDqOmyQtZOyXuPx65zjTmpJPk4tW5+sVlxl4b/E5d6VNdy9yMeoi/Kv1Y2+5XyvVEf8AoAKMKdxa+m5zhBJNNtPykNST2fDJhBQtPdMf76fCU41tGvd7nKUrq7X0DcU22qa9ga3D9UhRipJWmvoH2wW3dX1REPU6St7D6Y2oJXd/RALm07RyVqrIUWraY+9JZxOmjxuSXfnyT/3pt/i2eo1Gpjg02TI3TUHW/LrY8pB7JFX1lOjY8B+AUEZVIZIWnTHPgRk23KzRVnFP3LSqUTNxzpl3DO0duL2CU6D5TIapnPZpoJ7qzRQCUcyFyJSJrYxtenOVLhGzmlUGZeWFttgVY2THuKSpmhmx1exWlCmPpBithsEAkNitiLQbEdB1QlDYvgx0FmDHxK2NliHBz6M1cHApkpkBL4FTYbdCps0zElTdJim2xrVgOO3BvKRT4EZG2WJqkV8iNMik2yGgqINSA0RQTRFCJFBIgJKygdiW5dwlXDFsu440kKnDk9jnujkiaEC5cFTI9y1ldIpTdsAOLbZYx0irF0NjKiLAuRnQantyVFMNTFMDqx3nOViVImzXOeFaJsgizkzRIkGmAmdYrQYmT3e4ruOciOGY5+4LmLcwHMRmOTIchTmC5i6DXMhzEuYLmL2DnP3FSnYDmLcxAU5CZSs5y9xbYBzYUMlPkU2Qm0w4F6GS/I/HkM6E2mPhNisJpKdhKXuU4ZGh8ZpoiwH/ABH6shzb8gJnMnhJcmLbYTBbIuTC2A2EwGxSEt6LNT7Wzc0+S0jzGOThNNG1o81pbmHmzy9Xi/024OxiZWxTtIsRdoXh1zTWibtCM0bTLCVoCcU0ex472IrH1EOTNzQ5N3UY7sytRjpseozsZGaG/BVcGaeSFsrvHvwc2vVJVhjbZaxYuNgsePfgt48fGxhvyKkDjx+xbxQqjoYyxjhucutdXIZigW4RF447IsQVGfVjiqGoWg0yoY0zgLJTKMT4Bb3JvYBvc0zkqkZjVsUnbH4ludPjiKsxWxzJXBDO2NP6KnwIm9h+R7Mp5J8onXwqBu2Gt0V+7cdBnneT6QmgaGNWgWjC0PmD5Bq3RL5H6XTyzZEknufQ94wix0/RvNNN7Jbtm41SUI7JbIjBjWHEscVv5fqXdLp+6SbWxy70uTpWHSTyOqe5HU3
pumadZdRJuUm1DHHmb9vb1Zp63VYOmaOWozv5Y7KK5k/CXuz571DXZ+o6uep1D+aWyiuILwl7fvH4/H+vdXqzLtbrsurm20oY3xCL2S935EJ0BZ1nVJJORj1rdNzY5zWLPPtT4k/D9/Y159P1GOpY22mrTTtNezPKKTTtM0+n9Z1OjqCffi8wk7X3ehy+bwXXvKpZ/bYhqNThVNdyXryWtN1SKlWTug/VcEaTrHTtWuzK1ik/GRbX7P8AMt5Om4ckO+Oye6a3TPN3jn+2eNJL/S7g1Smk4yUl61ZdxzU1TSTPMz0Gp01zwybS3+V/wLWi6o01DOkn/vIy5Z8V3/raniafciFBT42flDceRZIpppprZrydOBUPhLg0/wC8hbOiZJqRFpcumOASTTtIRq9Rj0uJzyOl4XlkZtdDC+y05enoHhyafWUs2KE6WzqmvvOnxeOa+peV1euy6zLcl2wT2j/EiB6bWdAw5oOemdSrZN8/eYGXS5dPNxyQar1Q/L47lFlQgkCgkzlpJasTNWhwMlsEoqpumWcM6pMVNbnRdM6fHomnGVxJi/BWw5LS3HXTs6VSjewKe+5zdoCcu1N+QUVnncqXCESVoK222/J1WCVXLBNPYp5MdM1Jw2KuXH7CUoVTDQcoUClTIpDQ2PIpIYuTOg+BYiyvAfEx1AajjkcZhDBasOjqLlBTiA1Q5oVLg0ySvkRWyItTTK00b5STRDQVEM0AGCwmD5AOQ3HG2RCDb4LmHFxsUOGYYcFuMKR2HHSLCjSDiiWqIaGNC5OkxEraiVKii5bj9Tk3pFS7YxTVINSEJjE7FxJykMTEJhplQj0wkxKewSkUDbJsV3HOZNoN7iO4U5kOZINciHMU5gOQga5gubFtkNhwxuTIcgHIFyDgG5AOQLn7i3KxAbmC2DbIsOBLZDOJAAaBYxgtAA3Q3HMVW5y2Cku45FiDooY5lqErrcmhbi7D5FQaY1cEU3NC2hzQLRFJXYLdDZIU0SQWy3os1PtbKTsiM3CSafAaz+pwPV6bLaRfxys8/odQpJOzZwztJnDqXGm2b2LyaOfAuMg7O/w+aWCxXyrZmfnxppmlkWxUyRtHZ+pUWMjLjp8CHDfg08uO/BWePcw8ntMIhj3LGOBMYDoxPP8AJ2NIKEdh2NUBFDUqOdfDYjExSdBphAcmTYtS2OczWGZ3HKW4lzOjO2bZyD29iLBTtHI3zkhx5LWJFeCstY1sjfETPpy4IZKIlwdLUjK6RQzSpl3M6RmaidNka+JqE7Y/Gynjlb3LMGcHlhLKZLQtMYmclD5ljxuckkrs3tFp1gxptfO1v7IraDSqCWSat+EzRgnN0lye75NspDtPjeSSVbGr34dJp5Zs01DHBXKT8IVpMUcWNzm0klbbdJL1PIdf6w+o5fhYG1pMbuPjvf8AvP29F95jjH7rTv5it1nqmXqmr+LJOGGFrFjf+ivV+78/gZzZzYLO2TjG3qWzrBvcmxhNkpg2dbAGxk15Lem6hq9K70+ecPVJ7P7uCgmGnZnqS/Tlr0uk+1OeDrVYYZV6w+Rr+DLmfqXR9VgnlWoWHMlahKDTb9Ntn9x49cBJnNrwYt7xf6r2PSOrYlNY1kcovw9mvpZ6iE1OKaaaa2o+Uwk4tNOmbeg+0Wt0kFBqGbGuFO019GjDf8f33K5p7TVZIaeEsuaahjirbfCR5XV/aSWbMsWix9mNunkny/ovH3lPrHW59TwYsbxfC7W3NJ2pPx77bmXpd830Rfj8MzO0rpqx1E3O2223u2+TS0mplBppmPjTbs0dPBuib/j8R7eu6dqviQSb3LWo0uHVY3HJFO1yuUYvTpuDSaN2ErSadpm/i8ks5Ws9x43XaSei1MsUt0ncX4aEI9j1HRQ1unaarIk+x+jPGq1s1TWzXozl/keH/wA72fKjU4KznwRZDZypLmLsPI9iu5U+TbCas4p00W1O0mZsJOy1jntydk+HFhT25FTl3beBUslOrJi7GpL
VExJq0clQzS1aFZIWh0TnGyTjPyY/YQ4UzRyQvwV8mOiaKrpErkJqgVyZ2JPxssQZXxj4+DLUM5HApk2Z2AVo4hBUEAGhbQ5oBo2zCVpxK+SJcnB+gmWNvwbZLim0A0y28Lfg5YN+DUuKfY34DhhbfBdjg9hsMG/AHxXx4eNi5hw1Ww3HiS8DkkkM0RikiXRLdAsAhorZ2op7lmTSW5la3OraTAKeedzdC0C3bthIE0xBIFBICGmGmAkGVCokzlIE4fAOyLBvY6w4BNkNgtgti4Y7BsBshyDgG2C5AOQLmSDHIBzAcrIsQE2DZDZDYAVnWDZyYgJMNcC0GmATRzRKOJoLaBGtANB0gp0x+OYiqCTaYWE0McyzjlZnY5+5axz2RnTXE7RzVi4SsYTTBJCZIsNCpIikrSQtofNCpIqUjdHmePIk3s2ej0uW0tzybbTTXKZtdN1HfFW90Yefx9n6i83lejxytDU7KeGdpFqL2OOWytnSViMkOSz4BlC0dXj89nqosUMkLK8se5ozx+wiWPfg6pv9J4qqFPgYojFD2CUDHeenAJBUM7H6AtUc+vHVORKdAWQ3RExTNcgHP3FudC3M2zkz3PbkKMtr9So52Pwu2jpzkluPASQKXA2Ks24Dca4LUFQjGuCxHg0yJPYgJMNsXN0jRdVsz2MrUS3NDPLZ7mTqZfMTUJxumWYSKcGWMcjk8kC5F2hiZXg9hqZwaDy0FNtJfga2i09JOS3E6HTNu2in9oeq/AhLQaSVZGqyzT/UXovd+fRHr5l3Uz1O1T+0XWFqG9DpJ/yEXWWae02vC9l+1nnZMNpJUlSXCFvc7MyZnIzttoWQEyCiQQ0ScBoOs5o4QTe4SYByZNBqYaFRGIimYibATJsngS2N0jXx1b52ENjNOn8RVzYrPQjcw41fBo6eKikqK+GKci5BJHFr3Vxf07uq5NrTu4IxNI/nRvYUuxNehXhz7XDVweG1tLXalRVL4sq/FnuVweL6vgeDqeePicu9e6e/77Or+XO+OJ18VDmzrIfB5bMuZVmty1NWhM4mmaRadD4TryISDTo68Xog8rtJoLFPjcVN2heOdOrLsXGnFpo5oXilaHVYjQmGlYDQyKtAAuNickNuC1QLjfgXDZ08b9AFjd8F+eP2A+HXgmwuERg14GqLSGKHsEoGdyC6YSQzsCULIuDLUbGKFhxh7DVjHMAjsI+GWuxehzgvQ0mRxUeNegDxr0LjhZHw78GkhKaxW+Alp9uC6sVeCXBJFcCmsaXgJRSGSQND4HHHHUUbiG6W5zaStlPV6uOOLV7gXA6zUqEWkzDy5Xkm3e1kajUvLN77Ck7DnCtOTsZEVEdEhAkg0iEg0hwCSJOSJaLhIIslgtlcDrIbBbIbACbBcgHIFsRibIcgHIGxUCciG7IOJ4HWc2RZzEHWQ2cyGAdZ1kEpgBphpi0w4k0GImtgUGiAhrYFoZRDQugpoihjRyQ+k6Ow+EmhSQyKZFC1Cew+MtipBjoyI6DrtANEpkvcmwyJLcVJFpxsXKHsECpJDdFlePMlezJcAHGnaNJP1OHx6jSZbitzRxytI8/03O5JLyje06bS2OPXg1+uSNcnrdBUHHDNJNpkpNco6PF/A3v76FKcLeyDjo3JW0l9RmNpTTaLq4PS8X8HOf8Aa9DKyaRwfH3k4dL8S7aVF7NYiE1jbbNp/GxKXBR0WOnbbfsIz6FX8s69mPepaultQtZviNNO7Zp/44s+Gqrp2V7qmvqKy6TLjXzRa9zbxP5UE0mt0Z3+L47PQeWyQaK03TPT6rRY80G0kpeK8nn9Xpp4pNSTTXqcnk/jfn4Fbvt0X9Km6ZmwTc6NfSwainRlM8oWIxsdGIMIjYoqgzGhy4FwQ1cF5PLmIyPYa3SCeFONN7mucXXw6yNRKrMjNK5M29bp5pNpWvVGDmTUnZGs2X2kcJD4S3KMZpOrHwnfk5fJAvwnsNUiljn7liEzz/IXVLq2vXTdOsOBr9K
mtv8A6F6v39F9543Im222227bbtt+pc1GSebLPLlm55Ju5N8tlXIj3MyScRq9VmgGh0kKkadSBnBNAtB0IOokgXQg5nHUHTRRKQSQSQdAUqGJHKIaQgFBVsEok0IAou6DC55U2tluxGLG5ySSNrT6f4ONJr5nu0Rq+uBbw82WY+pGkwvJSHzxKM5Q8xOa44cHgyKMkze0mRTxqnZ5uDaZr6DM4tJvcM/46lXmtZGL9pNKp6WOpivmxOn7xf5OjaTTSaByY4ZsU8WRXGSaa9Uzu/P7z+f+qseBvc6x2u009Hq54J7uD2fqnwxB5Gs3N5WLnwLmhjAYoCWqIsOQtnT46SJPYR31Ma2ytlbTs6YqNLBPguwdoydNO6NTA7RFUY0FBbk1sFFUwMajaOcBkVsS1sBq7j7AuC9B7QDVAC+yvBKiHRKRPCCohKKCSolC4ExihiQMQ1yEgdR3bZKVhqJfABQ9jlBIalRDQzLaSFz3Q5oFxtDJVkqBobkVCm0luxhAM5qKtuivqNZjxJ7psw9b1RybUGVJ0NDW9RjjTSaswc+qlmk22VsmWWRtttkJ2X+eJuv+HJ2NiIiOgyNJWIDYioDYmYNihiQuI1IqElI5koiRZBbFthMBsoIbAbObAbAObBbObAsRis4EkmhJxxwqHEEkEhBxJ1AEUdRNHJC6HLkNApBJUTQZENARDRNAkdRKJJAKJSCSsJR3J6QVEYokqI1Im6AEgk6J7TqojoEmGmJuiVIqGejmrQtSDTtDgBJewpwse0N0OmlqtVDHBbtm3jn6vF591p/Z7pkss3lmmoLl+vseshjhBJRikl7A6fDDT4IYoKoxVIcevjxzMaW/8RQjLFLdFgFxT5RVhKij86otQ/VViZx7ZprgbF7Ch8RlTaTSuilqE6Vcs0LTK+fHbtcIYUJttKDe4eJNZIrwFOG6l6EK+5P0Yz40car8AwIO0n7BWvUCrm65EarTx1GFppXWzGzaa2ZCmkt2TZ0c9PLvD2Z3FqmmamGFQWwPUcKWrjONVJXt6jcaaVPlcnnbz+dUv7NSoOK3AtJW3SE5NSoLYWcXV9FV1NJW2kvcF6jGnSdv23M1znmaTbpl/DgUUrW5158Ek9rh+K5tSapLix74AikqS4oJqzbOZmchULgpJp7p8nmeq6f4eVtLaz1CVFPqOmWfA6W68keTHciPFTlTGY8t8MXrMbxzcX4K2PJTpnnbz1NauPJ7lmE+DNx5Ni1jnwed5cpeZk+RWTgNsVkkevCKkKYUnuA2UTmC2c9zkgDjqCSJoXQBIlKgqOr2F0OSCSORKYdAkg0gU9yUyegaRyVuiE7LeiwPNkSrZbsOku9NwKCebIrS4T8suRTnO3vZzSVQiqSL+k0/yKbX0Ea30zTu035E6jNHLr80sa+R7J+tKrLWryfoumUIusmRV9F5f8DKlL4eFyXL4MvJrnppJyDVqdP1LuC9pL9ZftMjS62GdrHkajkT2b4f95uaWPfFOOzS3KzJqJjS0moU40+Vyi6vYypQeJrNBbPlGhgmpwTs6PFfzfzVxifabSOUcergr7E4Tr05T/G/xPNXTPoWWCyY5Qkk01TT8o8X1Pp89FlbSbxNvtfp7M5v5fj7r9ROp/ajYMmc2BJnHIgMmLbObBbOjxwnNlfLuhzYrItjpgiNPOpUbGnlsYePaZraeT2J3GsaUXaDWzFY3sNsgzYPwN5RVi6Y5S2GEyoWwm2yACErDpJAp0S2xBzIT3BbITtgFiCsYo2KxsfFgBKIaRCexKaKDqBaC7kA5IOnxzRDpLch5KKmozOnQdBer1MMabbRg6zqj3UXR3UMs22m9jEyttsrPtF1xOo1U8rbbZWbsmT3AbN4i211hRAvcZEKDYjoCYIfBGWgfDgdETBDoozBqGIBBlwhJgtnAtlEFsWw2xUmMBbAbCYLAwsgmjqEHImjkiSehB1E0cT0Io6iTqEEUTR1E0IIo6iTkhByW4SRKQaQrQhINI5INIi
0OSOS3CSJSI6HJBpHJBpE2klINI5IJEWmithcthj4FzCAtshSIfIKtvY0kByb8DUxUVSCsrg4bdnpPstp125tRJb32Rf7X/Aw+n6TLrM6x41d8vwl6nt9FpoaPTRww3rdv1flnd/E8d7+q0zOTqyccQ3R6KnWRaIclVtoU8lPjYRydNmlJULuk0wXkd7FfPOSaSfLFxcn9LUJdttvk6c4tLdFVt1yA22ueB8P8y0xzSbXqApJSbS2ZFbpgzdJ+qDhnx1DjC74Fzy5GrUqv1Kzi+1NOt7a9RrmqS9APnBrNJw7W90KnlfYt2Lk6ntwxOadQX1GVgv0lSywvdxbdepaxNQxqU3u939TK0rbyZMj4WyLDm2lu/ozHfjm6Vwdm1Hc6XHhFffJNJ7r0Act6St+S7o8LT75bv0LmZmchTB2jwtSc5pJLZIvprwV03KSS2iv2lhUlYzs4mErlVeRguK+e/CQblVLywqKk7Zgykkvdg96Sq92LpceU69p+zM2ltZ52cqdo931jTrNpnNLeK3+h8/1UuzK4+jOLyZ5U6W8OVVyXMeTjcwoZmpL0L+HNdbnD5sekMuc6RXnNth5GxEmd3CQ3ZFkNg2PhjTJQKYUSaBpBJHRVjIoztHAqJziOUDnAn9BXao5WPcPYBxY5ogphIiqOQyNxxcpJI9H03SduNKqb3dmT0vT/FzptbLdnqs0Vp9A2tsmTZey8i+qk/tQwwU86S3V8+pvYlBKuIwVt+iRi6LJHFkUpK16F/PqILTKEZK8rp+y9PvFq+zyq6jNLUZ5Tlsm6S9F4Keok3slskaWTTuGD4q3TV7eN6/eVcmHt0mbM1+rBv8AYc/kvFV5nuanafk9D0fqUoNQyO/R+TzTfzl7SzcZporVuZ2M5ePoODNiyQptK1wxkYPDPbeL49jI6XmU4q6fhl3Xaieiw/HjcoJpSXon5L8P8ibn+TbjSUlJWhGpwQzY3GUVKL5T8/3lTB1TTZkn3JP2fBdx54T/AFZKX05Ou83ODry2v6NPG3PT3KHNVuvqv4mJmxzxv54te/g+h5MSn82NtS528mZmw6XPN480F8Rc9u0vq15OLfiuL7Rc9+PDN7kWeo1X2dhkt6aab5pbP8GY+o6PqdO3aa+qovObEXNigBNWh8tPlhzHb1EtGsIrHH5zTwKqKMF86NHCuCNVpFrG9hqYuC2GRIUjup8jsbtFVv5izi/VsfQa6FtpHTnS5M/UarttJhFcXviJPk55ElyjGetl6gvVzae4+E08mphF1aIhqot8r8TzWo1s/iUmTp9VNtblfn0nseux5k0tx8cifBh6TJJ1bNLE2zPqovKbYSk2IghyQv0YtyJEnMXTKkipnWzLkitn/VY0157Xrkw8r3N/XrkwM2zZt42NIkwGyZPcGzpgEuRkELiNghaBsEWIITBFjGjDRw2KGxQEUNiiCGgkCkGi4TgGwwGMgNi5BNgMOgLBYTBF0IomjjqFabqOJolIXQEmiaJJAaJokihB1HUTR1AEUEkSkEkTaHJDFEmKGKJnaAqISiGoBKOxF0C1HcJLcJqiUhdAUg0jkhij6k2hCQSQSj7BdpIKaAlGyw4A9ll5yrio8bbD7EkWVjrwLlE6M5VwhKjroNot9L0MtbrceNJ9qac36JcmufH28Ez16/o+ijo9DBdv8pJJzfm34+40TkkltwcetmcnFJFZZKMGE5pcbiMzck7GrM9ocqVgLc604e6ITfkGnEp+BWZ7xXuFJ9rvwKnJPJGnYAbk6QEN20S9myFSdryB/wBJT2a9CvqMvbS5bdDMmRQnTfIhLvk5tWk9gENul9EC222c3t9UC3UU7A0TlUUylnm5QbT2THZ51ibvgUsN6NSd3OSsVXnMvsWGPZplfMt2c5tJvz4DyOlXhKkVVJzypcK92EKTq3p4X8758I08KdexUwx2Sr6FxPbbhbIFanDMTubVDYq5X4QGGKScn52CUrdJ0vLBjr3TE0rfqA93s973Yme
VuVJeyDi2l/Fk0ucTOajty/QW8jjynb2JdR3bv3ASdubpbbX4Fwq7Uty0uRPntbPm/UFWqmvc+h55taXNK+IOj55r5d2pm/cw8rLXxTHYcri6bEtoFv0OfU6hExEuB8kJnsaJIlsQmFIDyMzE7DjuKix0FuRTOgh0EBBWWMaMNUxRiE8boOER/Za4MLrgUpQrwLcS5OFeBEoFzSVZoKELklQTRb6fp/iZla2W7L6Tb6LpVFRTW73ZY6nmU8qxxfyw2Qemk8buK3apexTyPvyuV2r2Kyu/ODwY++SXoW4YVLO1SlFpJNre1wl6O9vvMjN1HDppqNub4cEuV7vwb2gzYdRjWVNKLpbcp+G/2D+08iacenvTytTg4Sl6tP8AvK/VnDD0jJjW+TKlFJburtuvojQzwwT1eaLtNRjN3xT83+Ox5LqM88tZmlPE8EG6xwprbw/dvmzHyY/y7f6Vq8UI6XLKbaxyr6Mt4tPODTcGvqheDDKbttv7y/jxuKVWY+Tf9MuLnTszxzSbpHpYdmp08seRJxmmmvVM8mpSg+b+pudL1akkpNX5MMa/Outc3+nntZpM+gytStJSaUr59H+B2HqGfE1U269zc+02jWTBDWY182PadLmLfP3P955k6bbm+ka9Vs4ftBmhXem0gtT1XR6+ChqYyhNfq5Y7OL/ijDYDLnm1zlL9NXNPW6bF8XSa1Z8K3dNNr6p7orL7QdTiq+PFr0lBNftKDXItoeL/AMK1oz6xkyKsuj0k2+WoOL/YyjN983LtUbd0rpfS9xdBpWdEvou2/RQjvwX8KpFXHHdFzEqRlr6qHx4DQEeAyWhLfzss49oIq8zZaW0V9B0oTnl8rMfUNuTNXUOkzJzbyYRZD3ZDdRb9gntyJzTqL+hcRWXmleVv3LOl5RTk7yv6l/RRto236yiNzRJ0jXxKkjM0apI1IHLW0ixAckKgNXAlJohoLwQxEVIqah7Mtz2KGqlSY4msfWvZmBqNpM3NZK0zC1L+Zm/jZVWb3BOfJKOohRHwEx8D4IjQOgixjQiCLEEYUHRQxICI2JMAkiTqOKJD4AbDYuQWkBi2GwXyLoQC+QmRQdNyRxNHUT0Iok6jqDoccTTJ7W/AugNE0EoP0CWNvwT+oAJEpDFifoEsTJuoC0g0g1iYah7E3QRGIyKJUKDSM7QhBJEpEpEANEqG4SW42EL8C6ARgMURqx+wax+w5nqpClCwlD2HrH7BrH7GkwqRW+GcsfsW/hkOBtnJ8VHCkKlAuSgJlHlI1kNVWNzmoRTbbpJeWe06T0+Og0qhs8kt5v39Poil0TpSx1qs8fnauEX49/qbp3+Hx/mdp/HN0mxMsjaVbBZJbNIV+tHY3PMcnTrwDPendHS2V+gLaobSBeza9SF6Ml7oFyVApGX9Rp+oDgk1Jbex2SdwdcoCGTuSXqhFJUydtHJ7NegDadryg0+H6jMiaWTK1LelQUUowpcLYCLSlN+4baaaEHWtivOba7UMTt16C3vf1AynBzhXl7FucFCGGFbJNsDTx7pRTXmx+oV568JJEaqu+lHMm2l+JGHGnnS8IdOFu15GafHScvLdIfTzVjEqTf4DoKkr+ouMGkl6jafjkcLVHbpJeAMmRY47v7vVkOSSq1Xl+ouFZJd74T29wTw3HGvme7fj0Dcm3Sdv9gDdUuQo8Uk22SVQ1NtJNX6+hzgnty/Vj4Y+1O92zsnbixSyS4SsTOsfrerho9E4NrvmuPRI+fajJ3zb9WavXddPUauScm0mYmRnNvXay1UORFgWSnZmzNaEzRYaFTRaVaSFtDpoU0M0we4/HyV48ljHyRpS1jXBYxoRjVlrGkcu1HY43RZSVUKhSSGJ7HNoByR9itOJbbtCJKys3hELG5ySSN3QaR4YK1Tmrv2K3S9N8XOrXk1+pZEnjwaVXkTUJvlR2uvrRpm205FfUtNvHjnSX67T59vp6mBreozbePTyqK2c1y/p6Gn1vUQ0mFdP09PI1ea
flX4+vqedaN5eROvoUndnrej5lj6ZqZKClPHj71flJ7r8GzzWnwSyTSStHq8OlWHo2bIml3r4bk3tBPlv2SHn3TwjreaeTpE3pm28kEpPz2J219ePuswsKnPTwUpNpNtJttL6ehf0XUXly5KjeNSXbBr/AEeF/f8AVisWNRyLHH9WLaX0sx82uQa9+1jT4UoqkPcEg4QpJINxs8zWu0RXlFUL0+d4M6bezZacduCpqMXlIrNl9Ur369XpskNVp3CdSUk00/Ka4PHa/SS0WryYJbqLuLfmL4f/AL9DX6Pq3CShJ8Fv7R6T4+jWrxq54Vbrlx8/hz+J2Yv6zy/Yu/5R5Mhkt0gWxSMkMW0G2A9zXMCA4oAbjW5vPhHQRZxrYTBFmCMauGoJ8MhcHP8AVY1dJgrmW5bJfQr418yHzdIDirqHsZWZ/MzQ1U6TRk5ZbvcqKtBNlbNL5WMnLYq5pWmaZiKpLef3mtoI7JmXBXI2NIqijTy30MxsaZ1RpY3sjJ0890aOGWyOWtYu42OTK0GNTJUfaaBb2A7iHLbkAjI9jL1s6RozdpmL1DJTasqRnpmamfNmNqHcmaOoyWnuZWWVyZ0+OMqBkLk4lcm5GQQ+AmCHwRnoLEFsOghUEOitjGmbEdHgTEaiCGccmc2MkSYpsOTFNgEMhnWdYgiiaJW4yMLYreApRb8BrG34LMMO3BYhh9jK74fFBYGxi079DQjhXoNWBehnfKPyzVp/YYtP7GksHsGsPsZXy1X5Zy069Bi069DQWH2CWH2M75T/ACzvgJeDnhXoaLw+wLxL0F/6j8s54a8EfD9i+8XsBLHQ/wD0L8qfZRPbQ9wAcSv0XAUSkEo+wah7DnsuBhEsY4EQh7D8cTbOTkSoDI4/YOERyijWZXIUoewah7DVAmkjSZMrsAcUh0mhM2UCMjRc6NolqMrzZI3jxuknw3/cUcjPR9Hgo9NwtLeSbfu7Z0eDM1oL4MpU69QmKk7dpnacQ+GmKg9mvR0ht2hVU2m6vgao5z2piptL6HZLttsU5tbPdA0ju9p2uPQGb8pgSbW63XoLnkSi2ufQFOeRNOmm14IwpvGrfO6IxYU4fOt3uyxGKUa9OAV0tp7NeeQlfa0/AaVqmc1sAtVIN036smb7WrCUKWx0oprdWJNBjVycvUlQt16sJKkg41u/QVvEdO0eO8rdbJAZH3ZZy96RZ0ddkp+tsqt7P3dmNvs+ghBznSLGHGm0ktl+87TQpSnW9FrBjpWype0+8geyt2KyTq0ufLG5p22lwVckVk+RN+9Gh5/6BJ5Xz8vl+o9KkklREIqKSW/jYYov1peiA7XQhb9WWYQSVvn1F447quENSbkl48iZaoqbfojM6/qfgaJxTptWa3B5b7UZu+TgnslRHkv5yiV4nUTc8spPdtsq5GWM6am0VZs5WVDZKe4JKYJXGhckPoCS2KQqTQlotZI0V5oIotbMdje4l7MODDUVF7HKi1CaM+EyxCZzbybQhIYpFSE9hqmc9yDmyEnJpAdxOOaU03wTwNSM59P0L1CpTn8kG/X+4y9Nr9RpMryYppye7clab9XfLHdS1i1U8cMd/CxQqPu3u3/D7jPZrmcFvv0PU6l6huU8UFkk7c1abfm96K9bhMhKy+obPRM2jwzb1koqKTe/l1stin1HX5dbnyVOcdO5XDFeyS42/aVUgo423sg/fJw+3nFvpMnj1eNtNpumvVF7TNT1M5JUnJtL0VlTTpYUnXzvZe3uaeHGo5slKldr2tJ/xOfz3/E1mKDq2FCFoNQaPMtaFOO3AnNj2LjWwvJG0PNKxlqTwZlNOqZ6rp+ojqNP2tJpqmn5PM6qG1otdF1Xw8ig35O3x75f0nN5eMvqmkeh1uTA77E7g/WL4/Dj7ik3R6/7SaT9J0C1ONXPDbdcuL5/Dn7jxsmdVnvpanKhy3OsFsiyspGnuOx8iIvcs4Vua/0S1jQ+CAxrYfFHPq+1RKRD/VYygJLZhKpGJbo
LJwTiW5GTgfTjN1jpMycj3Zq617Mx8uzZpk6XOWxUzPZj5sq5Wb5iaHCvmRrYHSRk4uUaeF7IPIrLSwyprc0cM+NzJxS4LuGfCZz2NI1ISGqWxShPbkcp+5Cj3MjvFOYDn7jgOlLZnnOpZ08zSfBs5syhilJvZKzyOp1HfOUr5dmmM9rLd9l6jLdqym92FOTk7YJ15nGTjlycEluMGwXA/GtxEEWMaMtBYghyQuCGpbGVMS2GJgLgJcEgVnNg2c2BIkxbZ0mKchkJs5OxTnuHB2FgWMasuYcd0VcKNLBC6OfeuKkHDH7D44/YZCHsPx4/Y5NaXIXHH7DY4h8MaGxxmN0rhCxewaw+xZjjDWP2J6fFVYvYJYvYtKHsSobcEnxUeL2AeL2Lzh7AOHsA4oSxipY/Y0JQ9hUsYdsTxnvH7CnD2L8sYqUK8FzSbFVYw1Aao0w0kdOKXC4wHwjRyoNNI6skOC2GpC1JIJSNoqUy0gWwbsiQx1EmJm7GSYuQh1Xnyem6Pt0zBfo/3s81JHo+j5I5On40uYXF/VM6v43+1OLmSVJJcsVuMyK17oBb/U679PqLX0YGRpum6Yb2e6FZYd3zRe69RqhE522nyLaathZEpc2mvIptpU3a9UC5XJ2+Bbxp5rlumh8Y2ruwckKXd6CtPo4x8BqFnY1e5YUEHU/olQd2Q4Piiw0k6ZzirsXR+1P4dASjsWZqm0Kauif0XSWqQuU+xP3GZHSsqxTyZU29k7SMtbH1qwax6NvzVFTngZqZ9umjHy2DgXdJGf67VSLuGFYkq3Y+XyY6XLIxpNr2DyptbG+P+pt9qOVtp00vdgYla9vbyOlC21WxDagqVJF9a99OVJcBLfhWLU097tDsNybpUl5oE2jheySosKkgElFX+07uvgO8ZX2nJJQg5vwrPHdVbyTk3vZ6jW5O3A1e72POauFpv1Ofza76L5HkdbCpsz5+Ta6hj3boxsmzZllmWiUwUwolJaLRDWwVEPgaSMiKs0XMiKuTlgatM6LJmLTplGsRkPxzKcZjYyM9ZNfhMap+5RhMdGfuYXAWlP3O72V1MnvJ/IpzkQ2LUrJsOEl8hxQEd2NihUhKI/TzWPIpSVpPj1Aig0rMrTHqcyy6rJliu2Lfyr0Xg1dHleVuckk2kml7KjFmqaXua2hVRTRl573J5vtrwVJB0BB7Iat0ec1iHDYXNbUPfADSYBn6iCaM9N4cqktqZq5o2ypnx3G64N/HrnplqPQ9N1C1Gn7ZU7VNPyjxnVtE9Br8uCn2J3B+sXx+HH3G10fVLFl7Hw9i19qNH+kaCOrxq54d3S5g+fw2f4noeO/rPFf7ZeMZATVAs0lZijyW8PKKceS7p92jX+iX8S2RYhEVhVospbHLu+1wLQMlsNaAkhSm7Ggci5G41sBmVJh0MfW3TMjLyzZ1i2ZkZk9zbFOqU3TK2TdljKqYia3OvKUY9mXsM/BRWzLOJ1QaVGniZZhOjPxzqizCaMbGkq/DM0hq1CooKSCc0k23SRH5V1t9Dj+m9VUHFSw4oOc01abeyX47/cemyaPQY8cpz02JRim23BbJcmf9ldE9N0xZ8irJqX3u+VH/AEV+G/3ivtd1BaXQrTRlU83PtFc/i6X4m8zMwrXketa5TWT4aUFNtpJUkvQ83NtstavK8k6vZFZqysTkYa93oGC+Amjo455ZxxY03km1GKXlt0l+LNYT6J9jei6DP9ncOo1miwZsmWc5KWSCbq6St/QT9t9F03p/ScS02iwYc2bKkpQgk0km3v8AgvvPW9N0kdB0/T6SHGHGoWvLS3f3uzwf28z5dV1fHpsWPJLHpoU2oNrue74XpRPe1rfUeXgixjQMNPn/AOBl/qP8ixj0+Zc4cn9R/kZ6ZGQWw1I6OHLX81k/qP8AIYsOX/hT/qP8jKmhImiXCUNpRcW/DTR1EgDAbCySUIuT4R6SH2PzZIRmtbBKST/m35X1KmbRy348pOdCJTPXz+xOeXG
uxr/tv8xT+wuof+0MX9k/zLmKPzXkXMPFPc9T/wCAtT/6hi/sn+YUfsLqE/8A8wxf2T/MdwPzWJilUG090tj6B0bT6fJ0jR5Z4MTnLDFtuCbbow4fY7PGDX6djdqv5t/meo6fpno9Bp9NKam8WNQckqTpc0TjHLexpmc+q/UtPhho5Sx4oRdreMUnyZeOJv6rA8+F41JRbadtXwUl0yS/82P4HJ/J8O9a7ielqsIjYxosrQSX/mL8GGtHJf6a/A5P/pfN/wDr/wD4CEg0hy0sl/pr8A1p2v8ASX4B/wDS+b/9f/8AAQokqKGywuMW+5OvFAGe/Hrx3moYXEBxQ0FozBEoipRRYaFtE2ErSiInAuSiKlERKbW4O6Hzj7CmjXGuJsDbRykQ0C2dmNJpqmxqmVkw0zolSspnXYtO0EjQ+uYDVjKsFoDIki30nVLTar4c3WPLS+j8P+AiSETWzKxq512Kevm9wGk906fqI0mb42kwz5bgrfutmOq+Nmej0uuTa2aAk+3jgKm1T2Fzi99k17FKhWRRycOmValGbT5/eWm4p00kwZw7la5XAqcvHYkn4oPJjuDXhnY00knyhySaom3sK3qti2LMXaEJNZZKtkx0b2IlIOpdYXJPdKyvpdbDKnFtWhPWdU8OBwT+aZgYM2TC1NSbSe5hvzfnXA9XNq9vKFTaSRV0utjqFFJ/NW6GZpqKSb8lTySzqoVqZXUU92wcbqaS8IW59+Ry8LZEfEUE5t8cGd1/bSQzPl+JnjBPaK3LmlSScqMvBc25PmTs2NPGkl68kTXVWL2BPstrdjGrR0UkkkSejjPM8YW+1fKqVIrOr3/AtZbbpIWoJO+X6sjq++i4Qcnb2XoWYJJbVXsAkrGK+ECUtpIFbIlqnfLOatCtClqE5Nt/cZepx2mbWWFoztRDnY5ddpV5jX4U09jzmrxuEmez1eK07R5vqGn52JnpFYy5CiC4tMKJpUNMiRJEgIqfBVyeSxkexWmxhXnwJG5GKLhuuhkZi2cmPgWVMYp+5UjJjVIzuQsqZKmITsJPYi5JYUglIQn7jIvciwLMB8FZXxssY2YaM2KGIWmFZlQHK94/U1tC/lSMbM6nD6mvoJJxW5n5p/gM/Wtj8FiLK0GqTRYg0zzq3hnKIcdiUFVoAqzx27K+aGzL8olbMudhz0VjIV48yktqZ6nQ5I6nSdmRJpppp+U1ueb1ENrov9E1PZPsb2Z3+HfvrPN5ePOa/SS0esy6aV/I6i35T3T/AAKjR637WaTux4tbBbx+Sf0fD/Hb7zyjR131S1OUK5Lmm5KiW5b0y3NP6Q1cKtFpIrYPBcitjl39XAtAtDWgGtyIaYKkLyq0Pitgci2GcY2rhaZlZobM3NRC7M7Pj52Lx9FYeaFMrSW5o6iG5SnGmd2b6Iqg4OnRzRFUV9CzCQ+OSigpNDFkJsOVoLJ7l7o2ifVOp49NTeKPz5n6RXj73sYizeFu3sklyfS/sx0p9L6cnmilqc1Ty+3pH7l+1scz/a57a85ww4ZTm1HHBNt+EkvyPlXXeqy6j1DLqLai3UE/EVx+f3npPtx1vsj/AIq00vmaTztPhcqP38v2r1PCN7NsqzqdX+kN77si16o+lfZPommxdDw5NZpMWXNnvK3kxptJ8Lf2p/eU/tv+gaHpsNPp9Hp8efPPaUMSTUVu3aW1ul+IyufXXgGj1X2D6O9V1D/GWaP8hpm1jtfrTrn6JP8AFoyeidF1HWtYsOJOGGLTy5q2gvRerfhfwPqeHDpOldOjjh24dNp4ct0kly2/X+JXwZz/AGsZcuPDDvy5IQjxc2kr+rFfp+j/AObwf2q/M+Y/anrz63qljxprRYW/hxa3m/8Aea/cvC+phxxw/wB1fgLkh3b7X+naP/m8H9qvzO/TdH/zeH+0X5nxuGKD/wBBfgW8eHH/ALi/Ai6kH7fWf03Sf81h/tF+ZYTtWnaZ8gz4cawSagk69D61p/8ANsX9BfuQS9VL15T
7Yq9fpv8ApP8AeeerY9J9rleu0/8A0n+88/2GG/8Aao19U9Wv5Cf0Pqmn/wA2xf0F+5Hy/WRrBP6H1DT/AObYv6C/cjXx/Dwwuufaf/FGu/Rv0KWe4KfcsiXN7VT9DKf2/S/2VL+3X5FX7br/APG1/wBGP72eVyeSpr3wrq9eyf8AhDiv9kz/ALdfkcv8ISf+yp/26/I8M+TkX6L9176H28U+OlyX/fX5FvH9r5ZOOmtf95fkfPcE2mbGly1VnP5d6z8Oar2UPtJklx09r/ur8hq69la/zB/2q/I89gy7Lcu457HFr+T5I0la663mf+oP+1X5ErrGd/6g/wC1X5GdCfuPhP3Mr/M8prq6rnfGhf8Aar8h/T9c9a8qeF4niaTXdd2r9CgpjehO8uu/6i/cdH8b+Rvyb5oNbL/NS+hVTLGZ1hl9CpF2Z/zv95//AAGHEIk4TA1YDQ0FqxcIhxFyiWGhbQuBVnEROJckhGSIiqq0LkOmqYpo38ekVASBSDSOzGkmwGpWKiPhujeUOS2OaDrYhopZEkJlEstCpRALnSM1Rnglwn3L6Pk091una9GedhkeHLHLG9uV6ryjcxZo5ccZ43aa8Hd4tfrJHqaezf3HNOtt/YV3Rvfknv8AR2a9LoZpN21T9yYqvY5zi9mDuuHaFafRNNNMYnTBTTVHNpEd4Ez2afqEmq2FzaeN+24nUZ1j0rknvVEW8DD6pkebVSbdqOyKKfgbOTncm+WKhu2eXrX610LWjn8PIpL7y9mn8Zt3slt9Sjig29kVOo9bwaKDxYmsubdNJ7L6s08ct9RrnNt5GlKax41bSSVttmbrOqabGknlUmuVHc83m1mq1028uRtPhJ0l9wh6d+Wzqniny16ni/g71O16NfafFjpY8DdeW6HQ+2WSMk46aC+smeUenkQsM14ZrPHhvP4fPsezj9sdRk2ShD6L8w5/aLWONxztP6I8XGEk+GizGU6ps2lXn+Jj/wDV6N/afXxdPLFr3ijQ0X2n+I0s0I7+Yv8AgeNtvZoKKaaatbj9DX8Tx37l9Q0ufHqIqeOdp+hbuo2jy32ZxZpJZJSfal+J6lLu8bCs5ePG/keOeLdzKKNNWc0Sklwcws9OcrJG0Us8LTNFq1RXywtHNqcNh6nHaexha7Baex6jUY+djI1eG72M7EV43U4ak2kVkt6NvW4Kb2MnJDtnwVKzq42A2c2C2UReRlafkfNlfJuhmrzdsAKXLBNA5kEs4A5bBpgIJCBqYaYpDERSHFjYsUg0yKFmDHwZVix0WYahrSkFYhMNS9zKwnZd3H6mlorSTXBm3bXszW0dOCMvL/qc+tTDK4otYylj2ou4+Eebqe20NXAxC06GJ2I3SWxUytXRbyOo2UM0krYypGZJpoq4JvDnTXqOyZFXJSzZEnd+Tp8XZWWnr3CGv6dPDk/VyQab9Pc8BmxTw5Z4siqcG4te6PY9C1ayY+xsyPtXpPg62Opivlzrev8AeX5qvwPRz7ite515+ty5p1umUy5p/Bp/TJq6fgux4KenLsTl8n1pE0Q1sHQLIhpgtgcmyYyK2F5eBw4zs/JSy0y7qHyZ+V1Zpk2fqIJtspThvwaGZ2Vpxs6s+i4pONAtFmcRLRpKOFNEUG1QvJtBteEVEvXfYnoT1OddV1UP5HG/5CLX68l/pfRePf6Hp/tJ1vH0fRtpqWqyJrFB+vq/Zft4LnRUl0TQJKl+jY9kv/pQ3LodJqMiyZ9Lhy5Eq7p403XpbQ2knp8czZZ5ss8uXI55JtylJvdt8svdB6Y+rdXwaam8Kffla4UFyr99l959UWh0ONWtJp4JeVjiv4FiEYQVQior0SSQF+UpKMUkkkl44SPk32n6n/jPrGbNB3hx/wAni9KXn73bPbfbLq3+L+mPTYZVqNSnFU94x8v+C+vsfNGk1XgC1f6fRemdX6b0T7L6CWaUY5MmFTWLGk5zb5de/qzx3XvtDrOs5O3I/haZO4YIvb6t+X+z0Mq
q/Ci70npeXq+sek0+XFDKoOaWRtJpVdUnvuPvam230zktxkEbfU/sn1Hpeilq80sOTHBpSWNttJur3S2v95jwViqbOHY0XMUdivhjwX8MLOfVMGoh/k09vB9T0/8Am+L+gv3I+aamH+TT28H0vT/5vi/oL9yL8d9NMvNfapXrcH/Tf7zCcD0P2mV6vD/03+8xXAx3f8qVntRzYlkg4Phlt9a6zjiox1zSSSS+FHhfcL1FwxSkluket0fR+m5tDp8mTSQcp4oybbe7aTfkvx/q/BJXgeo6vUa3L8bVZPiZElG6S2XHH1MvImm0fV39n+kPnQ4397/MW/sz0R89Pxv73+ZrJz6VxXyV8nI+s/8AhfoX/p2L8Zfmd/4X6F/6di/GX5ll+K+Vw2ZoaadUfRF9mOhrjp+P8ZfmHH7N9Gjxoca+9/mZ7x+j/FeNwZKSL2PJseoXQulx/V0cF97/ADDXR+nLjTRX3v8AM5Nfxbf7XI85GY+E/c3l0vQLjTx/F/mEum6JcYI/i/zMr/C1/wBhsWEzsMdRgnklptQ8ayNOS7U7a+qNtaDSLjCvxf5hLRaZcYl+LHn+J5cXudBnYs2qprNqHkTXDil+5DoyG6zDixYHLHBJppXbKkJe5yfyM7zvm72mtp2HYiDGJmPTGccjhgDSFSQ5oXJCImS2EzQ+SFyQiVZxEyiW5REziKXhWEUEkEkTR1ePaLHRW4/GKSGw2OvNBvhENIlcHM3lUFq9hUojWC9xhUmgcOoyaedwdpvdPhjsiEKNzQTVz7gamPV48iXc1F+jGqaq09jIauaS9S5r29PpI9jp1yg8f8u6vLEz2t96fIcZbcniNR1HWqbS1M0k9qdEYPtFr9O0sjjnivE1T/Ffkdc3KT3amvUXkyKrvdGRoer4tfivG0siXzQb3X5r3GZcrq7tka0a89VBwac+TJ1fUF8H4La+Vv6lXNmcW2nSfj0MfPklLO3ezRjvfYuLuPUKSauqZYwuL5a9TKxqn9Sv1XVSw4Fig2nPl+xy5z+tch5naf1frjSlptG6hxKa5f09EeceRt22Kc23bYNnqYxMTkej4pMz0t49Q4cUPWt9UZ1hJ2O5j0Mefc9NbHqMcuXRYi4NbNGHGTQ6GRrhsi5duP5HfsazhFu9jo472KEM8o0220aGnyLIk0ON5rOvhixJO2Nx4VknFJbtpHJW6RsdC0L1Gsxuvkg7bo0z/wBZebyZ8eLqvW9O00cGmhCKpJIupERSSSXCJLkfIb1dXtccccNKGhc1aGgtWY7yFDNC09jM1OO09jbyRtGfqMdp7GFKvNazDaexhajE03ses1WHZ7GJq8O7dEVGoxrIbItgtmiATYmXA2TFMIZEluCxskKaNIEM45nFByCQKCQgNcBoWg0RSMQaYCJRFB0WNjIrp0MTM7AsJ7BKQhSDTM7kHJ7o2NE32oxse7SNjSLtSZz+af4nPrVx+CxCVOipjkqRYxu2jzdNYtw3QcU0xcHsNgQp2Z1jZjZ8m7NfMm4NL0MXLBubRpiTqdK2SbfBXyQbRe+E34JeBtcHRNyM7KDo+Z4dQk+Gzf67p/03o2Rw3njSyR+q5X3qzzfa8WVNbUz1vTcyzaZJ7ujs8W5aePnHzu97XDLumfAHVNL+hdRz6eqjGVw/ovdfl9xOle6Oj+kfK2NOXorgo6azQitkcnk+riWiGtw2D5IikrZCcvA7wIzcMIIz8+9mdm2bNDNyzPzeTbBqeR2xTWwUuQWdUMDViJosMVNFQlaSFySaafDHTQplwjI6vVwiox1moSSSSWWSSXotznqdU7vV6h3zeWX5imy50rp2fq2vhpNMqb3nNq1CPlv+HqyvZNf7I9In1TqC1WpeSel00k33ybU58pbvhcv7vU+h6rU4tHpsmo1ElDHjTlJ+wvQaLB07RY9Lpo9uPGqV8t+W36t7s8F9revf4x1H6JpZf5Jie7T2ySXn6Lx+PoFX8jG6x1HL1XqGXV5bXc6hC77Irhf
+/LZRCZAmaGO6drZdN6rpddFusU05JeYvZr8GxL4FzVppjn0n23Ljw6zSzxZEp4c0GmvDTX5M+R63RT6f1DNpMn62KbV+q8P71TPon2M1v6d9m9M5O54U8M/rHZfsoxvt109RzafqEF+v/JZGl5W6f4WvuROvTTU7OvMYY8Glghstilp47o1cEPlRyaqYXqo1pJ/Q+iYP5jH/AEV+5HgdXD/JMn0PfYP5jH/RX7ka+H4uMP7RK9Vh/oP95jOBudfV6nF/Qf7zJcTHyf7UVUzYlODi+GqYP6Z1LDBQx9QzxjBJJJqklwuC1KOxVzRtMedWfCV8vVerxuup5/xX5FLN1zrULrqmf8V+Qeq+THKVcKz1+l+zHSNRodPlzaaTnPFGUqyNbtJvyb4uqXLXhP8AxF1z/wBVz/ivyJX2h65/6rn/ABX5Gh9sel6LpWr02LQ4nBTxuU7k3e9Ln7zz6Rt2otsaS+0HW3/tXP8AivyPX/YbqGt1+LXPW6med45wUXNrZNO+DwKR7b/B1/NdS/pw/cxS9Vm3r1mulKGh1E4S7ZRxSaa5TSdM8BDq3VWlfUs/4r8j33Uf/wAu1X/Rn+5nzHG9lv4MfJb/AErVa0Op9TfPUc/4r8hy6h1F/wC0c/4r8jLhOh0ZnPdb/wCp60o67qL56jn/ABX5DoavXvnqGf8AFfkZanT5LGPJ7mWteT/p9auPUaiUHDNqZ5U3dSaLGOfuZuPJZbxz9zi3+tXulStCEh0ZFOEh8JGSllOwhUWMW5RpYDQZDQAhoBosNC2hEruIqcC04i5RJoVGqIodOIFU6Hm8TYFKhkdgaJR240RqZLFphWdOdBzAYTYMjSUFTAgt2/QOTISqDZO7zJV2mh36hLxZPW51HtT4Q7psLyt+hR63O5v60Yfx56tE+PM6h1JspT5LeodtlWR35+JAnKElKLcZLhp0195raDXazJDI82VzhFJLuSu/r9DKas0dOvh6PHFcyuTf1/uDd9FU5s+Sbdvb0QGGM8snSbo6rbsdppqD3OX/APoF8GcWm019TznVs0p6uafEXSR7KGSE1tTPK9f0jhq5ZYq4zd7eDb+Pyb9t/F9ZHccmBVMI73fi0Vkpg2SmDolGmGnQpMNMmts6PUti/wBNdyaszEzX6TBNtvgTs8Wu6a+HDbVnquguGCDxOk5O0/c8tPL2Ko+C90zqFzWPI6a4ZpOc4y/l+LXl8de6RxX0edZ8Cle62f1LBUr5qyy8rjjjhk5kMk4mwFSVoq5oWnsXWJyRuzn1AxtRj2exj6rDd7Ho8+PZmXqcV3sZWJrwZDVrYOjqDrJXkmgGWpQtCJwaZU10yGhckPaFSRpAUcc0cWHIJcghIQEhiFoYiKQ0SiESiKBBJgoJcEgSYaYtEoXCWcL+ZG5pN4IwML+ZG5oZ0kqOXzz0caeOPBZgqoRidpMsRR5m42h8XSGxdIRBjk9jNSW72KmTCnNtcFtK+QWkgCqsNeCXjSXA+gJKxwuMrVwq2aXQc9S7G37FXVwbi/YT0zI8erVulZ2+DTP5o/7Y6RKWDWRXP8nN/tT/AHowdNyj3XVdP+m9IzYkrk4XH6rdftR4bDynwejfg3PbX0vg0Y+DO0r4NGPCOXyfRBsGtwmQuTJTvAjPwWGtivn4Y4Izc3LM/N5L+byZ+bydHjUpS5BfAUnuC2dMMDFyDbFSZUKlTFMZNimy5E03SaXPrtXj02lxvJmyOkl+1t+EvLPqfQei4ejaJYYNTzTp5ctfrP29EvCPM/4OIxll6lNxTkuxJ1uk7tX9yPcThDJCUJpOMk00/Kfgd9elSPD/AGt+0yyd/TenZLhus+WL59Yp+nq/uPG2vVfifXYdE6TGu3pulVf/ALS/IfHp+hh+po8EfpiS/gIrOvjaTm6im36JWFkw5ccVLJinBS4cotJ/Sz7HPLpdMrnkw4kvVqJ4n7ea/R6vHo4aXVYs8oSm5LHNSq0qugKzkeNbBOs4EV7T/Bvqu3Ua7Qt
7SUcsV9Nn+9HrOvaNa3o+owpXJQ74fVbr91fefOfshqP0b7T6Nt0svdifva2/akfWOVutvoGvbTPuPlemjbTXk2MMKSK2bTfo3UdRgraGRpfS7X7KL+KOyODX0SE62P8AkmT6HuMP8xj/AKC/ceK1y/yPJ9D22H+Zx/0V+46PB8Ux+tq9Rj/ofxMto1+sq8+P+h/EzXEw8n+9NXlEr5Y7MuSWxXyrZkwqxtev5CaS5VH0rTw+Fp8WP/dgl+CSPA/A/SNdpMH/ABM0U/pdv9iZ9DOvx/6lHzf7eZfidehBf+Vgivvbb/ijziRrfabN+kfaLWzTtRn8Nfckv3pmYkXWV+pirPaf4PVWPqP9OH7meQjE9j/g/VQ6j/Th+5hFZ+vUdQt9O1SStvFNJeuzPmcMGdJfyGXj/cf5H1STUU22kkrbbpJCP07R/wDNYP7RfmFz1dnXzdYc/wDwcn9R/kGsWf8A4WT+o/yPov6bo/8Am8H9ovzO/TtJ/wA1h/tV+ZH/AJxP4fPVjzr/AMrJ/Uf5Dcayp74sn9R/ke9/TtH/AM1g/tF+Zy1ujv8AzrD/AGq/MV8Mo/H/AMvFY8lOnaa5T8FzHk9zN1eVPqOpaaaeWTTTtNWxuHIcHl8fEytjHOy1CZmYZ35LeOeyOLWeVpKvwkPiyljmWISJUsHARYYzdQDQZwwS4i5IsNC2ibCV5R2ETjRakhU0TSV1zTJRMo0yEaY3wkkpkHHXnZCIkdZDdo3mgVMhuoJe5MwZvZIjza5kq0OnJLHKRg9Ylc39T0GlXbpG/Y851N3J/UrweswX4wcztsQ0WM0GmxLVHZPiSntf0NNpRhCPpFL9hnSXJo5H8y9kv3Br4moUBuPTyljyZa2hVk4Y90kvU9F0/RRnppYmlU01deTk3ffII81p83dl7Gq97LOr0cdRgcZq01sxebSz0+scWnV0XsPFN7ejJvq9jXPqvC6zTPDlcbumVqPWdb6XcXnwq15XlHmMmNxdNUel4vJ+478Xs9E0TTJSJpmvXTmdckwkiUSkJvnIsatpG/0+Cx47fkwsezTfqbeF3BOLdPiybXX4Is5Xb2bV+ohZHGVp7pj1JNU0IyY3dx3QTrtz/wAej6D1r4eRYsz52v1PY4csMsFODTTPlUE001s0eo6B1dxnHDmlz6+TWXryP5/8GX/7njexOBjJSimnafARUrwHHHHAEMCStDAWjPUCplhaZn58d2a047FTNjtM57Cr5gkEokpDIwMbWIVADJiTXG5ZUPYL4dkzRsfJBxfAho1c+G1wZ+SDi+DfGukrSQFDWgGjeUwhLkiiVyAEhiFoNEUjESiESiKBolAoIRJRKIRKEDMTqa+puaLeKMPH+sja6e+Dn809HGvhtLYsxltRXxqqHJbnlbaw+G41ARVIO6MliT2Obs5cAvhsAhukC2heTJT5EvMl5LkK1OZ7NPyZqfZnTXhlnJlu9yjmnU0/c6fDOVlqvaaDJ8TTRp3seP6hpv0XqmfElUe7uj9Huv4/gb/QtQpwUb8Ff7TaesuDUpc3CT/av4no5vcqvvKjpvBox4RnabwaMP1Uc/k+lkxrYFLcPwQluYLS1sVc/DLjWxVzrZlZDMzLkz865NLMuSjmV2dPjUypvdgNjc0KbaEN0dcNDYmb5CkxGSexUiaCc9xLnuDknvyB3GsiVvS67WaJzej1ebT99d3wptXXF19S0ut9Y/8AVtZ/asykybGGlLq/U5/r9T1jX/Wf5iMmq1GT+c1Wef8ASyyf72VbZKZJDai3bVv1e5y22WwN1zsOwafPqHWnw5Mr9IQb/cIiziaJpkkPGmpKSbTTtNOmn6pmjhzaltN6vUf2svzKOGNtGjghbRjvVhxo6XunNSnJyb5bbbf3s1ca2Rn6KG6ZqQWyOO320hWuX+R5Poexw/zOP+iv3Hkdev8AIsn0PXYf5nH/AEV+46vB8qmZ1ZXnx/0f4me1saXVFebH/R/iUWjHyf701eSK2Rclyap
FTUSUIOTdJKyckZ0DT/H628rVw08G7/8Aqey/ZZ62Tai2lbS2XuZfQdG9LoFOcay5n8Sd8q+F9yr9ppqUXJxTTa3a8qzuzOThPjmZZXqcrzprK5tzT5Tt3+0KMT1f2y6M8ed9SwQvHkaWZJfqy4T+j/f9TzMIk1lZxMY8HrvsCqh1H+nD9zPLRjbPV/YRVHqH9OH7mPKs/XpeoJPp+pT4eKf7mfM46bFS/k48eiPpmu30Go/6Uv3M+eqGyFo9K/6Pi/4cfwO/R8X/AA4/gi12epHb7EdQqvT4v+HH8EC8ONO1BJ/QtuIuUQ7SLTaZZw5Cs1Ryk0ydZ7Ca+HJxuXsWQxcOWvJew5eNzh8njVK18c9izjlfkzMWQuY5nJqcaSr0JDUypjmPjImKORwKYRRuoBoMhqwBLQpoe0A0TSV5RFONFpoW4k0EkNUG40D9S874XAsi9wmgGjpztIJg8tBS9zsauaXuLza76JpJdmi+qPN67ebPT51WkS9jzesjc2dfj9SHr4yM0E0ynODT4NLJHkqZYJnTmoU5IvPdp82kU5xaZdwpbN8KNt+xWvia0en4XOaSW7/Yj1uiwqEEkvBi9Gw90VNqmz0mGNJGEx3XV5z/AGzdfoYS12HN2pxk3GW3qmjJ1OkeHO1F2k+D1mTGskK8ppr6o871rFOOfujs3TsflxydXScWNSTjKCaappnlPtB0h6XI8mOL+HLde3sev0MHlai3TfqX9V0p6rTSwZaaa2dcMw8W7L2NfHv83r5A4NPglLY2OsdLyaDUyhKLST2Mtw3PTzr9Tr1/FJZ2F1YaiSokpUU3zP8AqKpmp06bk/hv6ozkm2avTMDtzS42QcdPjnL1d7FVPZ+oKTTpqqHTg2rSprlAP5kk0OR0SuUIyV8NE/ClHtnG1TtNeCVBpWraH43tT4NJE61x6z7O62Wq0jhk/Xxun7m0eJ6Vq3o9ZGXEJbT+nqe0i1JJp2mrQf2+Z/m+L/z8nZ8ojjmQDjSQccIBkrEZIWiwwJqzDUD5XGI+EAYRHwicVrFyh7BdnsNjDgaoexHVKWTFa4M3UYLvY33jtcFLPh52LzrlKx52cGm00KaNHU4abaRRapnXnXUlNHJUMa3Bo06EoJEJBJE0CRKIRKIoEggUchUhpkrgElMQMjsza6c7SMRPg1+myWxh5f8AURvYnsiwkIw00h+1HleSNodBpoMTC0rHLizJaVfBGR9sGwoi9QrxNLkAzc2ZOb3K8sl+SckKm0LcNzfMjK0Mpsq5pOti6sTb4Olpk48G2NSVNlP6Dmay02ei6vh/SOl5KVyilNfVb/us8toW8GpT4t0eywtZNOk901T+h3eOyrx848pp2rTXk0oP5UZ8cbw554nzCTj+DL2N/KY+UoeuDlyCnsFBWzmtWN8FTOtmXHsipm8lZDPyrkp5lSZey+ShqHszp8Zs/IrbTKWaPY2/BbyTVlbO7TOzJqc5pXuVMuT3J1EnF8lOU22b5ymicrZydtL1aQuybdp+jTNOE+gr/B0/PVf/APR/eNj/AIO8S/W6pkf0wpfxKq/wkZWtulQ/t3+QEv8ACLrX+p0zAvrlb/gjPmj9NXH/AIPunJr4us1UvZdq/gy7h+xPQsbTlhzZf6eV/wAKPK5Pt/1ia/k8Gjx//ZJv9rKWb7YdfzJr9NWNP/h4or9rTYuX/o7H0jT9A6Ppt8PTtOn6yh3P8XYWq6l03QYpQzavT4KTSh3pPj0W58lz9S6hq/8AOdfqcqfiWV1+F0IhBJ2kk/Umwfr/AIJLdhpbnJDIxtkWoOww4NDTw4KuGPBo6eHByeTRxo6SFJF/Gtgui6Fa7BknHL2dk+1pxvek/X3NRdHa/wDPT/8At/vIni3ffGkY2vV6OaS3apL1Z6zGu3FBPlJJ/gUsXTMUckZ5ZPI4O0qpJ+teS+2oxbk0klbb8HV4sXE9mzep754L0j/EotD9Rl+Nmc1dcK/QTI5t3urTJycMjQaP9O1ic1/IYWn
L0k+Uv4sbj089XleLG6S/Xn/ur8zZS0+g0tbQxQXL8v8Ai2a+LH90harU49Jp5580qhBW/V+iXuz57j+0Oo0n2ln1HLcsOaoZca3qC4r3XPvv6mp1vqGTXT4ccMf1IP8Ae/f9x5XJHvyyTVqzab9s9V9YhLT67SKcHDNp80dmt1JM8X1n7OZtFOWfSReXTPelvKHs/Ve/4mb0XrGs6LNxxr42kk7lhbqn5afh/sZ7vpvWtB1OP+TZl8TzintNfd5+qsvk0frT5/jgen+xCpdQ/pw/czc1XSNBqpd2XTxU3zKHyt/hyR03pOn6Y836PKbWZptTadUq229ycyyiTlWdb/mWo/6Uv3M8Eo7L6H0HNBZcU8bdKcWr9LVGIvs1iS/zqf8AUQtS07HmqBaPT/8AhrF/zU/6iO/8NYv+an/URP5qeV5ZoCSPVP7MYn/rc/6iBf2WxP8A1uf9RD/NH5ryUkLZ69/ZPE/9cyf1F+Z5zq2iWg189NGbmoJPuapu0mFlibLFSE6Zdw5ONzPfIzHNp8mW8diW1hycF7FPjcxcOTjc0MOTjc8/yY4uVq45liEjPxTui1CRz8aSrcZDUyvF2NTAzDmQmSMwtWA0NYLWwuAloBoe0A0TYREoi5Rrge0C0SCK2IcRrjRFFZ1xNitkVKztOryr6h51SVHaSN5V9S+91CaOr2wpex5/VwubPQ6xfIl7GLnhc2d11zkGmTkx3exVyQaNbJj9ivPDd7GudoY2WF3sFnn8PBBJbySb+i4LmTTqwup6L4em00rT7sMXS8Xv/E3l6OPQ9CnGekxuLW6R6DHweI+zeqcE8De6e30PYafKpRTsqK8d9cXEZ/VtP8XA5JW4r9hdU1QM2pRae+3BW5NZ4tg9JjeZJ8pnolwY+jw/C18oeE3T+411wYfxs/noYH2q6dHU6N5or548nzTNjcMjVeT7RmxrLinjkrUk0fKus6f4OtyQqqbOn5Xp/wAHfe5rICjG3wGsbb4NfpPSZ6uadNRXLfk1kep6nuqmi0E9RlUYqk3u34PTrQLRwUEri+WWtJpcWHH8PHBKS3t87FrLkhkhWROMltT8o0zGG/P75n4x82BNXBb+fcqSwtO0vqjVlFqV02nw/AEsSTTqk+UV+W2PLYpwioJKStPkKOBzlUZUvcKWJqb7d0wkpwacVfsORV1/covgtNJ8rz6nq+kZJT0UVJ247WeewwU5rfxwel6dieLSpSVNuw1Hlfzd9zJVwgkhmdeW4444QcQ1aJIfBOvhvmONbItY4lfDukXMaPNrCGQiNUPY6CGqJmuF9gjNiu9i8oAZMdrgJTsYOpw3exkajF2SbS2PT58d3sZWqwWnsb+PbOxjNA0PyY3CTTQujqlIKTJSJokXQ5EkEiJyJBsmxASZKYJNiA0zS0EmmmmZaexodOdyr3M/J/qI9PpZXFW9yynZRwpqKaLcG20eT5GsWY8DFwLXAUW2YVoYuCJK9mEtkc1bDgZ2fD81oWsPqjRnBPwLUEnuhy0uKqwpPgNwSjuixSXgCaXay5SsZeaPZlUl6np+lZO/TJN+DzWq5NboWbbtb3PQ8GvSJ6pPVMXw+pzaW2RKX38P9xON/KW+u46eHKlw3F/fuv3Mp438pfmntV+nR4GQFwew2HBx6N0nsVM75LOR0U875CUKeZ7NmZqZ1e5oZnsZOqfJ1eIKc5u+RGSTaDabYEo2dsNRzwU7KE4OLNecL8FXLiu9jfOiUKJS3GvHTJUDTsICQaj7BKFBqFE2gCgF27hpBKJNpBUBiQahYax+xnrRBSHY47kKDHY47mOqFjDDg0dPHgqYI8Ghp47o496VGl0rVavp+PLDBixTjkn33NtNbJVt9DRXV+oP/VtP/WZRwrZFmCKz5tycXFhdS18tvhYI++7Ilkz5d82Vz9kqS+4GKCod8uterTQ0A1uMYEiTL6Xq46Setlkttzj2xXL2f4L3K+u1OXVT7srpL9WC4X5v3G5ElbS3KuXybfu
2cTWbq9ov6GMo3Jv3NjWuoP6GXFWy8s79HCF+Bv6NCbTaprhrlBY47FiCK6Z2m1fUtMksGvyqK4jNqaX42X8fXOrxSTnp5+7xtfuZQihqQ/1TX/8AH3Vf9zS/1H+ZH+Puq/7ul/qP8ynRDQfqjtXX1/qv+5pf6j/MF/aDqq/0NL/Uf5lNoXIf6o7V1/aPqq/0NL/Uf5gP7TdVX+hpf6j/ADKE2Jk9g/VT2tN/anqqf83pf6j/ADMrX6vLr9VLU51FTkkmoJpbKvItsU2O20rbQkp0yLORNSs4Z00aGCd1uZUdmi9p220c3kz0Rr4ZN0Xcb4M/Bwi5jZyXDWLkGOiyrB7Dosi5UsJhWKTCTJ4ZlkA2c2SHMBolshskBYDQbAZNAWgGqGPgFkjitm8DtAryoTmW5Z6cryGvi97ib9WtWtvuMnJG5M1tW+foZ1W2dnkv+QqtKG1sr5FRcyFecb3KzS4oZY87FbU5J5Eu+TdJJX4SVIv5IFXLjuzozRxnYsr02oWWNquV6o9ZoOowyY01LlHlc+J77FfFqc2lyfK24vlG0rO9zex9DWsVchw1abqzx2Hqjmldp+5e0urlkyqraRF8nPR/t6iEE86yrzGn9S2ijosjap+UXkaYsaQSR8967pnk6lkUIuTcqSStvc+hIUtPhU3kWKCm+ZUr/E3/ADbyujweb/x1+uPGdO+yefIoz1SWKHLV2/7j076biwYYxwRUVFUjRohukO3h+T+T5PJe2sLLjnGpdqckqdeRkcK1WFJqsiVqzQy44Td8P9gl4WuE0/DW6Lz5M1U83Z/8sx4GsSUopO655FTwzqPbFNPzfBswwxap7+toL9HiqpUayytZ/I4wYadKbe9+gxYF37Ld+DXjpF3tpNt8+g+Gijdza+iH6PX8qKOh0Clk+JNVFftZspUqISUUklS9CSXD5PJd3tccccKoQcccQHEPg45vYi30b5lhfBexblDC+C7he6PP0xi5jWxYghGLwWYIyqxqIM4bDoomUdiTZubHtwUM+K72NnJC0ynlx8lSpsed1Wn3exnyhTo9HqMNp7GTqNO92uTpxv8A6zs4oUdQbi06ZDRt0gUSSwWASRZxzAnWSmDZ1iA7LvT51kKCZa0TrKvcnc9G9dpacEXIpJooaGdwRoLg8fyTla5+GrgZBV4E20MhK0YVoagktgL3Di7QAE+QGtg5cgN0ADwhU2dPJu0txE8rW7TKkTaTqIppitFqJYcqUXSsPNkTRUxNvMkt9zv8HWd+vRavPLPo2pb1TX3FXE9h2PHKWmVp01RWxulR0eafKc7fqxjdj48FbG9y0tkcO/qwZGUM73ZdyPYoZnuxQKmbgy9SrZpZWZ2fefsdXiCo4WC4FiiGtjo/RqcoewmeO/BelCwHjvwObDNlht3RCxVyjSeHbgXLHSNJslJ416HfDPTaPR9Ix9AwdQ6hi1M5Zcs4Vhl6N1t9ER3/AGXf+qdS/rL8zT2Hmvhv0Djjb8Hou/7Mf8p1L+svzJWX7MJ/5p1L+svzEOMGGP2HRx+xuLL9m/Gk6j+K/Mt6HT9C6hPNi02DWQyY8TyXllS2+jflozubRx5xYm/AePDT4LcMdpOh0cPscmtp4XhxtVsX8EN0WOkaXHly5c+oinptPG5X/pSa2X/v2DxxU8jkoKCbtRXCXoY7lklv9qh+KNJD4qgdPF5NZhwRVpu5+0Vz+X3l3Ngm80vh4JKCdKk6fuVnF/P6UQgqDWDL/wAKf4BfBy1/NT/Af51/xRTFyHThOFd8XG+LVCZFfARkexUyvZljUScccmuUrO1Ci+k6DKoJTyQbk0qbfua5z2WprD17qJSxx9i1r3vRb0PT9Jk6Zj12p1602Oc3BKULVpvzfszXMt+I4pwSSHRLi0vSFx1zF/Zv8w1p+kr/AG3i/s3+ZX4p8VYjUW4aHR5dPny6TqMM7wx7pKMK+nn2KSZNln0GEN7DOm6jCta9JrIReLUKoZGlcJeN/R/vA1uOekn
kx5FU4efDXhj/AD6BbYqTNWebS6Xpugy5OnQ1E8+Luk+6qe35lSXVdEk2+hw2/wD3v7h/n/5JQmyvNm/k1PT49H0/UF0iEvjTcPh/Eqqve/PBnvq/TVz9n4P/AL39xX5KxlyYts3seTp/Uej9Sz4emQ0uTTJJPvcnb8nnm9ws4mivySgLCTJpGx5Rd0q3RShyi/peTDZxqYU1FMtQYrFG4IbFNMysaHxY1MREamZ3KjkwkxSYSZlcg2zrF2dZFyBNgtguVAuZjTG2C3YHcc2SBNkNgNnWLgLybtFzp6+dspS5L/T1yzbwz/OJ/tOrl8zRTSuyxrXUmUseRW0zp3P8hfqJ8ipKxmTkW2aZhkziV5wbLcqYHZe7NYfFJ6ZS3ZVz6WCfCNXI6Wxnaht2awXMJx6dJqkafT8aWQo6fIm6fJpaVqM01wc/l7Kz/L0OlSSRfRQ0k00qL0XaN/FVwaJSIRPg78lUMVkdIayvltIx814CJZKYWPJb5Ks21Ibhe55/7v6T/a/B2NQjG+B64PS/j30pxxxx1E4446xdNxxxxIQQSQRQ5gNkt0A2RSr5phfBewvdGZhfBo4PBw6ZRfwlzGuCphWyLmNGNaQ6KGNWgYoYkSavOGxWyQsvyjaEZIAGblx3ZnajDzsbU4XZTzY7vYqXibHndRhptpFNpp0buow2nsZeoxNNtI6ca6ysVGQ0E0QakFkEtEMYQcccwDkx+mlWWP1K9h43U0+As9G9f0+ScE0zUg20jE6XO0kbePhHj+ecrXJ3KDilQpbsarSo5Vwa5GWKTaYadoDQ/NlXNOtkWZNUUc0vmYyqO9IXkmn6FfJN2xTk75NcxFp04pp7iMEezOnzuF3NrkBSamn7nb4kX69ho0paZKk9jGzx+HqcsPCk2voafS53BK/BT6rDs1aktu+Kb+q2N9e8tL8Jwu2i54KGnfzl5vY4vJPYhWTgoZvJfyPZlPKk7FIbOzPZlDLuzRzxKWSDbbR04gVyGw3FrwLa3L6aHuHGKZCVsao0gAHEVKFpllxs7sVU2lY5TamizafT/ZLSPVaJauL1E0oOfbTt72J/xj0r/wCX4f27/IHQ9T1Wj0UdItPpM2OMnJPLFt23frXksf461H/p/Tf7J/mb3c/6RD6j0r/5fh/bv8jl1Dpf/wAvw/t3+RYXWdS/9ndN/sn+YS6vqX/s7p39m/zF+5/0ELqHTXx0CC/77/I0ej6rRZ9Tqoafpi0uVaeTc1k7rW21fh+AiPVdS/8AUOnr/tv8x2LqWp7pf5JosfdFxcscGnT97J/9cz7QzYYqS28DHBpKMI905NKKXlvZFiOPY0Oj6b4monq2rWK44k3Vy8v7uPvOLGb5NcIOpxrSaXF0/G7cfnyyX+lJ7/8Av7iNLp3PFkyppQg6bfl+iHz6dq8km7x983bfddXy6GaiCWBaXTuoY1Sfq/LZW89t3ucn9KK0d4tJqNa21LJ8mNrlJctfff4BRw6ieDHmx6/PPHJJ2p8PymMxqb02PFkUagqSSpFbHDPodQnpYvJjyOpYfX3Xo/c1zuW/iUCcdRFf57qG3skpbt+hZwrNo05Z9RlzZpLbHKdqC9X7lmWKOJzy4UpzWy3T+HtuVGnbbbbbtt+WXda8c932YZylKTlOTk/LYnvxfGWKeWGO033TdIbIRlhCf60U/qjLN99puzYNNkg4/wCMtKm1W81+Y3VaGGHp+j02bWYMTxRce6bpS44soQ02PP1HTadQVOffLbwt/wAjR6itP1LJoscpdqy/E+G9nbVbffR14k/PqJYWq6Zp8ltdX0TaTaSlu9uOQMST+x2ktf6zPn6sy9ZjWPU5IOCjKDaarho29A9Evsnpv0/Jlhj/AEidPFG3dv2e1WVmSz0lkqK9F+A1RXoi4pfZ/wAarWf2X9wan0D/AJnWf2X9xP5oF0NVp+sV/wAKP8SqmanT1039D6nLp+bPkk8S71ljVc1Wy9zGnNQi5NWkr2HufALPBZcbi+f
D9C5l1ePqPR3+kZIw12mXa+508kfDXq/4/UPBpun5njhHq+H4mSkodu9vxzzZR6np1pM2fB3KTht3JVe1/wAQ9z6GrkzaHH0bpf6fDUTbw/L8KtuLu39ClPVdAaalg6h+z8yz2abW9I6dD/GOlwzxYalHJNJ262q9uCrPpWnkmv8AHOgX/wB/95V734Ku5c3RofZ3SSli1j0jyyWNJrvTt3e/HJlvU/Zrzg6l+K/M08vTMX/hzSaZ9T0ijDLJrM5/JO29k75V/sMt9E07/wBudP8A6/8AeO/fhXq3pZ9Ln9nusPpcdTHaPxPj1bfiq+88ze56LFptN03ofVsX+M9HqcmoUXCOKab2fFXvyebvcWk6GmHFi0w4rczqT8fKNDSrdGfj5NHSLdGOjjZwr5EPSsTh/VQ9ENY5KhiIStBpBw0olM6iCbkJs5shgyIuCdJinLcmbpCHOmYbwOndxHcJczu8z/A6d3e5HeJcwe/dFzxjqxyzS0CqLZmxdmpo1WJsPFP8xPqlr505GSstSuy71OdOX3mI8tM67jt6m321VkU0c2Z2PPTW/wC0tRy2i5nipTA6VClNWF3qi5FwrN5KOZXZbyOytk4KhqbuLtFvTanhN7lPKJU2nae4az+oz09joNUmkr3NjDkUktzwul1rxtNs9HoNbHJFNMxxbm8qJrlb6YSZXw5FJLcensd/j20cxOVbMcKyLYnze4GbmVOw8L3RGdbnYXued/8Akj+1/G9kPXBWxPYsx4PR/j1f9JOOOZ2E44g6yOhJDZxwrQ4hs4hsi0BboTOQc5UVpz3ZnaT5vge6NTTvZGThe6Zp6eV0cmkSNTCtkXMaKOGXBfxU0jGrPghiQMUNihGFrYVOFlhoBoVClkgVckLTNGcLK2SHOwiZWbHzsZmpw7N0b2XGtyjnw2nsaZrPUeazQ7ZPYUzS1enaTaRnuLXKOrOuxmBkNBNP0BZYCcyWQMBZKdM5kLkYei6Zka7a9Dfw5LSPNdKlaX4HpcCVJnmfyJ7aZW8avcamqFxdIKrOCxrBqmyX7ApNHOVCMGVuMWyjJt2y1mmmqRXSsaap5Iu7FNF+WO/AiWF3wazSeK9bENNNMsrC/QjJClwdPj2mxsdHnfbv7DutxdYp+jaf3/8A/DO6RkSy9t0a/V136GTV/K0/21/E7c+5xc+MfTO5mg3sjN0r+dmhexx+WexC8itFTInuWsjK83Ysw1DMrXBWa9jRyQUnYl4fY6JOQlFwsB40y68PsB8Fp8AaqsNboLsa5Ra7PY7sF01VQY7T67V9Nx5pYY4ZqbTfxYXVLxuG8e+wvVQrTZNvA87svo251HqOrwLSvT4tNWXBGc+7He75qnwVF1fqT/8AK0n9k/zLms1ut0uDRQ0uWEIvTQbUoJtuisuq9Vf+sYv7NG2/JJb/AJA/RdS1efp+uyzxadZcMoqFY6Tvm1e4qPUuqzVrBpmvVYG/4jtLrdTrum9Qjqpwm8copOMUtnuI6bny48k9HLXz0974W0mrb3TbXkV3bqSUjVr+qv8A8jT/ANg/zGx1nVH/AOTp1/2X+ZGaXVtPJrNrppeH2xpr60WdLPW4ks+s1c5pr5MNJOXu9tkTNW2z9X1/8GTknmzKL1EIxaTXyxaTK09HgSc5wSrduzQnLJlm55HbfC8L2QpS071cYanLGGOHzNS/0n4X08nH78nk5KBaHTR0WneVR7c+oXHmEPH3/wDvwPxwpcEz1GknklOWsxNv67L0JnlxRxxeLLGffJRTS4vyaeTG9X/4n/8AA6m5KEF3TfC9PdjEliTWN903tPJXHsgkkouOO+1/rTfMn+RVeoyafJJZdPLJidU4Va+4vEmf8c33/wBNXyYs+izvU6Nt3vkg3an9ff3LePJi12J5dPtJfr43ymTDVaLM6jnWOT/0cqp/tK2r0mXBlWq0klDLV7O1JejRrJZOb+BMtthORj/0iGrwLL2PHmTqcPf1RU1E1DFKT2SVkXPLwI0cvhw
6hrvOOHwoP3e7/gU+oZJ4Oh9H1OJ/ymFd696otalPT9CwYHtPKnlmvruv3r8Cp1F//DnTE+Hjf8DqnqcTVX7R4oS1GHqODfDrMammvDpWv3ftJh/+jtJ//cz/AHsT0/UYtX9m9T0/UZYQzaSfxMCnJJyTttK+fKr3Rc0Oo0+n+yOlnqtI9VB6iaUFLtp297L59JlJ+waa9C6uqdJ8dCn/AG7JXVOl/wDoc/7dkfn/AOQPob/ybrP/AEY/xKLafJr9P1ei1Gg6r+h6B6SccK725uXdzX0rf8TCcg1PgN09LqmgpJf5RD96LP2iyLH1bVOXHev3IpaWV9V6ev8A+TD96D+1Mv8A8X1a/wDrX7kOT1C/poa3pXR9Fm+Dq+qPFkpOnhb2fHBVnovs60761X/Yf5Dvta1/jl7f+VD+J57I9nt+wLZLwWvV59L0dfZnRYp9TrSxyyePN8Jvudu1Xirf4GQ9B9m3/t1r/wDx3+QWqf8A8EdM2/1ifj3kK+zmLTdShrOl6jFBanJBz0+Zx3TXKv04f0sr+y+1Z0vROj6yOf8AQOrvNkw43kcVhrZfX3PPxlaT9Tc+yMZY9f1bHki4zjo5Rkn4adNHn4PZfQWomrMXsNgIg7HwMaR2Nbo0tGt0Z8DS0a4MdVTXxfqofETjWyHxCNIYkGkDFBjNxD5JBfIwhugGyWxbYuEHI9mVZOmPyPYqye7IuSF3Ed4DYDZP4Brn7gqdtCnI6DbmvqV+PRNHG7o2dKqwGPhXBt4dsH3HN4p/9xWWD1Z1f3nnps3urvZ/eedmz0MxGvofiNMt6fUp0myhPkX3uDtM0/PS7xu9+1phLJaoysOspU2WVnTVpon8rmlqTtFbIyfjJrdick0/IcX0nI7sqydMfkklyylmypWky5EWrEZ+5d0eqnhmpJtryjEx5X31ZewzTSMfLjjG+3uenayOWCafJs45ppbngumap4cyi3UX+xnr9Fm70qdi8e2mNf1WlYue6CT2BktjfV7GihnQvE6Y/OitB1M4N+tIrQxPZFmJVwvZFqD2O7+Pfap8EQ2S2C2dmqHNnWC3RCkY/oDs6wbIch/odE2kLnOkDKfuIyZElyTdE7Lkq9ypPJudly3e5Unk9GZ2l14bC+DR0890ZOGXBoad8GGkxtYJbIv4ZMzNO9kaOF8GFWvY3ZYjVFbGPixKG0C0GnZDQETKImcLstNC5REShkhzsVMmPnY1MkLRUyQrwBVk58KaaoysmmSk9j0OSCa4KebCm7o0zrjOxhz0/sIlga4RtywewqWn9jWbTxiSxteAGjXnp9uCtk03saTyQM9oGi1PA1whMsbW9FzUpL/TZ9rXsz1WkyXBbnkNE2ptM9HopvtW5yfyM9XmtiMxsWVoSQ3uPN1GsNlOlYmeSkdOdIryn3bWQLXNuTHYoXVgwjZYxrYCgHBAvGvQsdtsGSoUVxWcK3QjMqTLrWxVzxVM38d9ppOiyKGoX1PTahfG0ORLdyg1+w8jFuGZP3PW6KanpFe9cnp4LLz+le9+peTtFHHB48s4PmLa/BluLtGXlns4jI9hLGTYpuycw0NEpWQtw0jUI+Gn4IeFeEPig0k/AGovD7APE0+DS7E/ALxIVgZyg14FauH+TZNvBqPD7FfJjwSmsOonLHCSdzUbr2omT2F3U6vU6fBo4YMeCaeng28kW3deNxC6jr/+Do1/23+Y16nR9mOPwdTqXjioJySgqXHoctXlX8xotPiXrO5v+Brvd7/tJDHo9TqNbodbHJhxxlGUUvhRa7vzKuo0acezPjabVpNblrSa3U5sGrhn1CjkTSx9iUWl5onR5pJTyZFKedSahKbtRXr9eTPy/nVl7/QTp8j6Zo1HqWR5U5J48bj3SgvVv/3RZnjWX/KMeRZsc91NeF6Cfh97byPvcuW97ExxZ9DN5dG7g954nw/yfuRfLnyz8X1AuKBPwot24pv6DITx5sccuOLja3i/DF4smdKSenUpW6cnSrxsYTH
+Vlpjjhg3Sgn9EO+AlHeCSXCYqtTP9fN2L0xqv28h48McbbVtvltttmnPHJ6ttPiFlxvM8TklNK0m6v6DJY2uVsKz6XDn/nIJv18iFpdRgX+TaqaS/wBGe6/aXn/zs5fRGZtPiyJrJjT+qFrHHHBQgqS4Xoc9ZqMe2q0imv8AfxOn+D/MnHqNHqJKOPN2TfEMipl/+d//ABvQRJJXSKeoxvUZsOmXOWaT9ly/2JlzJs2n4dFPLFrLHLGcoThdNOmrDF5fYD1vKsmbMl+rBdiS8Jf32UOrT7fs50r3xv8AgHq23jm27bTtvyU+tZ8cvs/0qEMkXOMGpJNNrjleDpz7lqawk1bdKzei/wD4L0j/AP5U/wB7PPRez+h6Dp+o6Zn+zmDQ6zqC0uXHmlNr4bk9264+ppJ2UozlILuL60vQv/X1/YM56XoX/r6/sGL80GdAlej62/TAv4mW5mtpsnR9Bouow0/VVqMmpxdqh8Nxpq6397MCWRJNvwPUKtDpOny6zrekjiVrFkWWbfEYpptv933lj7W6TJHUy6hjay6XUNOOSDtJ0lTa+mwK1mn6b0F4dLnhl1utV55wd/Dh/u3671979il0brC6b36PWR+N0zNtPG1bhflL96+/kcn9D0t/aHqOLUdYx6vQZ1NQjBxnTq074Ymf2s62ntnw/wBghmbpXSZZJPD9otJHG3cVPdpejd8iX0bpj/8A6k0H4P8AMfNF7R/4u65x+kYP7BHf+LuuLd6jAv8AsIj/ABL0z/5l0H4P8yX0TpjVP7S6D8H+Y+aHtudH0HU8ep6l1PqMMKWq0jqeKSak6u6XqldniYcL6GyuidMim4/aTROlwk239FZjpb7ceNidlT8bLGPwVsaLONHPolrH4NLR8ozcfg09FyjDRtfHwvoOgJhwh8Co0hseAgVwEM3PZAvglgvZDgAxcmMbEz4YyIyySRXbTYeeVWVXPfkRHPgF7AqZzkmhgLZOJp5EJlOmHp3eVIWviWzgVtI2ce2D7jH0quSNjjA/ocfg/wBq0y8z1iW0jAk7NzrLpP6mA2ejhlfoZPkTJhTl4Qt7msSFtp7MmOaUfILBZXAsLVOt2BLVP1K7AYvzB2mTzt+SvKTbJZDKkCE6aaLunyXW5SoODcGmidTsJt4LklR6vouR/DSk90eZ6fDvxKXqej6cqSPOuvzvh5nXoYO0TIXidxQx8HVNdjf+lTMrRT4mXsvBSmqlZzeX6iruB2kXIcFHTvZF2HB0fx77ip8GwWwmwJukdnkvDKnKkAsm4GaVCFlVuzn6nq13+4Msi9So8tcMXLN7h+i6sZM3uVsubZqxGTN7lTLm9ybsunZM3O4h5LfIh5LJi7ZnddJ47C72NLT8ozNPyjU0ytorYjW0/g0cPgztOqSNHC+Dmq13HsPiytjY6LEZ6ZICYaGENAtIMhrYRK8kIyQTVFpoVKIuBn5INWVpL2NHJCynkhTAuK/YmC8SfgbRND6XFaWFPwInp1vsaNAuCfgctTxkZNMvQrz0r9DbljT8CZYU/Bc0XGJ8L4U06o2tA7girqsNY2643GaDIqSHr/LJT1WxjdVuMc68lZT2s74ls4dZadNyTb2Bxptg3bQ/HDdGN9D6fjhsNSpkQVIJO2QuD8APkLwQ2kEMuSK2VXZackyvlSdm2U1mzSWQ9N0md6dx52PN5dpG10XLdJ/Q9Lx31EZ+q+uh8PqGXalKpL71+YEJlrrUHHPjyeGnH8Hf7mZ8Z7leSHDZMW3vRzlYN7kScUbEbFCoeBsRmYkMSAQ1LcDSkT2hJE0ALr2IeNPwhrRFEUEPH7HKPsPqwXAw1ACMEnaSt8sdGCIjEdFGNDox9hqR0VsGkBoSS4VBpHJElcPjkiaOSJLikUQwgWMgSWxVy4sbabgm1w6LcuCvkHLxKrNlTK+S1kZSzM0ySnnaaaZ5zqLSnSSX0PQaiVJ/Q83rZd2Z+x1YTSU6GJr0QlMKzYju5eiIcl6IV3EOYAblQLm
LcxbmHCNc64FOd8gOTbIsfCE6fgGl6I6yLAJpegSSfhAodjg5MVodGF8IdHG34HY8O3BZjhS8GVqplXhj4LUMfsHHF7D44/YztV+QY4bo0dJFporQhT4L+ljujGp4vw4HR5FRWwyPJUVD0EDF7BFGhgsJgSKBbYqb2GTaEZHsMlXUPkouVMtah7MoSluHCM7/AHOeTYQ5AynS5HImpyZN3uWNBPuymXkm2y90l3lZPknMVPfb0+jVtGtkdYH9DL0S3RqZnWnf0OP+P9raPIdbnSf1MFybNjrj3r3Mbk9LHxhfoWQEQ1sakCQDDYEgAJC2GwGMBZATIoAhKw8cG2kdGNssYYJ5EiNXgb3TIdsEvFG7pV2tNcGToMbpJI3dPjpKzxPN+rrsaZ+NHDNOKHN7FbFshzex1ePyX8+1k5mU8j3LWVlObI3squaZ8F6BnaR2kaMDq8F+Hn4YLnwMFz4O3yfDihqJNWU3Pd7lrVcMy5zak9zi1riKbLIJnk9wZTvyKk7Mr5E9DPK9yvKTkxrVgqG5H76Awi3yWccOAIQLEFwXKHhcKqjS0zVoz8KujS08ODfZxq6fdIv4uChp01Ro4VaRzVSxAfEVjQ5IQNQSBQSGEnNWccMAaFyQ5oCSAK8kV82NNXRbkhU1sIM5waOSGzjTYpqmSTqOaOTOsABoFwsazqH0cU9RjUsTVcoytHkcJ0+VszfnC4s8/wBjhrMkPSTNce5Yz1GvCdrkON2IxXSLWONmG5wQ7DG2WYRpoDDFIsJVTRyaaRPBKVbktWia2M1Id0JnJt0hz4EtXNsAVNuPkTPNapssZINop5cTbs6McTScsk2Xul5FGa3p2ZuWMo+BmhyOOVb+TvxPTOX29J1iHfo+9cwal/B/vMFOmelSWo0Tg91KLi/vR5VtxdPlbP6m19xp/ZrlsRGVsRPLSJxZLMzXoMdBlXHMfB2I4sxGxExHRAzEEkCglwI3Uc0EkTRNICVEpIKjktzKhHaHFbnJWElRjqAcQgYoNIUU5BJEIlFw0kkElw0MEJgsCoJ8FfI9h82Vcr2JSq5WU8z5LWZ7GfqJUmbYTVHV5KT38HnM0u7K2bGtyVF/QwpStt+p2eOIqWyLBb3Is14B2C2RYLYcDmwGzmyCgIhs5ENgSbIvcGwo7sKZuOPc6Ro6fDsthOkxXTrdmrjx9qRjqtMZBDHS4GqASW4aRlWvHRgMjE5IYkZ0cTGJc0y3RWii5pluiEWLcUGiEgiokcHsMFxe4dlBz4FyYbYtlQFyZXyPZj5FfJ5KKqOpezM5vcv6p7MzZPcciUt0hOSfgKUhT3LkTSsj8mn0dXJszJq0a3RY7WZ+f/8A50p9ep0S3Rf1TrAylolui5rXWCjj/jtp8eK627ml6syUafWXeVL3Zmno4+MP7RQLWwYL4NCLYD4DYEgBbAYbAYw5kJWzg4rcAOCpXRa0UHPOkkVvBpdKSUnJ83SMt/Dek0OFRitvBrYkkkUNGrijQxujjvjjaT0dFbB3sBFkyaoxuOfARllyVMj3HZpFZu2c+u9Jb0bNOD2Rk6R/NRq43sju/j34MnATWwS4Ilwejv8A1VGdqlszFzJqbN3Uq0zG1C+c8zzXiKrOyK2G9p3acfaRXaSojO05RLlCEh2NboFIZBU0bZoeHwR4NTTxWxm4FwjU0/g6NiNHDHgv4VwUsC2Ro4UtjCqPhHYYkdDgKhBKCRCRKRQEkccjqGHAtBAsYKkhUlsPkhM1sIKuSJXmi5NbFea5Jqaqt06OUgcyp2hSn6iHT7DTK6mNiwM2rTMDVrs6jLarSf8AD+BvxdoxOrLs1uOS5aa/B/3mvj+8Tr4uadWkXoQqilomnBGhAx8hZh2OI1LcCAyzk00FewSVoBRb5GqkiFFTtbLkFRa3Y1K3ZzQ5CLaQuUExrTBpmuSUs2FNMpwj2ZbXqa2RWjPzKpWjr8WuI1HounT7sFL0swOq4/g6/LFKk33L6Pf99mp0nI9l4K/2lw1
PDmS5Tg39N1+9nXPcP+nn8srnVh43wIyJqV0MxvfciiL2NlvHIpY5bFrG0T1cXYOxyK0H7liLJ6ZyCQCYaYGNEohBE0kUdRJxnQKISQMQ0RYEoJEBLgmKiUSQiS4pJBxxQcwWEwWKlSplTK9i1kKmV8iQp5nsZmplszRzcMytW2kzbESxtfk+VoyWXtfK3RRZ34npCGyLOfBDZZushsizmwCCLObIbGEtgtkNkWPgTdjcKuSQpK2W9LC5oWvgjY0GJVb8IuOgMMVDEkTds5q6czkEg0gUhkURVDig0iIoNIikJIt6ZboqpblzTLdEIq4kSQiXwUlye4aewryGnsMkti29gmwJPYcBc2V8rHSZWyvkuJUNXLZmc3Ze1b2ZQbKiQvcitgkrJosiJLY2ejR+QyZI2+kxqC2MP5N/wKfXo9Et0WNe6xCtEt0H1F1iOf8Aj/G39PE9Wd6ivqZ9F3qjvUv6FM9DPxggh8EsF7Isi2A+Q2BIYLYD5DYDGHVYaQKCTFQKy/oMna0r8lAZhm1JURZ6N7bQ5E4o0IzR5rpurTSi3ujax5k4rdGLXOvTQU0ldgTyFR5vcGWbbky3BTMk7E3Yl5LfISZybyXVzSuppGvi3SMXTupo2cXCNv49PP09ES4CXBDPU3/qpS1C2ZkZ185tahbMyNQvm+88nz1NIrY7tCSDrY4yKo6hlAtFQBSGQ5BoKJvmk8Tg8GppvBlYXVGlp3wdOxGrg8GhidIzsEuC/idmClzG9hqQnGPSGEpbk0dRNFBCJOOGEMhok5gCpIU0Pa2FtCBEkV8iqy3JFbLHYmlVDOtrM/LNwbd7Gnmjs1Rl6mOzEiphmT8lnHkT8mL3uE6LeHPxbKueCVsQlaMzrMbnil6Nr/3+Baw5U0tyv1Rd2GMvRpjx6qr8Hok1BGpidmZoqcEaMNuDLy/U5WYBq26Fwe1jYcWctaGrgmtgU2wr2JU5W2S0SkqIdvYcAHyCwnswHJI1yVDNJoo54b8F1tNciMqTTN8fUaH0yfblo0us4fj9MyNK3BKa+7n9jZjaV9udfU9JBLJp+18NNP6NHbg8/HiZY7XApwcHwabwOM545LeDaf3MTkw+xnfpK+NlrHKkV+xxY2L2Jqou48nuWsc7XJmxdFnHNog19MYmVcc7HxYdM1MNOxUQ0xWgxHAp7hEUCXIaAXIaJMdEohEgcSiSESVFOOJOKCHwC+CWQxFSchTylyZTyrkUiKpZuGZGsezNfOtmY+t2TOjxorz+rVzZSexoZlbZSzRabOzPxJTYLZzZDZoaWwWzmyGxhDZDZ17gsZus7yRZMVbGDYK3waWhh86bXBTwwto09LGjLSpPbRiriSkdj3QyjF0QKQ2KBSDSJpmJDELQaM6mjXJb0/JUXJc0/JCKtol8EIlvYZUDYV7C73CTCJS2BJ7BSFSZUIubK+R7MdNlbI9i4Shq3syg3uXdW+SinbLiRpUjjr2IbGSHu6N7pkaxowVvJfU9H02P8mjm/lX/ABGfre0a4B6m6h9w3SLYr9UdRf0I8E/xa348V1B3qpexVHa2V6mb9xFnfn4wcDIIGTKhFsWxjYtlQAYITBYBBKZBxIGmNxryKW9D4qkiaZ2ObhJOLaaNjSa1TilJ00YsRuNtO06MdG9D8bbkF5vczsed0k2M+JZjqn1dhNt8liDszcM22aOE5thcwbTX1NvB+qjDxbSRt6feCNP431efqylsQwlwRI9jU/xNVzr5TI1C+Y2M6+VmTqP1vvPF/kFopIJLYFINHJCRQLQytgWioANEpEtHLY1zSeEx7F3Tzaa3KsUPxOmjr0Gxp5XW5p4HdGRpnwaWB8GFU0cbLEGVcTLUBwzKOJXBzKCAWSyHyMnEWQ2Q2AS2LZLdnCAGIyKyw0LmrQqSjlhszO1GPk18kbRR1ELT2EmvParG020JxTp02aOqx7N0Zck4T9jXN7OJ+NPT5OEO1b79M17FDBOvJccu7E17C5yqhnTZXBGrFWj
E6fJp16G1jdpGXlnsodBNv2HpUhWN7Dk9jlrQcSWwYslbuyTFbOs7aiHwOQy5ySVlPJlbdJjdRJ1SKptiIo/iOuSHO0wAWbRIsc6yp+56TQz78SXqjy6dOzb6XltLfbg6c0ZB1DD26tzS2yK/v4ZSnj9jc1+PugpVvF/sZmTgTucqqz54/YW4UXpwEuHsR0dIWw7GwHGmTF0yaa1jdFmDtFOD4LEHsQcWYsNMSmMTEZqYSFphJkgyIxC4sYmBwSJRxwzSiTjhxSTjjioQWQwnwCxAnItirkRbmVcqCIqhnWxk6yOzNnMtmZmpjaf0N8VNedyQ3ZVzQtPY0ssPmaK2SGzOiaSyMiabBstZ4clRqmb5vQ5sFs5shssIbIbJbA5GaVux+KDdAY4Wy/p8VtOtidUQzBjpJ0XsKpgQgklsOgqZla1k4tYx6SaEY+EOTM2sEkSjkyVyKmJBJgphIipo4su6fkox5LunZnWdW0RJ7AuVIBytiTRBJ0LTCTAkti5Ow2xci4CsjK2R7MfNlbI9mXCZurdWU09yxrHyU09y4k5M6wLOsZGQVzS9z1HT4/JH6Hl8G+aK9z1mhVQX0OT+VfUh5+trSrYodWlSf0NDTfqmV1me0g8P+rS/Hi9TK8837i7Oyu8svqyGzunxzpIb2OsCTKAWwGEwWUAsFhMF7CCDjmcSBw3aRYRWx/rIsoi04NDIC0MjyZ01iPAxMVBjEzDRrOn5NPF4M3Trc0sXCOfYi3je6NrSu4IxcfKNnSP+TRf8f/Zc+ri4IkSuCJHt6/1NXzfqsyNQ/mZr5nUWYuqfzP6ni/yC0GLDXAmLGpnHwhkMiyE7ZUCSGiTjSB4VD4JpWKirZYhG4s69GuaZ7I1MD4MnTukjU074MKGliZag9ipiZaxvYIZ6exLIjwcywhgtktgsZBbBbJbBYglMkFHWBOkA1sGwWAJmtipmjaZekitlSdklWRqIWnsY+pg03segzxuzI1cFbsvN9oqjhnTLsJ3GjOT7ZtFnHPY01Di3omviNe5t4kqPPaefbn+pu6fImkZeWCfVxOhqtio7jVSOSxoNOtg1wLW7GLgXDcd4Is5ukOQ1fKrsqSjTLzV3YqWNMuekVUaBe5YeN2d8IuaLipNNFvpebtydrfPgDJj2YjBL4eoTfqb+Pafleua+JgaflUZkobGhpJrJgVO6K2aHbkkvc28nuSrUpwEzgXJRESiYEqSiLaplmcaETQh1MJFjHIqJ0xsGRVLkXY1MrwdjkyFGphJ7i0w0yQbFjosQhsQhnLgnkFMJFQ0olEI4qKSccjioSCGSyGKgqasrZEW2hE0LqVDMrTM7UR2ZqZVyUNRG7NM1NYOoi1NlfJG0X9XCnZSkjeVKhnhszPyRps1syTTM7NHdm2NEpyIsLIqYps6oEtkxVsFbj8ULa2C+gdgx20q3NbFjUIpVuI0eHiTRdUTDVbZiEg0mSkGok9aJgxqYCVHXQjPUglIr9xPeILCkEpUVlMLv9yKi1ZU9y3p5GYsm6LmmnaMqztXnK0SmKTsYuBEJE2DZ1gBN7C5PYKwJsqAnIyrmezHzZUzPZlwmbrJblK9yzrHuylZrPiT1Kwk9hKYSYwt6NXqEeu0aqK+h5Ppi7tSj2GlVJHD/ACr7PLW06rGYPWp1Gf3m/i2xfceY67Osc3fhmni/1itfHk5SuTfqyLAu2zrO6MRtgtkWRYyc2QySGARIFhMFkgLOOZwqBY9pItIqLZlqLtJmejhiDQtBoz6ZsXQxPgSmGuSKbR0+6RpYuEZum3SNLEtjl2ItY+Ua+jfyGRjXBr6L+bH4P9lz6vR4IkSuCJHuX/Q1bUP5TE1b+Z/U2dS6gzC1b+b7zxvOWwwYxMrwkGpnLxJ3cEnsJi7GplSAZxyORcN4rGty1BbblfGWcdHXozMCp17mpg8GZjVTNLA9kYaDQwvYt42VML2RaxhFLEeDmzlwQyicwXwEwGBBaBfITBYBB1nENiCbBbOsFsC
c3sV8nDGt7CZvZioU8y5MvVxtM1MztMzNTwyomsTM+3IFCYOq/XFwlR087EruGX8qm/J6HSq4o8xjn86fuei0U32K9zHyT0GjB0PTTQiG6Gwo460hsXuFYtbB2Sp1pAt7k1ZyivQqEFtENoY4r0IcV6FADojbwE4bgOLXAgDItmZmeThks0sjdNUZWre5XjvNI09J0XP340m+UXdTH5k/VGB0DUVNQvez0eoVwteNzu+5sOe4oyQmcSxJbCZIwNWmivNFrIivMRENBQYLBumhURcg6HxdlPHK6LMGY1cWE7DixUXYyLJM6I1cCosbEZjQaYCCTKMSJITJRUUk4gkqBwLCBYqQZCciHPgXkWxKapZEUsytMv5UU8y2Y5U1kaqFpmZNVZtahbMycyps6M0lLLwyjmXJeycMqZVaNc32ln5kV/JayrdiO3c7M30ToRtl/S4nOS2EYcbbSRsabEoK2tydaXmdPxwUYpLwMSIQaMXRI5IJI5BoRhoiQbAbGKFsBy9yZMW2CaYpk9+wmyHIms6cp78mjo3aMdT3RraF2jDTP+2hF7BpiosamJQ0Q2RZDewB17gTZLYE3sVARN8lXM9mWMjKWd7M0hMzWPcqWP1ct2VLNp8SYmEpCVIJSHwNjoy7s9nsNKtkeS6DG237nrtKuDzP5N/zPDTusL+h5Dr8/wCSmeuyvtwN+x4b7RZKg16s6fF/R7+PP2TYtM5M7WRiZ1g2dYgKyLBsiwIdkPgGzrJDmcdZxIcizidxRWQ7C969SNGsINAINGdppT3GJ7gErlEU2ppuEaWIzdNwjSwnLsRbxrg1tF/NmVj8Gro/1EivB/sufV2PBEiVwRJnt2/4K/tT1b+VmBqnv95u6t7M89q3Tf1PH86NAjIYnuVYTdliLOeJPgOQiI1MqGYmSgUEuC4bxMJUWcT3KMWWMU9zs1DX1tNe5fwPZGYpqky9p5qkYahxq4XwXMbM/DOy9iZKlpcEM5O0cMkMFktkAQWCwmQxEF8EEsBsA5sCTObAlLYQDKQmcjpzoRkmBFZpbPczdTLZouZppJ7mZqMnO5eYms3VO5iUFmdzbBR1T4kyDaaPQdPncUrMGELNXpk6dPlGXkno69Bj4Q+Lor43cUOXg4bOKhqZPkGPIaIUklEElQ0nHJkNl8JDAbR0ntZXyZaDhGTSaZma3FdtFqWZ0VsuS00ys55eo1S+lyePUq3W57PE/iYE/LVHicTrKmvU9h06fdgSu9rO3J4LmhM0W8yqbrh7lWaMdT2pWmVplrIVZk8Ihi26DmxTYWA7HLgt43aKEHuW8cjHUUtxYxMRF7DUzNR8WNiyvFjYtAZ6ewSYtMNFQzEyUwEyUyjMOIOKNJD4OOfAUAYE+BjFz4Iqaq5EU8q2ZcyFTLwxSpZ+dcmTqVTZrZ/Jl6pcs3xUVm5irNbMtZSvNWb5JTyRsWsdstShZMMbb4OnNEHpcKW7RoRTYGLHSSLMIk29b5nHRi2NUAowoYokrK7SWqHdvsQ4B0yXwKk6HyQiSGOlti2w5CZOhpqXJIW5+4E5iZTt0KstVYg7kmbehfymFhe6+puaF/Kc+2caEWMXApMYmSsRDZIMgAGxc3sHIVJlQE5HsUM0tmXMz2KGoezNckytXLcrKQ3Vvcqpm+Z6SbZKYCZNjKvU/Z+H8kn6nqdKt0jznQ4Vpo+6PTaNW0eP5r3yLz8WtW+3Tv6HgPtFO2l6s951B1gaPnnX53nSO7xfYXkZFhJi0wr2OtmZZ1gJk2ICsiyLIsAmzrIOJJNnWQciTEmMxumhaDRNJbTtINCcLtUORjVQSDjygEHD9ZEUNPT8I0sPgztNwjSw+Dm2cW8fg1dH+oZWPwauj/UK8H+y59XFwdIlcESPZv8Aor+2frHszzesl87Xuei1r2Z5nWP+Vr3PK8zPQIclnGyrFjsbOYluLGJiIO0NTHKZyewSYKexKLlDwSY2EqZXsOMtz0bFNCM7iW9Nk43M2E9uR+nyU+T
LUDfwT43L+KZjafJdGlhnZhYqNGE9g7srwnsMUgMbdENoFysFvYCG2gWwHKgHMRcG2KciHP3FSmIhylXkRKZE57clfJkryAFkn7lXJkryRkyc7lPNl9ypOpdny87mbqcmz3GZs3O5n5MjnJ+h0YykN72wo7sBMZBWzawLOFbFjA3jzKvIrGqQ5xqpLwZU+em/p8lxVluLsztE1KCdmhFKzh3PYh8OLDTAjxQS4MqsRyZB1bFQC8EMhWiXuiwXkexRyJ2y/KNoTkxX4DpWKE7SETtsu5MTXgS8Lb4D98TYrLZpno+k5rgt/ZmHPFS4LvS8zx5VFvZnT49dic+q9DqF8qfpsUps0JL4mB15Wxm5GVue+tScjKeRlnIynmZCaTN7i2zpSoU5pvkfCPxljGytjdss4zHUWtQY1Mrx4GpmNhw6LGxkITGJiUsRkNi7K8WNi6ZUM5Epgp2ghmNcEgolMYSQySGOmh8C58DGKnwZ1NV8nBTy8Mt5GU8z2ZMSoZ/Jl6ndM0875MzUPdnRhLPyCGh2R7gVZ0xJShb2RZw4kq23CxYqVsdGO5pGmYKER8IEY4j0gaxyQaRyQSEbkgWkEC2ALkkyvkVD5OhM2VBVeaKuV0WZuilnmkmOM7VfJkraxeNtuxc33S2DxqirPTGruHwbmh/UMPDyjc0X6iOXYi+hiFxGIlQwZBIhgZbQmaHtCZouBWyooZ42maU1aZTzQ2ZpCYOqhbKnY14NPUQuT2K/w16G0vpKpTQUN2l6se8Xsdiw3mgq8odvpNew6TFRwQXsej0a4MLp0KxxXsb+jXk8a3vka5+A6pKsVex8661O9VXoj3/V5VBr2PnfVJd2rl7Ho+L6nyfVDyEC1uSdTNKZJCJEHWdZxAgk444RJJIORNAkGgUg0RQODppllO1sVoplrDFvZmejEk2OxQ+ZErG/QdihTMrTXNOqpGjhKGFUX8KObRxbx+DU0n6iMzH4NTSfqI08H+y59XFwDJ7ErgGb2PW1f8VM3WvZnmNU7yno9c9meb1G+VnmeZnoEWPg6EIbBnLSWYMcmVovcfBpomUHxYaYpMNOi+m8EdYfYC4NHrqMhPYPHkqbViUqBcu3ITYG7pMlpbmrhnsjz+indbmzhnsjDUONTHPYap7clLHMfGRnYo/uOchfcR3CCZPYRPJToOb2ZRzZKb3Diae8nuKlkt8laWZeomeevIcT1ZnkpclXJm9xGTPtyVcmf3LmSpuXNvyVM2bncVkze5Vy5W20mbZwTs2VzbSewqyDrNpJCEixhVsrxLmGOwqaxjRYjG016icaLGNGdUs6DI18rfDo18bbS3MPH8mdPhM2dPJNLezk8s9p+VciHEXDdBrk51pXNhEIlclQJq0C00MSOaNAWwasY4gNNE0FSgn4FShT4LDFMzv0K8o7bicT+HmT9y3NWipOLU7NvEzr1Ojyd+Bb26KOqXw8so+LtfRhdJyJwSv2D6rClDIvo/4HX9yufGXkkVMstxuWZTyzomZSXkmIU7nyRlnyKi9zT8+iaGKZbxtNGbjnVFzHPgx1lUq9BjUVsc+B8XaOexZiYxMTYxMimsQY1FeDHxkq3FFQ6LoNOyvdvYdjexUBiCBTJTGBHEHAYXwKyPYbITNmdTVbK9ilmezLeZlLM+RSFVHUPkytTLc0dQ+TJ1Et2dXjiKrvdh4sdu2RCLlL2LMUkkjpOTrkhkFbQKQ2CG1hsUNSAihqQKdRxJzAIb2FthSfgXJjBc3sImxuRlXI6KTaVkmUNRO9ixmnSZSm23uVGeqWkOggEtx0EGkH4dmjd0S/kzExLdG3o/5tHNoRdjwNQqPAaZCzUcwUyWMBfIuSGMBlQEyVlbLDZ7FxqxOSFouEx9RjuT2E/D9jRzY7kL+F7GkpcUfh7cB6bDeoht5LTxew7R4f5dOuCdX1SsbmkjUFt4NvSKomTp1SRsaZVA8zxzu2kZnWZbPfwfPNa+7VZPqe961LaR4HUb5pv3PT8X1
n5P8AZXa3ICaIqjoQ4444QccjiUiaTkiSaJSJtCKJSJolInpuS3Gwg2TjhfgtY8fGxndAOPH7FvHiqtgseP2LWPH7GVo4GOO1wMjCmNhANQIp8TiiXcK4K0I0WsSMNKWoco1NKvkRl4+UaulXyI18H+yp9WVwBk2TDXAvK/lPU8n+qmRrnszz2f8AnWb+uezPP5t8jfueb5PdZ0KGRAiEjn1CNTHQlRXTGRZlfQW4u0GmJxy2DscpvHWiaTOaOo9lQXC+CvmTUkXEJ1ULipLww6VO0cnaNvC9jE0a3Rs4VsjLf1UXoPgsRkVsfA+PBlVG2c2CcLhum9mY2uzdk+TWyOos811XLWRehcz1npMtQ/ViZ6i/JQlnt7C3mZrMI6uTz+5XyZ72TK7m35AtmkwByyN+Qbsg66L4STkRZKe4+AyCtl7EqSKeFWy9jWxnpUPxosQQnGh8DOqHOLeO1ytzR0T7oJ+pSx7osaKfZJwb4exh5Z6TfvWrDYalYrG00mNTORSUrYxLYBcjEXAlIlI5Ik1AXEFrYY+AG9hWBXy7K0VJZKe5Y1DpMz5NtkXPsqcsya3F5JpimmBJNMvOeM71r9KyVOrNjWY/jaOcVzVr6o81oMvZlSflnqME1OHrsdOV5+PJZJ+Slmye5d6tB6bW5cfi7X0e5kZJttmsym/QydslbEUSiuA2DplrHMpJ0OhKjPWTaWOfBYhIzsc+CzCZz6yqVdTGJlWMxikY3JrKnQyEm2ivHfyPx7GfFSrMOB0BEGNixwzkSD4JsZiRxCfgkAGTETY2TEZGRSV8zKGd1ZdyPZlDO+RyJrO1MqTMjK7mzS1bdMzHvJnZ4p6SZBUhqFRYxM24uGxVjooVAdHgFmRGLgWgkxGMhsiyGwCJMTJ7ByYmcqGXS8kinmnyOyzpOilklbLkRaTkbbFMZIWy0VCW42AtDYE1Kzi5RtaTbGjFxPdGxpnWNHPqexPq6mGmJixkSVnIJLYCIxAAtANDqIcRyghoGStD3AFw2LlClOFvgD4a9C1KG4LiUFZw9h+ix1kbolxQ/Sxq2Ru/4hfwrg18CrFZlYVbRrQ2w/ccXhn+ao8/1h2ps8Rlg3Nv3Z7TqzuM/vPKZce7Z6GGWvrPaAa3LOSFCGqN0AZxLIYByDSAQyK2IoSkSkElZKRnaEJDIQt2Co20i3hgRq8MWPHxsWsePjY7Fj4LeOCRjafHY8ZYjAmEB8YEq4GMPYLtGRic47io4iC3HwQpLcfjRjoLGNcGpp1UEZeNbo1sCqK+hv8Ax5/kqfTnwJzP5WOZXzv5Weh5PimPrntIw5q5M2ddLZmO+X9Tz9/WdAkEQd5MakaDiLTGJmWoZ0HQ1PYQmNi7RMN5aiKGUc0j2ll1udNd0GmEyGhFR6NUzYwrZGXpFUzWwrZGWvp5Wsa2HRWwrGth6WxFWmqIYVHNBARm/VZ5TrH84vqerzfqM8r1bfMkaY+stsuiKDaBaOhIGC2G0LZUJ1nEWRZXAKwlyAHDdhYFrAtrLkEVsK2Rbxrgxq4fjWw+KFQQ+KM6ZuMmX8nkjO6t02djXA3Lj78TrlcGevfoWdjQ0824J8lmLszdBk7saT8bGlF2tjis5ShqCTFph3sXDGmSmAjrZpAJuhcmw0rBktgClqHyUm9y/njdlKcGnwTamo+pDVkbhJWwmuFxEPkmmvDPQ6DMppepgODqy503O4ZVFs2zronpH2q07vDqUtv1JfvX8TzTR7vqmFarpmWKVvt7l9VueKlFPwdeb6GiCUG4EdtFEgNOgaYcYNk2A2EnsWsbK0ItD4OjHZxagxqZWjIZGZhYqLcGWIMqY7ZbhwZ2KixBjouivBjk9ieKOTJTFJ7hphwDT3Cb2AT3IbFw0SYjIxkmImxcJXyPcp6hclrI9ytldplZiax9WtmZr5ZratbMypKpM7fHPRCQcRaYxM0WdBjosrxY2MhGsJkpiVIPuEfRNkOQLYDkPhOlIRkYcpbFbNOk6Gm0nNP
wVm9wpu2xbZcZ9QxbCbAbGSU9xsRKY2AqSzj5Rq6eXyIycfKNLTv5UYaEq9jdj4MrYuCzAzV06IxCoDkI0pE0ckFWwwGiGkHQLRUpkzjuKaofJWLaKlMposadVFe4posYVSSI8v8AqFzAt0aj2wfcZunW6NLLth+45fB7tOPN9T3T9zByY7XBv9Q8oypxO7KKyc2Or2KeSLT4NnLjTTKGbHzsaSs7FBrcBodODTFMskIbDdCfI7GTr4DUgkjkgkjC0JhG2X8MCrhW6L+GKpGWr7OLGOHBZxxF40WMaM1wyEaQ1IGK2GxQGlLYhoYlsBIKEJbjoIUluOgjGksYV8y+pq4lUUZmBXJI1Ma2Or+NPa4N8FXUOky0ylqnUWdnlpsTWy5Mxsva2e7XuUG9zh19ZBbIs5s5GdhDQaAQyJFhjQ2LpikGmZ/kPPtEMJ7IFnrRqA45nN7CqVjSLezVwrYzNGrNXCtkZVUWoDoicY9EKSQySBwE6j9Rnk+qO856vUuoP6Hkte71L9jXP1jr6ptAtDGC0awiWhUuRz4Ez5NYA2RZzZDLAkxuNWxUSzgVsnQW8S2RbxoRiXBZgqMKs+CHxVCYD4GdM2C3RZilVCID4uiDIi/0fUtf6Mt0aWLImk0zO1cXOCceVuM0ea4pN7ow8mf7Z31WqnsMT2K0J2kPT2MpVGRdnAphFdAgZHWC2P8AQJyKyvKFlpqwHHcz1QrfCTOWFrgsdgSRHaXFPInFCMORwzpr1L+SCaKGSHbktG/ivtN9PU6WayadX6UzxepxvFqcmNquybX7T1PScndiSb8GN9oMDxdSc0tsqUvv4f7jvxfR33GU0jlGwlCxqgkVbxBccd+BqgkEtjm6JttNDSOtIFzITsX5M5Mdj3ZWgWcaIsVFrHsWsfBUxsswexjYqHp0GpCUw0yeKOg7Y5PYRjdDUyeAdgNkOVC5SFwOlL3E5HZ05iZz25DgKyPcrzfIycrYmbNMwlTUbpmVkjUmaed8mfkXzM6cJJWwaYLVHWaGamEpUJTJ7hKWFOie9lbvryd8SvIcLqz3gufuV3k9wZZNuQ4Vps8iS5KmSdsjJkvZMS5lSItdJgNkOQDZRObAb3ObIW7KSZDdj4LcTBDomegfDku4HsilAt4XwY0NHEyzBlTE+C1Axqj4DkJgNTJ6ZkQgEwkx9NIMibIbLhltAtByBe44A1uOxrdCvI3Fu0R5r/iF/TK5Ivah1ia9inpVc0WtW6xmf8efVR57X7tGfJF/W7zKckdcRVXIirlhfgvTRXnHkpNZmWHNop5IU2amWHJSyw5LlTxTfI3G9xc1TDxumGvhLUeEEiIO0F5OamfhW6L+HwUMbovYHZlr6cXYIsQRXx+CxAmLPgthqFRYxOgBl0gHuybsEWgKKGxFpDYIyoWtOrmjTgqRn6VfMjRjwdv8WKjpcGdq5bMvzexl62VJm3kp1h6yVzoptjtVK8jK7Zy1i6wkAnbDRPANcjIoWg0yeGYg4gJhJiuTYLAbO7iG9jvWhsFvagXIHuuSQVNaWkVJGniWxm6RUkaeLgwq4tQHJCYDkSaSGccxwKuqdQf0PJ6t3qJfU9TrXUH9DymZ3lk/c1yy19LYD4DBkjWAmfAjJyWJFea3NcguiGE0RRYFDkuYFsVMa3L2GOyM9CLWNFmCEY1wWYIxqjYIdFCoj4ozqjIOhvckrsUuBOoy01FPkkLWF98/VAzh8HUWtoy3+8LSKoJvlh6pd+JpcrdCs76K/FzC00mWE7MjS6h0oy2aNHHO1ycevVTKsJ70GuBKe43u2I/RisiQLe5F2H6Nz5Oo7jc60LoRR2yObIF0ByVRR1CLs3SKWd2XjXKjS50jLU0mx32iw9+LDmS/VbTfs+DL0Obszretz0Gsh+ldMyJK322vqtz0cUs3seVqjjmwHIsht+4ty9wXNC3KypkhuRKluKsKLK4cWYMsY2VIMt4jLSotY2PgIhVD4Mxq4amGmAmGkTxRuNjHLYU
nSOb2JAnL3FSkRKQqc6XIcCJz3EznsBOe4qU7HITpS9xU5HOQqctjSQFZnyUZ8ss5ZclWTu2bZiSpMCyZ8gN0XAKzu4W2Q5DBrkC50KcwHMfC6a8gqeR+opz9wHIcibRufuC5gN2C2PieicrBbIsixgVhRQC5DiKg2I2L2EobAzoOg9y3h5RUh4LWHlGdDQwvgtw8FPC+C5Dwc+jWI8Bpik9gkzPqjEye4VZ1jlBykTYnuonvKlA2wWwXMhyLiktljCrVlRy3RcwcIy899BpaNfMN1jqBGiW9+xGtfyleCcyr+mBq3eQqssah3lYhnTElSQiaLMkKmhpqlljdlLLFbmlkjsylmW7KiazcsabFxdMsZo8ldKmX/RLmN7DBOJ7DLOez2DsbLuCVFCDpljFKmjLUDWxS2RZgzOwzLsJ2iFyrSYakV1INTsDPTJW7Fp2g47kaI2KGwFRQ6HJkcXtKty8uCnpVsXPB6P8AHnIqF5HSZj6+ez3NbM6izC6jOk9w3S1WJmneRim7InK5v6kWYshLkamKQaYjMTCTFphpiMxMYmJQxMRvMuZDn7iu87us7+KG3Z2LfJYDlUW7D0yuSJ18JsaVbI0sS2Rn6ZbI0MfBzVcWoDUhUBiYjSQyfoC9yoFDqDrG37Hl57zb9Wek6pKsT+h5trc1yyv0NANbDaAfBpCV5iJ8lmZXnyaQwUdQSRyRXTHijckXsSpIrYY72XMa4M9U4sY1wPiJgh0TKmbBD4CYD4kUxN0m34KCbzaj2THazL2Ymr3YOihUO9rdhITQxvsVEZJqm2xTnTEajJSSTFIQ3d98eS/pcvelvuZuHJ4fA5SeGSnH9V80ZeXH6hWNmD3Q26KGHUqdNMtqdqzgs4IY3Ss5StC3NPYlNUIzE7RDRCdcAt7h0Oba9zlJVud5OdNAESaaKmZWmPm2hMnfJU9FVKFwypt+T1fTprJp0nvZ5jJFKVm50fKmqs7vHv1EZ+vPdRg9Nrc2JukpNr6PdFN5L8m39q9O4Z8Opitprsb91x+w87Z25k50ans7uOsT3EqRRHWFEQpDYSJpxZgW8a2KeN7l3DwY6VFmHA6IiA+BnVw2IxC4vYNOiFDboFs5sXJgESkV8k6Qc2VsstnuOQi5z3e4ty9xcp7gudIqQDcxOSewMpiZzvyXIQZu2Ikw5MTJ8mkIE2KbJnITKW5pAPuBcgHIBzHwhOYDnYDkC2PiKJsGwWyGyiE2RZFnWAScRZwASDiLQxMmgxMZEUmMiyKD4FvC90U4FrC6oyoaOF8FuDVFHC+C3CVJHPo1hMKxSkHZkYrIbBbBbGBORDnXkW5C5ToqA/4hPeU3kp8krIaQ+rXdc0rNHT8Ix8U+7Kja0/COf+RffDla2iXytide6H6RVjsq6+XJ0eKcyr+mFldzf1FsmTuTfuCzYkNCpDXwKkgBGRbMpZkXshTzIqIUMy5KjVMu5VyVpLcuFU42PTEY9mORlr6Qovcfje5XQ6BnYOL2GdUXYT2MyEmi3jnsRYa8phxluVVMZCe4uH1cix0HuVsbuizj3Zjo1iK2HY1uhMUWMatog1/Tr5VsWfAjAqihzex6Xi9YXPivqHSZ5zqc6TN7VSpM8v1TJu1fkz3UbrNbtkim6YSdkMjUw0xDkHCQlLCYSFxYxMRjTCTFoNDN5DuJTAslM7uGKb2SLWkVyRTW7svaRboz2TY062RoY1sUdOtkXsfBzVpFiHAwXDgMRuOOYLezKgrJ6tKsT+hgG11iXyUYprn4xv1wDWwYLNICZorT5LMxE1uXABBJW6ISGQjbGpYxLZFqC4E44liCMrTNgh0QIoZHkiqNihqdIXBHZprHicn4RIqlqpvNqVjXFmjFKEEl4Rn9Og55JZWuOC/Jjv8AxIZSpNmfmyXPktaifbBmRky/NY8zpL+PJVbl7DkUlT3TMOGbfkuYc3G47DX2p6efdFt43+wv4dV3xSRSxZVONPdPwQ8U8L78e8fTyjl8ni/XuFZ
/xsQk2rYxMzsGri1Te5YWZS4ZxWWF1aUjm0yu8nuT8RBwdPvbYByYvvI716jmR0U26ETbDc0/IDaZcynpGSTLXS9R2ZUm9rK84pi8acMia2pm2JxPfb03WNP+m9JyRirml3w+qPBNuz3/AE3OsmJJ7ujyPX9E9F1Caiqx5W5w/ijt8OuzjS+4zLO7gGzrN0GqY2EyqmNgyaa/iluX8L2MvE90aOF7Iw0uLkWNiytFjYsyWsJhJiEwkxGbYLYPcC5AA5GVczqLHykVNRKoNlwqqOe4tzAcgHL3KkLo5SFORDkLci5CdOQqcjpSEzkXIA5JCXK2TKQps0iaJsW5HNi27HImpbIsiziuEmziLOsAmziDgCTjjgCUw0xaDTJoNQyIlMbFkULEGWcTKkCzjfBnQvYnwWYyKWNliMtkY6hxaUgu8rqQXd7mVgOcwXMW5gSnsLgE5ipzBlMTOZchOlk3OWX3K8p7gd5rmDrV0Mu7LZ6HTrZHnekq52ej064OHz/78Xn42NOqxIzOoSpS3NPHtiX0MbqU6TOvx+sxd+Mi7dkgJk2aklsVJhNi5PkAVkZVy8FiZXyDiaqZEV5ItzQiS3LhFRVMYkCkMiiKEpDYoiMRsUQExVD4NoWkGtmLg4epsbiduyqmWsCsnU5CXcXCLmJFXCuC5jWxy36Z8SxhVtCIotadXJBPql/EqQU3SZ0OAcjpM9HPrC2drZ0meU6lO517npNfOkzyetleV+xlplukNnRlvQDdsG6ewuIObJjJpi7tWSmLhrcHfA5MqY5U0WL2slUMUqDUiv3hRmAeYaaBbrYbHdUDODT4O5VRBWzR0i4KGNbmnpFwZbEauBcF3HwVMK4LmNUcyz48BAoIRuYEnSCYE3sy4VYPV5W0vcy2aHVHeVIoNbm2fjFAMg2gJFwyZrYrzW5amtivPZlwBSHYY27FRVst4YbBTPxosQQvHEfBGNUJIYkRFDUiVJiqKXUcjUI448yZeeyMtJ6nqNcqL/cGfvSrQ0uP4WnjHy1bDkyW62SFZHSbF/ZKOvyVGjInO3yWeo5rnSfBnOTOjGfRHwyU+S3hyu0ZsE29kXtPBt8BqF1q6ebdGlhk9jN08KSs0cLrk56fR5tLHL82N9kv2MR3Z9OqyQdeq3RdUkg1Mz1ma+jnVGOsTfIxalPyhuTS6bK7niV+q2f7BM+l4mv5PLOH1pozvglT+a56lJ7NHfpKe9iJdLzreOeEvqmgJaLVx4UX9JB/4cLlWP0pJ02MhqIvyjKy6bWR3eKT+lMSsmbG/mjJfVD/APKp9vQKSfoFVsydPq7aTZq4ZqaVMy13P040en5HCa3LX2g0X6b0yU4K8mJd8a81yvwKWnTU19T0WnXdhSluq3N/DrtaZj5c2A2Xer6daTqeowLZRm6+j3X7yg2d6LOXgk9xkGITDg9yaIvYnuaGKWyMvFLcv4ZbIw0qL0ZDVIqQl7jkzNawpBKQlS2J7hcM3uAlIBzFzmOQJnP3KeryfI9wsmSvJQ1WS4vcuQrSnMFzK/ffk5zNOEa5gOYtz9xbmVIBSmJnPYiUtxbZchObAbo5sW2VIlzkDZDdnFlU2dZBwEmybBJsAI6wSbEE2dZxwB1hJgnIAamNixKYyLIsCzBliD4KsGWIvYzoWsch0ZFSEqHRkZ2BZUwu9UV1ILuM7Ac5C5T2BchcpC4BSmInImUhM2VIQZSA7tyJyFp219TSB6Po8agn6nodOraMPpMaxI9BpVbR5nkvfI0z8ab+XD9x53qc6i/VnoM7rE/oeY6pPx7nfn4vSipE9wpM7uNCMbFyZFgtgAzYmW4yTFNjSVNCZLcsNWhTQApLcOETqoJcioGqQSYBKJ4DU6JTF2cmHAcnbRf0y2RnY92jV0y2Rn5PhLmJcFuCEY1sWYrY5KZkUXNOt0VIIvaZeS8TtUuJUhWZ0mN4RV1MqTPQvzi2L1KdQe55fPLum37m91XJUWedk7bMfrn1fYWwWSyGMkxdbMK
xbCTfkVgOg9x6e1FfG02htkWGKyU2iFRIKYGN7lpw7oWVYLcv4F3QpnXa0k6qKNOjT0i4Kc4VP7y/pFSRjupaOFUW4FbEti1AxWcuCSFwEotiAWwMj+Vje1+gGWD7HsaQq811B3nr0KtbF7V4ZSztpFR45R5TRrKyJaBaoc0LmioCJK0Iktyy0Jmty4Ycatl7FGkivhjb4LmNbE6OGwQ5IGCpDYozqhRQ1IiC2GJEKJ1E1jwZJeUtvqVOlY2lPK+XsmH1Ob7YYly3bLWDGsWnhCt6tlf0TpFXUT7YNlmbMzqOTtxvfwPM7SY2pyd+VsVCDm+A4Y3OVtcs0tNpOG19x03UzE2k6fSt1tsaWHTqKW33j8WFRXAxtRW5za10cRGKiNWRLyUsuoS4ZWeq35J5aOtdZlfIyOa/JjwzNtblmGSx8NqRyX5GKe3JnxycbjVkvyBrimT3lZT9yVP3KkB7khWSEZqmrAc9jlNFcTVPUaaEXcVT9huhyOM1Fuws7TiVITcZpp00yPJ4puIj1mCGyfqrNrA6xpGD0nUQ1MFCU0pJUrNfKs2PDJ4kpzS2V1Zx4zvNdEnp4j7UTUutZmt1ST+tGI2XOpTyy1mV5045G22mt0yjJnp5npjv669xkXuIT3GRY6S3je5dxSryZ2N8FvHIw1DaGOY9SKMJUWIy9zKxUWVI7v8AcSpHOYjNcxGSdI6U6RXyTKkAcuTncztVktcj809uTO1E7fJtmBHeQ5iu4iy+A1zBcgGwWyuEJsW2c2A2PiXNgNnN7g2USTrIOKCbJsElcgEnJNhKLY7Hib8E2yESothKDLccG3AxYaXBP7Pij2P0OcGvBeeFehDxL0F+hxRaa8EFx4bXAuWFrhFfoEobFguDXgJIVI7GyxF7FaBYjwRQamNTFIKJFI5Owu4UmTZFgG5bANnN7ANi4ENipsNsVNjBUmdDeaXucwsCvNFe5XyB6vpkaxL6G7o1c0Y3T1WOK9jc0S+dex5X3bWfFnVusTPJ9Tnc0r8nqNfKsVHj+ozvPR6OT0R3HdwrvOUrNCNcgWwLIctgCWxbZDkA5bjKjBaJTtHMQLZ0TpcnRACRNEpWxijSEC0mwlAOkTYgPDC5I1sEaSM7TK3Zq4VsjDyUlnGth8RUFSQ2JzmdjW5o6dUihjVtGlhVRRr4Z/kqfTW9mZ+rlSZem6TMrWzpM69VVec6tku1ZjNl3qeS8le5QsiOe/RWQDZ1jAiLRFkWAMjKmqG997laxkG6omwLEW2Ngre4nH4LCdE1UefSqRe0r3r1KdfOW9L+ujp01z9MyxqfBb0q2QnKk5lrTR4MNF/a9iT2LeOFicMS7ihsZqg4Y0kthqgq4CjEYo34HIZPw/YXlx/K9i6oC8sPlexpIVYOTAnN7CZ6dNcGvLDbboVLD7Anjz+fSVulRSyQcdmj0uTDzsZup06d7FypsYjQuSLGaDhKmhDVs0hQzCi3BCMMdkW8a2IqoOK2HQQMUOgiKqCitgiUtiMjUMcpvwiTZ0l+kdRrlRdGjP0KnTYNueVrd+S4029x1KvNGRrovJNRRuZIWnSKbwLvbaKzeFVLS6RRSbRoQxqKuiUkkJzahRTSYrbRwzJkUEUM+pSvcRqNVzuUJ5HN87F5x0j8uocm0mKjN3bYCOWxpyQl3HPgtQye5nQlXkfGdeSLDaMcj4sbHJ7lCGUasnuLgXlk9yfie5TWTbk74nuVIFz4nuSplRZAvie4yPnO1RUc6YTyFecxyJaGj1bxZE02qZ7XpmtjqsSt3JLf3PnCyNO0zZ6P1B4cq+bz5J1n+22Nd9PQ/aLokeoYHmwJLUQVp/73sz55kTjJpppp00+Uz65p80NRhU4tNNceh4r7YdHcMv6fpoNqbrLGK4fh/eXi/wBHvPY8qnuFF7i97GRHWKxjZbxsqYy1jdGOjWYMdGRXiw0zMz1LY5zFWc5CMcp7FbLPZjJPYq5HyXkE5JbMoZm7LeR8lHM9zbMCLIsGyLL
4BWQ2C2C2PiRNgNnNgjJxxxww4445DCUrDjGzoxtljHC2iNa4HYsd+C7DGkjsWOktixGNHPrYAoV4C7EMUQ1C2R+gR2ex3w/YtrH7BfCoc0FL4fsA8XsXXBLwLcUXKFGeH2EPHXg0pR24K+SBc0FWKpliHADjTCjwFI1BpgIJE0DOIOJpJsFskFkgLFyDYuQAp8jtGr1CEvktdOjedMN3maHq9EqgkbmiW9+xjaVVFfQ29Eqi37HmeP3ttCupOoM8X1Cf+UM9d1SWz38HidbO9TI9LJb+h77CUhCkT3GhHOfuC5+4ruIbAdG5e4LluC2Q2MjYSDfBXhKmOTtCCJBQTIYyKoQGkkS2RZ1iN2x1kWQnbSANDSLg08S2RQ0saSNHGuDl8lI+C2HQFwQ2KMTixhVtGnBUihp1bTNCOyOnwT+15BldRZidRnUXua+olSZ53qmSoP6G2hqvNa2ffne/BXsPI7m37ixRg6zrOYNjArRFgtnWAEtxuPkSmOxioWobINMVFug0yLFRj/6TLelXzJlWMbZewR7VZvq+mkHN3M0NMtkZ0V3ZDV00aSMNCL2FcF7EqRVwrgvY1sTFw2CHRiDjQ+KKhhUNgckLXA9R2ImtiwoPHyLljLjgLlEOBQyY/YpZ8S32NacCnmgt9g4mx5vW4OXRlNVKj0mrx2mYWaHble3JcrOzlHiWyLWNWJwrgt4oWKqFFD4RCxwQ5QRBwuqRU1rbxKC/0mXpRFrB8XOk+EIWu0mHs08VVN7sc4VyW3jUI/RCG7d+AIqUVGDbKGaajbY7WalRTVmFq9Xs9ysy0j9Rqkk0mZefVNtpO2Vs2pc20vxFJ3uzoz4+fSMcnJ22SgUg0i6SUiTkjiQlOg1IWTdCsCwphrJ7lVSJU/cXAuLJ7hKfuUlP3CWT3HwLqyE/F9ymshPex8C08linP3Fd4LmORJrkHhzOE1v5KzkD30x8OXj3/wBnOoJ/yeSVJqt/B6HLCOROE0mmqa9T5fodbLDNNNqj1+g+0EJQUMrTpbO90ZWcby9J6r9lIZJSzaOVNu3F8GVj+yutb+aUIr1pv9h7LDr8U0msir0boZPW4Irea+4f/pP7P8yvI4/stqYQlPNnhCMU3sm2zKS7ZNXdNo9T1frMJ6aeDE1vs3e55ZO3Znq9TqSHR4DQuLGIis02c2cQ2IwyezK2R8j5vZlXIzSEr5Hsyjle5cyspZHua5AbBbIbIbNAlshsizrGTjjjgJxxBww4OKsFK2PxxJ1eAeOBawwQqEdy5iVHNvRmwikhyQMEMey2Oe0OSoZBWxCdui1hjdB0H48dpByx7cBY0lQyk0KUKGSFCWi9kgV3Dc1miV2hM4F74doXPFsXKbNlHchbFrJioQ40y5SrkGgEEhEJEkIkRIbBYTBkIBYuXkNgSECnyXulxvLfuUXyafR43O/cny3mKHp9OqSNvSKsTZj6dbI2tOqwo8/wT/NvPrK6tKkzw2pneebu9z2nWJVGR4bLK8sn7npY+o1fYkwrFJhWakOzrAs6wCWyGyLIAkp0x8XsVw4ugHVlOw06QnGxjZJibIsGzmxATYWLeaQpsfpVc7FfhNbTqki/jRTwLZF3Gtji3TPih0VuhUUOgt0ZnF3TIucIRp1sPk6R2+L1lpFPVSqLPL9YyUmrPR62dJnkerT7p17jtRtkvdnUHRDQ5WRbQDVDWgXGxgo5DHAGhhyHwQpIdBcCB8V8oaREF8oaRNNnY4bj+I0RGG/AxQbKtaC08Lkma+COyKWmx00aWGPBjVSLeGOyLuNFbEtkXMaCKOgh8ULghyRpFJS2Ikg0iGi+AloVJFhoXNUgJUmtipmWzLmQp5mkmIqzNQtmYuqglK0bGpnVmRqJWxxnUYkti5hRTxuki1CdBR1ci6Di7YnHbLEFRnTGkmFjqDvyQuAGm37EkfKbl9CtnnUXQ6nVFbUNKDb9Bw3nOpal45NN7mFlzSyS3exc6tJzztv7jOR3ePMk6VGg0gE
hkUXUmJBJAxTGJGdDqJo6jqJCGgWG0AxgLZHcQ2C2VIB9xKmJbOsrgWFMLvKykT3C4Sw5nd4juOUg4R7kDYCbOsD4YpNOx8NROL5KtnWLip6a2LqWWCqM3X1GvqOWapzb+8xosdBmdzF/qr3xpTe7Y2DKuN8FqBnU1YgGhUBqIAgWwvADewEXN7FbK6ssTezKuVlwlXIynke5ayPko5HuzfIQ2C2Q2dZoE2dZB1gSbOsg6wCbIs44AbjVsswRXxFvGtjLdM7GixAREfBHLoLMEMa2AxoelaOe32ZMY07LeHwKUNxuNUL9A9OhkWJQSdDlI1pMD4dsOO41R2LlLiu8VKxcoFxx2EziXNGo5IL0KuSFmhkgVskKNJQpuNEJUOlEW0WmoRJCJAkMFhMGRIAwJBsXLgQLZsdGjwZDNvo8dkzPz/6CfXo9OuDZxbYV9DI063RrrbEvocngn+TfP157rUqhN34PEt22/c9f12dYsjPHPk9DDLX0SYVgIlM1IVnWRZ1gE2cQShBxyOOAjYOhndsJiFYqY+47uAsFsAZ3e5d0St2ZydtI1tDDZbGe/UJq4Vsi5jWyK2FbItwVI4dfVmxQ/ErkhMUWcC3FPqov4VSDyOkzsapAZnSf0O7M5lpPjL106T+h5PXS78r9j0vUclRZ5fM+7K2Rb7ZaJSOcQ0vYLtDqSHAjsLHad2Ico4R2APGW+0FwK6SooUxsE0NcF6HKFB0hY+BqQEVQxLYQBHHvwMji9iysVPgNY/YVrYOHHTL+GO6E44UuC5ijwRVLGNFjGtxONFjGtxwz4IckLgNRpDEiGSuAWywGQqb2GNiZvYAr5XRl6rJ2+TQzPZmNrW5WkJFUs8+663M/MqtsvbRTvkoaqVscRQQnTLWFt1ZRxptpGnp4LZi18C5iWyvYcmvHgUltQSXhGVM2Lt0PhBPkRBNVsPi2lYjFKCrYzNfFrG6NFzpGfrZppplQPG9RVzZnpGt1OCttIy63O7Hwq5IbBAxQ2CHSEkGkckEkZ0nUdQaR1CBbQDQ1oXJBASxbGyQqRpAW2dZDYNlgdhWKsJPYANMNMUmFYgOyUwEyUwA7OsGzrEo2L3HwZWi9x+NkaC3iZbhwVMXJbhwYUHRGIVEZEkJsFsJgNgReR7FTK+SzkexUyvY0ySrlfJRyP5i5lezKU3uzfIDZKYIRYcSDZIiSdZBwBJy5IOTGD8bLeN7FPG9y1BmGzWYblnGivjexahwcuzPgOiJixqZz6M1Uw0hSYxMyoMT2OshMJLYcopmN7lqLTRUiqHQkawj6TQqcAlJnN2ioFecLEZMe3BdaFTiXKTMyQa3K8lTNHLDZlHLGm2aykUcccNKGDIJgsABgS4DYEgBT5PQ9JVQR59btHpOlKsaMf5H+p5+t3TLdGtN1if0MvSq5L6mnm2xP6HP/AB42jyfX51imeTo9P19/yTPNHfj4w19QSccaB1nHEpAHJBJWEo2EoiAEjqG9p3aLoLSJoOgWLoQwZBMFjCcS7ppe5vaKFJGNpI92VM39LCkjDzUT6vYlsizERjVFhI4v7WbHkt6dWypBbov6ZcF4napcSpFbUSpMsPZFHVSpM7L8XfjE6pkqLVmClbNPqeS20mZyXkyZOSColKjhhFHUvQlEpWOBCR3YGkEojSV2Hdg9Rvwd2ACVAlR2HKJPYPpLyx78BLHvwPUA+z2JroKhClwOxqmEobBY1vwSZsFSHY+RaVIbjRUCxENARDRpDFewDZLewDZQQ2IyPYZJiJvYCVc72ZlZt22aWZmZqHSYIrO1E0m9zPyO3bLedtypbgQ0s57tOg6z72l4cdtUjTw46XAen0lJNqh8oqKpIzt6qQtypUhmJb7im0nbZyzK6TFVLdpukMTVFWM/NhLJ7iA8rpGPq81yas0c2RdjdmDq8m7LzOhQ17tNmQ+WX9VktNFF8nZj4Qo8j4IRHwWIBojEg0gYoYuDOkijqJo5oABi5DWKkOGVIVLgbJCpI0g
IkCMktwEi4bkiSUiaAkIJEIkQScjjgNNnWQdYA2DLGPwVYvcsY2Z6NdxFqD2KmFlqBhQemGmLXAaZKUtgNhN7APgIReR7Mp5mWp8Mq5eWXkKeZ7MpT/WLuXgpT5OjIQcccUBHAnAE2dZJwEiyTiEANgy1B2U4umPhIy1AvY5FuEtihjkWYT4OXcNci9h0WVYSsdFnPqGemGmJUg0zOwzkw1IQpEqZPAsqQcZleM78jYs0hLEZJoNbiIMamXIBNASQb4BZUBOSNooZocmjJFbNBMuEzWqZFDskab2FVRaaBgsYwGhkBi5DGLkABH9dfU9P0xVjR5nGryr6nqOnKsaOf+T/AKnn629IrmjQ1LrEylol86Leqf8AJMy/j/G0eM6+/lr3PP0b3XnbS9zDaO/Hxz36E4KiK3LDkhkINs6EW2PUe1IVoCopBUFRxKg0cSQ+BAL4BYTBYwBsBsJgPdpe4yX+nwtpvyb+njsjJ0GOkjawqkjj8t9nlZxrYfFCsa2HRRguGQVtGlp40ihhVtGliVRNfHPap9FkdIytbOk9zRzSpGJr8lRbOi1WmDrZ92Vq+BEeDsku7I2SuDNkJsghsixgcQ4oCIxABpBJEIJDJKRNI6MXJ0k2/YfDTTfNIVvPoISCUG+E39C9j0a2b3LUNPGPhEXy5g/KEhkYkJDYLc1boaqPAONbjci2oCC+Ymj+zq2Q3GhaXAyA4DYhIFMlM0hpbpCmw5vYRJ7FEichGR7ByexXyS2Aqr5pcmZqHyXc0+TN1E+UFZ0nFjUp21ZqYMKmltsZuGVNGrpsiSRPTzxdhpl2UlRQ1eGWO2tzSx5kluxOoaybeCbYvU/48xnzzc3FppEY8teTbyaTHkVNJlLN0vl4217BLGZCzbck/G9ypm0+owXcW0vKKstS1s7TLmen1e1GqqNWYupz23uTn1F3uZ2XJbe5rjBoyztiU9zm7ORvJwjI8jsfgTEdAnRHRGIXEYiCF4IaCOYjLaFtDWC0MENCpIfIVMuGrzW4NBTALNNHIk4CdRxxwBxx1ggE2dZ1kWMzIvcs4m7KsHuWMb3I0F3E7LUGVMT2LMHRhSWU9gkxUWGmRSE2C2SQwhFT4KmXllvI9inle5cCpmfJTlyy3mexUk92b5CDiCSg4444A4444CcECcAEmNgxSJTJoXMcixCZQhIfCZjrIaGOY+M7M+E6HQyHPrBrymGplRTvyGpmVyaz3E99CFM7vJ/IWYZNyxCdmcp0x+LJ7h+Q0oPYcinjndFqD2KkBiex0jkE0UCmhOSNostAuNrgcDLyw3ZXcaZo5se9pFOcdy00imA0NaBaBJTQpoe0LaH0Awr+VR6jQL5F9DzmCP8AKo9LoFUEc38m+lZ+tvRL50P1jrExOiVy+4brX/JE+D41nx4rrbvIl7mS1bNTrL/lkjMaO3HxhfoaJSJoOEVZZDxxpWyW9zm6VIgRus6yDhGmzmyCGwDmA2E2C2ABJg413ZEjpMbpI92W/cd9Qq2tFCkjWxrgoaWFJGjjR5+72qh8OBsULih0ERFRY06tmhHaKKmmjvZbk6R0eOemmVXUzpPc871TLUGrNnVzpPc8t1TMnLtvk0qN1TTt2GnSExmF3bC4gbZ1i3MhT3DhrEWMTK8XwPwwnkdQV+/gXwGJlzT6aWSnJUvQbpNEo03u/VmljxJKkjDfmk+HwjHp1FUlS9ixDGl4GdoSVI5dbtOQKSXgKjjrI6ZaQ2C3FxQ6CPXaIyIXFVIbMWv1iak5PgOIpPehqYRRiJsBM6zSBGSVIS5E5Zb8iZSGSJy2KuWdIPJLYqZph1FqvnnyZmebcqLOfJdpclKabdk9Z326OTtZbw6pKjOnsxfxHHyH5tOXj0C1ardhLWQe1o869Q0uQP0p3yyL46f6ephnT82PjJM8ti1ji07f4mhp+oJ0myLnWTlbbxQmqaRna3o+HOm1Gn6rYs4dXGSW9/eW4TUlswnksPkrwXUuj6nStygnkgvRbox
JXbTTTXKZ9YnhhkTTS3PP9W+zuLUJzxrsyeq8nX4/PPlTZY8KwkWNboM+jyOOWDrw0tiukdMsvwS9MiPgIjyPgTQahiFRGIikYcQmSIwMFhMGQ4ZUhUxsnsJmXARPkgmXJBZuRxy4OsYcQ2dZAE446yANzZx1HDAo3ZYhyIjyPhyRoLmJ7FmDsrYixAwpHxYxMVEYuCKQmQ2TYDYEDI9inlfJayPYqZfJcCpmZUlyy1lZUlyzfIccQuCSjScQSInHHHAHHHHAQiUCggAk9xkZUJQaZFgWIz2GxnRUTGKXuZ3JLiyDFkKamGpmdwfVxZAu8prIEshP4NZ79xmPJvyUviDMUraFcBtaeV0X8b2MvTT2Ro45XRnzhrKCAjug0gDmjqJZFiCvlhaKWWFNmlJJoq5oJlSis6Spi2WMsaK7GktgsNgMZGYF/Ko9Jol8i+h5zT75Uel0a+RHJ/Jqs/WzoVu37E691jYWiSSf0A17+Rr2K8H+rX+niOsO9Ql7Gei91Z3qvuKK5OzPxzpSGLZAIKywJsghs6wNJxFnWHA6yGziLEHNgNhMBjh8BJl3p8Ladc7lCVtpGz07HSRHkvMp/tr6eNJF3GivhVJFqCPP0syCHwQmJYxq2giou6dUkxmZ0mdhVRFaiVJnVmcjSeoytdkpPc8dr83fqHvstj0nVMqhCTvwePyz7srfqypO1hu+z4TGqVopwk7GxlsPhQ5uzopykkk23wkO0miy6lppOMPV+fob2j6bDCk0rflvlmW/LnClDR9OnOnktL0NvT6WGNJJJV6IsY8SiuKQ1RXg4d+a6VwEIJDEiUqJRiYWqIbJkA7AOs6yDgCYjsa2ERH4+D2FomKvcZkYi9yaR6YaYlMYmEM2yG9gLIb2Lh0rLK2JlLYnJNJttlLPqVFNWPqLRZsqinuZubM5NpEZMzm+dhTaSJ91AWt22+RU2kHKaS5K2Sd8F5yVLyNFTK3ZZnwVchrIkptsG6CYLKNHe0xkNQ0+REgGxfmU2rh1rg1v+01tJ1JNpNnknNryHi1Ti1uY78HfhyvoWDVRmluWlUl6nitH1FxaVm/o+oRnSs5NZuGkq1rNBi1EGpQTT9UeR6p9nZ4W56ZNrntZ7jHkjNbNEzxRmmmrL8fmuU3EvuPlTjKEnGSaa5TQyPB7fqnQcOqTko9s/DXJ5LV6DPosjjli68SS2Z3Z8k1Ed59KQxC0HEZmJnMFBAAsCQbQLQAqXAiY+SEzLhkS5IJktwWizSCccMJsizrOqxkjkmiapHNgaGRZzIAGR5HwK8SxAnQXMXBZhwVsXBZgc9I2IcQIhoikmyGS2AwBeR7FTL5LWR2irkfJcJTyvkqvdss5Xsyq3udGTScccMOJIJAnHHHCDjkccBJRIKCQBJKIJQgJMJMFBIkhphJgRJFwC7iVIE4XAYpe47FLcrIfi5QrPQaumlSRpYpcGRhbVGhhnwcuopp43sh64KeGV0Wk9iA6TBOe7IewG5sVkVoNsFuxhSzQKc1TNLIrTKOaNFQldi2G9hcnsUk3Su8qPT6NfIjy+kd5V9T1Wj/VRxfylZbWjXyv6CeouoP6D9J+oyv1L9Rl+D/Vr/Tw/VHerZURa6k71ciomdufjnEjrIshsoCsiwWyLGY7OtAdyBlPakPhwbn4RHcKs67Hwzu4BsFM5uw4BY1eRfU3tDGkjE06udnoNEqijn83wue2liWyLMUJxLZD4rY4aocUWsCtorxRc00dx5+nF1KolHVzpMuzdRMnXTpP2OtpfUec63mrHJJ7s8wmzW63m7snZfkzdPhyajLHFii5SfhePcrPqdc190WHHPJNQxxcpPhI9D07o6VTzrul6eEXOl9LhpcabVzfLa5NiGNRWyOLzfyPfMrkKw6eMEkktvYsKCRKVMOtrOK21YFsSmQzooQGdwddENgEN7Arc5s5eBQOaBewbewtjCYofDZMTHkclsz2Vl5HyITtsbkfIlbszpHJhJi09iHkUfJUM26
3F5syhF7lXNqkk6exl6nWubcYsfUXR+p1e7SZQnNzdt7AW3u2C5pLZjk6gbkkhc8iS5FTyVshdtvcuQCc22CccaRNLnwVspanwVc3JQJYLCkBIZgkBINgSGZOR7CW6G5GKZcODhmcHyaWk1zi1uZDIU3F7Mnfjmob3Wg6ldJs39PqI5Et1Z820mrcWtz0vT9e9k2ed5PDc30vr10UpIr63puLVYnCcE016C9NqlNLfc0MWRNclYosl+vn/VOh5tFNzxJzxenlGUtj6rn08c0Gmk7PG9c6FLG3m08fdpeTqzv/AKys/P8A/HnkwgUmm00014YS3KHUNAtBsFgCpLYRNFpoRkWzKlCs1uC0Ma3BaNFFtUQE0C0UEUEtiDrGEtkMg4DcR5JIXIwZAsY1uivDks4zPQW8fBYgV8fA+BhSOiGgIhoik58AthPgBgCsnBUystZHsVMrNMhVy+Sq+WWMr5K7e5vPgcuCSESUHEkEiJxxxwg5nHWcBOCQISAJRKIRKEBIJcAoJCAkSRElCJJxxIg5IsYUJirZYxqqJ0FrHsXcMnaKWNXRZg6Oen1pYZU0yzGZnY8niyxCd+TI11NPc5ioT2Du0AC3uQ2c3YLZUCJK0VM0bRabE5FaGGbkVMRN0W80aso5tmaQqsaJ3mX1PV6T9VHkunu8q+p63R/qo4P5f08NvSKoMrdS/Uf0LWk/UKvUv1H9DTw/6tb8eF6l/ncirexY6k/8rmVLO3PxzisFyBbIbL4BWQ5ANguVIrgHKfoBYDluSmVxcHbZKBvYJAoSJIRIhxZ0kW5Weg0iqKMTRR3X1N/TRpI4/NSXsaHxQnGtixBHGDIIv6eNIp41ukX8bUIW2kl5ZeJ7VPqczpMw9fNtOi5repYcaaTTfueb1vUZZpOMHsb2jepPTK1GH42qdLvbdJHouk9OWnxptLve7aX7BHStFbWbIvmfFrhHoIQSVJHJ5vNefmIzP7DGFLgKqDpHVaOKrCgm6VA1RFN7jDuWGkAtgkxBL2AbsN8Cm9xBz3J4IXKOY58DnwA2E+AWOB//2Q==\"]}" http://localhost:8866/predict/ocr_system
\ No newline at end of file
+curl -H "Content-Type:application/json" -X POST --data "{\"images\": [\"/9j/4AAQSkZJRgABAQEASABIAAD/2wBDAA4KCwwLCQ4MCwwQDw4RFSMXFRMTFSsfIRojMy02NTItMTA4P1FFODxNPTAxRmBHTVRWW1xbN0RjamNYalFZW1f/2wBDAQ8QEBUSFSkXFylXOjE6V1dXV1dXV1dXV1dXV1dXV1dXV1dXV1dXV1dXV1dXV1dXV1dXV1dXV1dXV1dXV1dXV1f/wAARCAQABAADASIAAhEBAxEB/8QAGwAAAgMBAQEAAAAAAAAAAAAAAgMBBAUABgf/xABQEAACAgEDAwIDBAUGDAQEBQUAAQIRAwQhMQUSQVFhEyJxBoGR0RQyobHBIzNykpPhBxUkNEJERVJTVHODFkNV8CZWYoIlNTZjZEZ0hKPx/8QAGgEAAwEBAQEAAAAAAAAAAAAAAAECAwQFBv/EACkRAQEBAQADAQACAwEAAQUBAQABAhEDITESBEEiMlETYRQjQlJxsTP/2gAMAwEAAhEDEQA/AKOnzcbmlgzXW55zT5uNzT0+bjc83U4hvY8nuW8c0zHw5rS3L2HJujMNFPYmxMJWg7GR0HvuVNRDdquCxF7kZ4XuvKAMLVY7tUVI4anwa+fFu9iusSvgV+KiMa7YGL13NbhhT5ds3pqo36HktbN5tZklyk6X3D8WffT17VaaYaQxwphKGxv0i2rVCpfK6Za7Cvq40k/uHAX3EOfuKv3Och8A2wHIFyAlIqQddOQlu2S22yC4TjjjhhJyOOEEnEIkCSiUiEEhBKCQKQSJCUSuSCUInN0mLDm96AGbjjjgDgWERW4whIOKshIZBbitB2NFjGIgixAw0Z0CxDgrwHxexlTOTCsUmc5UQDHPbkVkyKEW2wZTSKOpzW+1P6l5z2laGWRzm5NhwnXkrJhpm1iV/HPjct4sleTMxzqi1jmZ6NpwmmWIOzNxz9y5jyGZrkRqK8JpjUyaqHI6iEyb2M6bm6CixTZ0WR0lzG7QyyrCVDVO/I5oLMJbDk7KkZUx8JJh01iA5MTDgaiouCTObOvYBvcKBWdYFnNkmJvYVJhNipy2JpFZJbFDNO5clnNOkzOnO5vc59+6ztOvYBsFSBcgxB0TYtshsBs3kHXSdiJ7ByYmcjWAExE2HNipM1kJXzCo8jcu6FpUyiXMD3NTA9kZGF7mnp3sjm8kONPC+C5BlDC+C9jdnHqNIswQ6IqHCHx4M+KEkEkciUPgcQ+AgWIAYDGMBoCA42LcRr2OaTHKSu0RQ5oGhkXRFDaIaKlBbiA4jqIaNpSIaIoa0DRtmpsDVHJBUC0dGamxxBJ1GvSC0QG0RQupRRFBUTQugNEUHR1FSgANBNUQaQnEM5ugGyglshsFshsmnwae4aYhS3GRkZbpmp7HNgp7HNnNrS3zrDkprc0NPm43MeEuC1hyNNbno6gehwZeNzQwZLrcwdPlutzS0+Tjc57A9BhlaHWUNLktLc0YpNCgRF7jm7iV5pwe/AUZ3tYEHLjTXAhYty09yEtxU4z9fF49NOS5rY8qsLTuS3PZa+HdBQ/Ex8mmp8F59KY6wtu2F8L2NB4WvBHwt+CujjPljrwUteqw37m1PFtwZHWEowjC927Kz7pa+MruIciGjjckNgu2FR1MACjqD7SaDoLo6hnbsQ4h0AOCcQaoA5EojclASUEgUEhBKCQKCEEkohHN0mIAbt2cccMOOOOAOOOCSAOirHRREIjkiLQ6KHwQuKGpGVMxDE6FoKzOgxPYiU6QtypCcmQJB12bLSdMpttu35CyTbYKN8ziUoJEIJIAOLLGOZWQyLoihexzLMJ0Z8JlnHMzsNoY8nuWYTvyZ2OZYx5K8kVXV+MrDcqRTWWhinfky0Omt2TF0wE7CT3MdULC4sJMVB7DER04YmPxt2IXI/Gi801uD2GpiYvYNM2izGwWyLIbGYrIbBsFsQS
2JyT2JnKkVs2Sk9yLE1X1WSk9zP77Yerzcqyl37mX56ytXFP3Ocr8laOQPv2CZ9jprkC2LcyHI1kHUylsJkwmxTZrIYJMU2HJsVJmkIufIJ0mRYyNxumaGnnwZcXTLmDJvTMtzo62sMuC/hfBlaed0aWF7I49Z9tJV/Gx8HsVcbHwZncrPiEAmEmLhpOOIYcJDBYTBZNgC0DQTIognNWC4hE2PoKaOSGtENDlBbiC0NohqzaaIhoBrcc0BJGkpF0QyXsC2b50moOObORvKTqJolIJIOp4XRNDO06hdHAVQLCYLdFSjgGwWyWxbkayk5sW5EOQqU9y+gbkC5iXP3AeT3JtCx37hxmUXk9w8eUy2caCmS5bFWOT3GKVo4t3i4+dRssYk7FY4Nst4sb2PZ1SPwtpF/BNpoq4sbos44NM57TbWlyVW5r6fKmt2edwTUUrdF/DnT4ZAbj7ZR5TF9lPZmf8RuqZYx5pPZsOhYpolbOzoST/AFnRLj8radgSvlanJ7rbaivPGmMyY3bdNCX3xezH05S5YUxcsKQyWSa5Qmed8dodPpc4KMW3skjzHUJvPncktlsjc1eTJki48LyZk9P7DzvhX3WRLG0LcaNPJgrwVZ46fBtN9CrQSQbhTOUSukFRJURiiEok9BPac4FhQvwc4C/QVnGgHEtOApwKmgrtHUNcfYGi+gCCOSJoCcggUEIOOk9qOIfIBBxxwBxx1EgHJWMhHyRFWx8I7EWhKQaRyQxRM7TdFDEjkqCSJod4OJIeyJBeRlfJLahuR0ys3bNMxKDkckEiw5INApBIkJRKZCJQgbGVDYzEIJWibCW45GPhkooRkx8JEWGvQnbLOOZn457lrHKzHRrkXYxMrxkNi7ObRn43vQ5FeDpliLtEHDI8lrEitBblrHwaZVDkGmAgrNosVnNkENjCGwG6Jk6FTlQAE5bFDU5KTLU5bGZq50mHEarN1Oe5tWKWS/InK7yP6nJOx/mcYrSmMUyvBMakyOGb3HNgKwqsqKQ3YErGNASRcBMuBU2OkhMlsUCWwLDkqFspJkWOg2mmV0w1ImwNbS5baRsad2jzukb70b+nfyHNrPteV+DLEGU8b3LEGZ3LSLSYaYiLGJkfkzbOsFM6xWBzIfBNkMz1CCzjmcY0Oo6jiaEEJBpJnJBJDgKaBdoe0hbRYLaT4FyQxqhba8mmdETJC2Pkr4EyVG2dEg5AhI3mk0cdw6Big0i+k6iGgqIfAdBbQqew6QmfDDoJlITKZOR0ytkyV5LmiFKdCMmRIDJkq9ynky+5pNEfLN7inm9yrLMr5FPMvUq0Ljy7hwy7rcz/AIy8BwyNmej418eS+CxjkZeHIXMU7OLyQ48xjw+xdw4XtsOw6e62LsMKiraPSulEY8Xqthk6gtuQsk1BUt2Iacnb3IItzld2WdPntpN0xLx2gFFxewybunm3yy5H1T3MrSTlSVmhCTrcztNa76QKyu9mIlOlyBHKm+SoVi58RtU3YLVgQnY1NMVIl478CZ4b8F5RTBeOybQy8mBVwVMmn9jani9ivkw+xn+jYWTD7FLNhq9jdzYavYz80N2VnZsbJjp8AUXs2PkquNM6c67ABIfhwTyzUYpts7DieSSSVtnrugdH3U8kd36i1r/hydVNB9nZZIJuLbZQ6z02WgyxjKLXdxa5PoGs1uj6RpO/PNR22S3cn6JeWfPuqdRy9T1Us+VNK6hC/wBVen1J+fT1yeoy3ABx9iw4guJUqFSUPYW4FxxFygXNEquNHUOcAXD2L6CqOoY4kND6AENbhtEND6A0cTRNAAhJHJDIRthaE44eSwokQjSGxRjablEYokqIxIi0wpHUGkdRIA1QEnsMkqFTajFtjhKuaW9IUiW3KTbOSN56iXJEpHJBJCDkgkiEgiScSkQkGkAckEkcgkiQlIbFC0hsUTQbj8FrGV4ItY0c+zh8EPiheNFiK2ObSkpDoOgEglsR01jG9y1B7FLGyzGZrk4sJhJ
iVIJM2ipTbObAshsqKRNiJsKctxWR7BwrSsktmZOtns0aOSWzMnVu20NnpnqLbHQh7BQx+xYhAKgEMfsNWO/A7Hj24Hxxkq4qrES8deC4sfsc8fsB8UXBguBeeP2FSx+xQ4oyh7CJw5L84CZwKg4zskaENUy/khyUsiouJpbdDIO2Ka3DxrcdhNHRr5l9Tf06+RGFol8yN/AqgjC/VZPWzHQYpKxiTTJuWkWIsNMTGxqIuTNTJTATCRnYBWcyES+DHUAWcjmckc1CUgkjkgkqAOSDRCRI4ESFy4GNC5IsEzFSHSFSHAW20C6YTFsvN4kLjTOQSd7M6lyjaaI2C2CoHGxlGsoRQLQdENFygiXInI+R0+RE+BwKefhmdmyNNps0c3DMjVurKiScuar3KWbNV7gZsrbaRWdt22b5z/0CllbewKbb3ZFEpGhGxY6DqhER0DPQW8bplzDIoY2W8T2OXcNchiUFwBkl4Q+bb2SpCnD2N+qV+ze2d2V4LHaC4h0EqByxptUhqQ3DFNitIenx9tbF1R22QuMapFrHC0QqRnarI4Qe+7K2PM7VssdUxytNLZclCCaNp8TfrUxZdluWoZLMrHJplrHNqiaTShJMYnZTxz9yxCRjSMaTQrJBU9hqdoGStGdps7PDnYzNRjpvY280dmZuqhswl9mxMqqys4OU6SLmog+50i70npstTlTadWdUvIa10HpTyTWSa29z1up1Gm6RoJZ8u0YrZLlvwl7k6XT4tJpnKbUYwVtvZJep4XrvVp9U1dptafG2sSfn1bXv49hyf2q38xV1+tz9S1ctTqXcntGN2oL0RXSOSCSFagNENDKIaDoJaAaHtAtFSkQ4gOJYcQXEqaCu4kND3EFoqaBDQDQ9oBouUi69jqDaOSH00KNsdCFeDoQ3Hxj7Ea0ERiOiiFEYkZ2mlIYlsckFRBho5oLgGTSAiplLUZLfYnsuR2py9qaT3KSbbtm2J/abXIJI5IJFUnJEpHJEiCTkcgkhE5IJIhIJLYQSluEkckEkSEpDIIFIbBbk0HQRaxoTjRaxo59qh+NbFiKFQWw6K2OeriUgqJSJog3R2Y5MUluNRpgjExiYpOkSmbwzb2BlLYGwZPYo+hbsXke24disjVDJVzSqzOyK5F/Km2yp23IEUMIew+MPYmER0YioTCGy2LEYX4OxxVD4QEotY/Y5wvwWVDbg5wvwUao8fsKlD2Lzxi5Y/YAz54yvOHsaM8fOxXyQ9ioGZmhSexn5IbtmxmhaM/JCmXEVTcfY6Cpj5QoFKmFqWhoo7o3cK2Rj6GPBtYVwjFeViEdhiR0VsGluNaUg0iIxGpE0wpBJBUdRjonENhEMw3QElIitw4o5DEkEkckShwOo444oOfAuQbYtjBUhMuR0hUg6CmwGrDaA4H1IeAkyaOoP1wCT3Hp2hCDg6Ztjy/8ASMpkNDFUlaOaOrNlNWmitkVJl2aKuWDa4NIGbn4dGXqYOVm3lx87FLNiu9jSJ485nxU2ys0bWow2nsZmSDjJqjXNIijg2qBrcoCiNiLSoZEikfB0yzjfBUgyzjfBhuBuLH7AyxpIa3QuUti1lNJITOhk5CXK2BO8jcTp2KW7HQVIVC3jkm0XcdJXZmwaTsa8m1W0iOrl4nWtTtLcqw06rdBzyJKxa1K8sf6TRPAqOimmHHLGXkaqfoy+hEG0yxjlYtY74DUWmTqdLiwmSxaYzwZWEVNJplDUwtOjRkrRXyY3J8DmVRkYdI8+dRSu2ey6ZoIabCtt/JW6ToFB/Ekt3x7Iq/avrMtLheh0cqzzXzzX/lxfp7v9i39DfMV89sv7VdcWpnLp+kn/ACMZVlkntNrwvZPn1Z5pMDjaqoJMqo6NBoWmEmSBHHJnEhzQLQdHUPoLohxG0dQdBLiA4lhxAcSpolZxBase4gOPsXKCGgoxsY4B44Dug6EKSHKHsFGA1RM7TAoBpUEokpC6YUibCoFoAGToTOTpjZIp6vJ2R7Vyysz
pX0q5p903T2QKQKDRv8QlBpbkJBJEhKRyRKQSQiQkciUgkhdDkgkjkg4xJDkhiiSojIxEaFGxuOBMYDYImgcIljGhcEWIIx0o2CHRFRQ6JhVQaCohBGdNCW4aBC4ReQmzkwG9zrNoRtgzlQNsCVtlw0uQqbbGJAzVIoqrSVpi1DexvNhRhYkgjEZGIyOMaoewK47Ei1COwmEaZaxoUNyjvwF2hpBKIzIcAJQLTj7C5RHw1HJArTgaGSHsVskByEoZIWihkx7vY1skCrlx+xSaypwoUo7lzNDfgRGHzfeTWdaWijsjVxqqM/RRqjSiuCGmfi5CmhiW4nG6SLEGnVk3ShRiNUSYpDEkjK6kBbVA0MaIaMtb6AUQwmRRhrXQFLcNI5INIyNxJBIwg5nENgAtgSYTYEgAHyLkMYth0i2gaGtEULpBUSVENIlInpg7DlEakT2ocoBBtMekpK0LqgoumdHi8nPRBlATOBbaTVoVKJ35oUMmO72KeXHzsauSBUyw5NYGPnxWnsZGrw1bSPR5cfsZerxWnsXKmsJrwCkOyxqbQuty5UuSDRCVEoVBkfBYxvgrofj8GOibkpipz2Ankor5MteTSQ+jyZAE7YjubZYwxsL6B+ONjkjscNixHGZWrhShsc00h7SSM3W61Y7hDd/uCZ6fqJ1GRQg02rZQedXyVc+bJkk3Jv2RWlOSNJ4kVqLVKPL/AGljDrk3s9jzk5Sb3ZOPI4PZtF/+U4HssGrT8l3HkjI8jg1UlW5qafWN7N0Z3FhvQpJ8M5ppbFDDqU1yWo5b4ZHDFfhj9Ph75q1e4uEPiSSSNTDCGDE8mRpJK234KzBBarOtDpHKKTyNVBer9/ZHjdRpp5JynNuc5tuUny2/J6DJqFrMrk9vCXohctKpb0jVX15LUaBrdL9hmzjLHJpqj3GXRpqqMbqHTbi2luLvEWMBMNMVkjLFNwlymdGVjsI9MNMUmMTIpiRKR0QkRQ5IlIlImhdAaAcR1ENB0KziA4lhxBcC5QSoWx0YUgoQGKIroBURiRKiEkLphSCoJI5oYAyJNUS9hc2OArNNRi2/Bk5JvJNt/cWNblbfYn9SqkdGJydRUxDSBSGJFUhJBJEJDEiLQlIlRCSCSI6AqJKiFQSQugKiMSOSGxjYrQiMR0YnRikNSEaEhkUcosbGJNppgixBC4RHRRnQYkGgY8BrkyqhoIFMkzsNzYSewts5PcIQvJJyVnUbQOOaOONIEeAJvZjKsCadDCuluPxrgXGLssQg9mMGxhaDUAsK3pof2Kg4ZKhTHwQPbvY3Gtw4BVuHGNnVuMithqLa2FSiWGgGhhVnErzgXZRETiPhKU4lXLDk0JoqZY8gTLzQ3ERh8xeywFQhvwRUVc0kWki9FcFbTxpFuKEqH4VaofFOxGLZotRRl5J1Y436jVwBFDEji1CdRDCBlwSAs6iLCTFZ0nJEnHEqSccQ2qEHNgNnNgtgENgtnNgtkkhgslsgXQiiUjqCS3AnJEpHJBJAaEiUjkgkgAWiBlbANDl4B434YUoilsyxH5o+56Hh32cJWyQK2SBflEr5IbHXAzMsPYz9RjtPY2MkFuUc0Nmi4VeZ1mOm3RUo2dbj2exjS2bTKiHJEpEJhJhQKI7EITHY3uZ6JbyZBDk2yJO2TCLb4NgZii2y/gxitPiutjSw4qXBlqrkFjhQ9RSW4UIJIr6vMscaT3fCI4v4r67UKEe2D3f7DFnBuTb3b5Ls25ybe7YLha4NZ6SzsmPbgrzh7GpPHsynkh7FSkz5woU0XMkBEo+xpKSMWRp1Zfw5OChDDOc6Sb+42tB0rPmaSg37JWTvUh8Nw5Xao1dI5zaVb2O03Qpxp5HGH1e/4I19P07HiaeOSk63/uMf1KfB6PAopNoo9S16yZXgxv8Ak4um15fp9w/q2remwLBhdZJrleF5ZhQT2K+CrsObWxpaWamkmZmHwX8Kppi/RxfenTVoo6rSJp7GrgdxSYWTEpLgOr5
18765oGk5xVNbnn4y9z6P1bSKWKSrlM+cZ4PHnnGuJNF4vfTKzlNjIbGRVixsZDuSWkw0yvGQ1MysM9MJCkw0zOwDpENHJnNiAWiErZLDih9DlEJRCSCSF00JBJEpBUUYKIaDaAeyHATIq6jIoQbfJan5MnWZHOdJ7I0xO1NV23KTb5ZyRyQSR0dJyQxEJBpEkJIZFAxQ2KM7QlINIlIJIkBo6hiRPaLpIih0UDBDlERuihkYnRiNSJ6aYwGxidBDEhG5INIhIYkTQlIk6jjOmlOie8GjqM6QnNe5yasVLYFSpkdLq0mSwIu0d3GuaYyVuL7g47s1hjSO7LDhG1wNjEuAhYqd0OjBDFAJRrwUYVGmPS2AS3GpbIfACgocktHRVMDMrcYlsAuQ0hhzQLQxoFoZkyQjIi1JCJoCVZorZY3ZelEr5IipM3JACEN+C1khuRjhuRU03FGkPigYRpDYoSoKOzsuY90VUixifCFqdij0g0tgUrDOTcCHwC+AmC+DCkU3TJTIkAnuIj07JATDsVhuIbJbAbIMLYDYTYDYqSG6BbObIJDjkcSlYglINI5IJIAGgkiaOGEUSjjkhhKRDVhImrQGVVMZjdMFomDpm3i1ylTpRtCMkS0t4iprk9TJKGSPJTzQNPJHkp5YqmawMPW4009jz2ojU2er1ULTR5vX46k2kOfWdUkwkxaYSdDpGJ2NxvdCUxkHuiNA9Jt0XMGFunQOHDbto0sOKkth60cgsGKq2L2KFLgHFj9hzahG2ZNJAZprHDky815JNssZZvLPnZcAONlT0FXs9jnDYsOCSAmkkMKs1syllStl3I+SnNNtlJqrODb2LGj6bk1M0oxbv2Lmg0Pxp3LaK5PTabHDDjUMcUvV+WZeTzfn1DkVND0LBp0pZvml6J/vZqRrGuzHFQj6LYNJqF+oF03fhHHrd0sUWlswlJpqmKW6JttilAs2n0+pd5ItT47k939RD6ZhrbI0/eP5D1LcNyvdfgbTdKqa0Tg9pRa9nX7xuODhVtfiFJ7i75L/AHSXMWaMOWizHVYmqt/gZKYakH7sOVc1EIaiDjFpv04PAdZ6NnwanLN432ttp0e5g73sbUckHjyRU4PamrQTzXNFnb18kljcHTRKdHuOtfZmM4yz6JXStw8/3nismN48jg0006aZ153Nz0mzgosbFiExkWFiVhMNMQmGmZWGcmTYtMJOyeAa3YyKBghqWxFMSQSWxCQSQzckTRKRzHAFi5cDGLlsrKgVdTPsg65ZlSTbt+S5qJOc36IS4extn0XCVE5RGqBKgV+i4BIYkSo+wxRJ6XERQ2KISGJEhKQSRyQaQg6jq2CSCS9hEGCpliC2FxjuOhsIxpBpWQqYaW5IMihiQMEMSBUcluGkQkFZNCTkjluSkkjOmitgWE2A2Z1IZMU3TDkwGZkfjlaDe6K+N0x1muKHDMbdi7GQdM2hruGmhqjuV8Mi1FplxUSlsSkEcluWHKNsZVI6KCZRhaQKW4TOXIASW41IBLcYhhzRDQdEUBlSQqSLDQqSAK0kIyRLbQmcdhFVOcPYiENx8obnRgTYTlGkGkEkSkTwOSHQ5QpKhsOSapZg9ggIMMw3AhgvgIFo5bCLmthfDGtCmqYiHFhp7C4sO0KhLYDZNgtmdMLYDZLYLZJIbOOOSJDkhqQKQ1IIbktiaJomigGjqCaIAIomjqOoDcEuAUShwOaBWzDkC+TTP0U7G9iZoDGxr3R6XjvYlVyRKmSHOxoSiV8kPY3gZOox2mef6lg2bSZ6vNju9jI1uDuTVchU2PIuDTJpl3NgcZvYV8Pfgf6QQkxsE7C7A1CmTaG7hw1Tou48fGx2LHxsWoQSVsn61kcoqCtlLUZXOTiuEx2pzf6EXv5KqjuMV0IBNJBpUrBkAKkV8jHzYie7oCtV5pt0kN0+geRqUlS9SxpdL3y7pLZGpCKpJKktkjPfk58JXw4VBUlVcGhhg2k3x6iEqkki3Dwlx4OXV6q
Gtqkk7oTJ0n6tpf8Av8B3lX4FZIUt/WyDCpUqOnJKKpbt8+iBStr08nNqTTTHPQFB7DE1W7S+oreqRL3W/gqUCat7CpNXQabi7TByKm2zWeyAEmqpANtkwTKI/FzbZZTpor49hyd72RqdUen9x8++12mjh65kcIpLJCOSlxbVP9qPoFppU79TxX2wTfVoeawR/ezX+P6tgvx5dbMNM6caYKdHX9ZnJhpiE6GKRNhnJjI8iIvcfBbGd9A6I5IVAcjPhjQUQUEhmkhknMDAyvqZ1ClyyzLZNlDK3ObfjwOBX7LOcByj7EuGxX6HFfsJUBvYEoh+iLUAuwYo7E9vsL9ClJBpE9pKVDlQJINIhDEgDlENQ2Jig0g6ApUSlRLVHAQkxikJQaewjWISHJlSLpjovYRnWSmLTCuvJJmJktinM5zIpdE2C2C3ZDZnSQ2QzmyGZ0kxe41OxCdBplZoOTCT3Fp2EnubQ1nFOmi7CdpGdBlnHOjTI7xcTDiyvGVjlI0V05NBXYpMK9ijSyUgVuw4oOgcUNSASDQ1RNHUSiSjLaFtD2hbQAhoVJFloVNCKqrRKiMcdzkiEho6g6IoQQFF7kM5OmTVHxYxMTB7IamYahDIfBKOZz6gKaFyW45oXJGJAiFYLRAf0BNgtnWC2Z00NkEPkkikhchpEJBpCMUUGlsQkGijdRKRxwwhkBUdQwA4JgsOE45MiyU7HIBWC3ucCzSQxRdMsxdopp0yzhdqjr8N/pImthU4WWKBcDthqGSHsUNRhtPY2Z47KmXFa4Ck8zqtMm26KE8FeD0uowXexm5sG/BlfSLGO8dHLHuXp4a8AfC9iLpPHosUElYGoy9ipcsLLkWODKVucm2ateoSbdvew0g4w2CcaAAapCpDZCpACp8k4cDm7a2XLCjBzkkkXsUKSgl9fcz3vkLjseNNJJUlwOhBxW/kOMFBK02wlb3s5e9+mrJNNvyWcLSSTFZF89LikxsE2tgohydsjLODpKS2VMVKV7J7fvBeyF8PostRikmnau16C4LYnLVQS5r+IMW6YX6Q7aYSdqxSm3yEpMQG3aoGdtv8Ak4xdy2fhLkU8iT4b+81yEqL9AorcLFOE0+U0cpK9lRqRidLZBJ3yAmgkrYudM2LPLfaPF8fqM5x3UYRh+C/vPTO4Qb8+DD1OFubb3bbdmmJ+Q8nmwSTexVnBp8Hpc2nTvayll0ifKN5SYlkpl7Jo0m6Ql6emV2JRj3LURUcbQ2KoyvszoDkKhwNiSoaCQKJEBHAnN0m34BUJ1E6VJ7srpBSl3zbJSEEKJPaMSJUbJtMnsCUB3Z7Hdov0RXaT2jVAJQCaKq7iC1Q9xFtFSooE9xsGBQUUaEfFoZETENMXAN8AnWcMOCTAs6wI1NBqVFbuZEs3Zt5FwdXHkSRHxGygszb3YxZLDhdXFMLvKin7jFMzsPqx3HWKUgk7MaBnHI4zoQ0ctmS0dQ8gSYabYuIcTbIMi6HQl7iIhxZtIS1Gb9R8JtlOLofBmkHVqMg+5sRGXgbHkVq4bEbFiUw4sjqj0GhSYSkaSnDVySAmGmaRSGrBaGAtACmhc0OaAkhAhrclLYOiKEktoig2gGqJoCwWwpAMimbCXA6LKkHuWIOzPRVYRzIjwSc2wFoBoYwWjnoKaoAZJbC2IkMFslsBtEUJOBslMgDSGRQtMYnuBmJBIFMIo3HEnDCDiSG6AIYDZLe4LY4SLJTBIsuEbdgs5M5lHAN7ljC9xD5GYnTRv4rypq6laJ7SIMNLY9HM7FwtwEZMfOxcaAlHYdgZWbF7Gfnw87G5lgmUs2PnYy1E1h5MVXsV3jp8GtmxV4Ks8e5z6RSp5Hlnb48DMa3EQRZgjoVD4LYlrY6DpHSaAyZIU4tvYbJ2M08E5W+EK3kBunwqMG3y/2B6dVl39Tm6V8K6OTcZJ+5y6ttB7VvcjjwMcGla45FN2zO+jBNXNP1SGNqEK8vn6HOOyk/Gwmc23uvvKpJbV7uvY5
StpeoMVb3Z04yim0t3shSdI2MHkba8C5PtlXk7DknFdtk0q726Xq/I+AuMXJ7Ic6ikk7fqLU1dJUv3k3u0KhNb292DNWvcN290RV7tbFZFRh+WL9WGuQU/YZFbbmsAluPhG2LgrfFDmnBG2MdMGZOrW6Xgz9RFNX5RpKaunwVtVhTTcXya3IZMsalwJyae72Cc5Y5tNPZj8eSMlyhQRl5tM6exUlgab2N+UItFXNiVPgo+MSWOmQolvNBJiWiKkMUNQCW4aJMaOORDaFwJvcRqMlLsXnkPJNQi23wUXNzm2/IcB0ENSFwHRVkWrElYxROhEcomN0C1AnsHKJ3YR+gUoBdtIYoexLjsOUK00qEtIsTRXa3Nc1nQ0ckjmBbTNoR8Q1wIjJjFMZDbIbBsFtoAO9iLBTsDJkUFtyPhW8FkyKCpbsrNtu2C5Nu3ySh8SKwlJgpBJADYzGxkISGRM9GsRY2IiDHxZhRDEEgUGkZm6jqCo6ghhSDRFHV4N8lRpoJMWkMijaQjFwNi6FKkglIslrG99xyZShPcepoy3VSrHfQSmVXMF5kvJh+l9X1k2CU7ZmrUJurHQzJ+TfOhLGlCVjYso48ifks452kbSrlWCHuCmTZZoaAaDbRD3EC6IaDoholJTQDGtC2hUFSFsa0LaM6YU6Y/Gyve47GzPRLUWGKg9hlnLuhwLCIZhQW/IqWw1oTk2ERcnQDkROW4mUyKDu73OUiv8QlZERyl1bUhiZUjMbGYlLaYaZWUw1MfTPtHX7iu8jvK6DWwXIW5kOQdA2wWyLOsqJcccSkXA5BN7EJEtbC6YXyFB00C0FDZm3jvtK7B7IchGN7Icmer4/ioIhok42s9Gr5EVcsbTZemtirkXJjqCs7MuSpNJ8ov5o8lLIqbObeWdZ0Gh8GinjmPjP3NuHKtKdIBzFd69QXPfkAcnbSLeOPbD3Ken+af0LabSZj5aYnNdiTfDGpwcLvcqZG+2l6jdO22kzEursZpQ7WrdKmKa+ZUdGXdJvx4JnLsV+Xt9xP2mjJNJJK6X7xE5Jpvcmc2luhE8ndshptNhk4SSLGO8kHa4K2KLpbDpzah2wdp8jgA3HG3St/sQuTc2m3Zz7m9w4xbVJWxhCTfgNQk+FwNjCKVPd+gxb7cL0QvUPhaSiqk69lucnF7U6GSSfhEpKg6YKjXyp37kxg29wkqdoKN3uP9A1VDC37pHKSkqFa2fZom/LaX7Srh1F+Tq8e5JylVjIux+xDfdBq/ATmpwabKiy9mVwb54NrofWNqpzx6ia5V7CPjtO0qZe1+LubaW5nVZERPXo9a2aW6sDJre5U4NA9guWL2BcJyZO5tgW2xjx0R2UKmhINIhIIkIbpC5S3CkyvmyKEGx8K0jV5rfYn9ReMQm5zbfksY1wVZyFKt41wWYIrYy3i8HLtcNhEcogwQ+KOfVUFRontQyiKI6ApAyVINgT4KlKq8xEkPn5ETNsopTAb3DkKkdGUiTDTFRCTKI1MGb2ITsHNNRj7gKGeRQW27ENtu3yA2222SmXEDXIxICCsfCNgblEJQYyMBij7CMlRDSGdm3ALVE0xQHQER5Hw8GGoDojEgIIdFbGVORCRKQVAt0gydgXSIvc57s7g6MoqUEmwUworc3hDTbCSbOirGRiTdAKTRNtDO0hxMdU5CpZJJFfJkk/JZnEr5ImR8VpZJJ2mNw6tppNiZoS072LiL6b+n1CaW5fxZb8nmNPmlBpXsa+n1KaW5tnTTOmzGdh91+SljyJpbjlkXqbdayn2dYrvXqSpr1AdMRDYKkc2InMBom7OfBNoKkqEyHyEyMrQW9gsbpgMhSp0ZapLsGNTsq45jlKzk3ozbIZydokzMEivk8lmRWy+Q4lUyvkp5J0WMzasoZ5tDmb1Fovjb8hLKn5M95KfBHx2vBp/5o/TWjl9x0cl+TGjqq5Y6GqT8oz14ac02Fl9wllRlx1Ka5GLUJ+TK+Ormml8U74vuZ36QvU741+
Q/FP9NFZPclTvyUI5vcbCdl4wOrilYaZXhKx8WVYY0GlYMRkUIOSJa2CS2IfAqotkLZhSIXJt4/qKuY+ENTFYv1UG9j1cfDhqOATCs36oM+CtNFmb2K8zHVCplXJRzR3NDIU80bsx37RXmoTXqMWTbkpKYXxPc34jq58S/J3xPcqfE9zlk9xcHWxoZKWSvVFzJ8sbMfQ5Wsya9TXlNThsc3lnKuUt7pb2m2PxxqDa54FY43Gq3THxi6S8WZUjcKVW/HIOR97brYJukor7xbe/JBgcbdNELCk+5ukOSSVtANPI+H9C4XC8k7+WCpefcLGm1XqMjihHebS9vIfeo/qKvd8itg46GBtW9l7hNKCqK29Xyxbm5Ldslzn4b+/cP1DTzvwSpNHQkmt4/hsEpwXMH+IumG29mHBMJODWyd+50Y27YreBMUm95V9UM7Uqpp36Edqex0mscHKTSS5HL01LqmbaGBPj5n/AoY5tPkXqM/xs8p3s3t9BayUzPW7dItaWPI/UVqotpTXKEY83uP8AiKSps68atg6VKay4/fyUMmOpMtbwyNeGdkgmrRvCVlElwtBpBUBxWljFOHsXXEXKHsKqUmqBeyH5IUV5sQLk9jN1eXul2J7LkuajIscG2/BlW5Sbfk0zEWmY0WcYjGixBcE6Czj8FvG+Cpj8FrFycu1SrcCxErY2WE9jl0rojjrOJHUMXMY+BOQrJEzYie46YmR05iCZeRbe4cxfk6ISbOsglIYSnSsTmlfLGTdIrZHbCFQ2EmAuQ48lkdBF3FC0injW6L+DdCpmKG3BPb7FiMLWxPwn6CCuoWBNUy44Utyrme7QqZS5LGNW0V4clrEuDHYWMa2G8ICOyCbMFxLYt7snlnNUXCqGqIe5zYFm+YimJbbhxpCE7Y2JpSPh4HREwGJmWqZiRzSoHuIcjOyqRNIrzSGzkV5uyplJORFdofJ2A0XMkUlTH48jg9mLogfA1cGrTpN7lyGdNco8+p0xsNRJcMcqpqt9Zk/ISyr1MaGrfD3Gx1N+R/o/02Fk25J77fJn49Qmqscsl+R/pUvVxS9znIrqfuGp2TaYmxUmE5C5SMtUAkxMpU7CnIr5J+5z60m1ahkXqWYTvyY8c1Sqy7gyX5OTdomutOLtBplfHOxyYSrE+CvlWzLArIrNsTpM3NHkoZ4WmauWF2Us0NnsdU8fpnWNmi0yu3TL+ojyUMipsJOemYXIHva8gNguRfAcs04vZhLVzXLKzZF2P8yhdjq29nY6OpbM+BZxRtmes5gX8eZtF3DJumUcMN1saWGGyMbJGkWcSbRbxoRjjSRZgtjKtDIoYlRCQRNNBzOIkQASIXJz5Jitzo8c9pq3iWyDa2BxLZBvg9THw58CnTDTFPk5MfeGObK82MnIS3ZnqmXk4ZWmrLM+CvNGeql4VSIeQU3SFObvk7eMVr4hPxPcpPId8Rsf5HW50rU48erg8iTVq79D0HwXCbS3i+GjyOgwzzZV23zyez0EksUMWR2kqTZy+bHv00z79Bxwp+eS04dqT4LkNPBNNJOwdRicpUuEYaxyL4zZRbfqMhh+W5NL6j2oY1vuytklOb8v0SRl8J0pwi6tt+iQt5W3SVL2OcG93F/gCoO6apB2knu3DW68L6sBppktNonhOU1foNSTSdiXB1xuMhBpchw4NUkSqYNV7nbv1DhmJK9hkbSFQtPcct1uAEm6syOr6pxrAnu95eyNrHDudM8nrJvJrs8m7fe19ydFc9dK30FPY5tkRCMkOUqY7HPcrvZhQnub+LXBVxx7o2uTou1T5RGKVqiZLtla4Z1w4XKNM5IdJWrAoZgohxsZRzQlKmSGxRzKmzUyRtGZrWscG/IFWLrcnfPtXC5ExQU4tybfkmKNfkSOCHwQqCHwRlqg7Hwi1jK8CxA5tqixB0PTK8WOjwc+jNTslAomzMJb2E5BjFyNMxNJkhM0OkKmdOSV5q2La3HtJsXNpG0AEqOcq4AcgWxl1E3
Yp7sJsFjhBGRW4KQcSiNhs0XsLqilj5RdwqyTXsc9h3ekitjdIJypCNOTJeyKeR2xrbK83vQGPHyWYNIqRdDoS9DLUJbUwlbFQtvcamkZ2GYlSBbIcgWy85K0LYDZLYDds6c5RRpjYsQnTGKQWCLEXQ1S2KykF3UZWKPcwHMU5sFzDgHKd+RUnZDkA2VISWQyGyLGHNAs5sCTF0wydAfEp8gzkJnKiQtxye46GT3MxZGnyWIZL8kk0YZK3TLWPPeze5lQyDoz9wDXjlvyNWTbkyoZmvcsRzprncm1U0vPJtyLlMR8VeouWVGej6bkmknuVcmS/JGTJfkrymYWdqbXSm07TLemzXSvcz2zoZHCSaew7j9RPXpMOS1yXIStGNpMykk7NPFO0jl+XjbN6tp2RNAxYxUzq8HumqZIFPNDZmnkgU88Nj0s59M6xtRDkzM8KZs6iG7M3PC7M9ZZ1mTVMCx2WNMQ1uTA6zkcluMjBtjoTji20i/hhwKw468F/DDg597VIbhhwX8MdkIxQ4LuKJhdNIdjWxYghUEPiqM+qGkSQiRKQc+CQZDzCoGtyYLc5h41bOnxz2mrONUkGwYbII9HPxU+FSAbYcxUnsTokSdoW3sQ5bkXZyXfsOk9hTQxgtWRdh83nKlQlsObti2exHOgbp8TyTSSu2BFOTpG90rSKEFmkvoLV5Di7o8MdNiSpdzW7LX6Q4JteCtOV8C2273Oa6n1rn60undWlPIoNNb+Xf3G3k1D7VS3Z4jHHLi1iml8vLbeyL8PtFppNQbbadW1S/EjXdT/ABdHk3nk43ZOc23bX0R3w3VuTf3mbDqOmyQtZOyXuPx65zjTmpJPk4tW5+sVlxl4b/E5d6VNdy9yMeoi/Kv1Y2+5XyvVEf8AoAKMKdxa+m5zhBJNNtPykNST2fDJhBQtPdMf76fCU41tGvd7nKUrq7X0DcU22qa9ga3D9UhRipJWmvoH2wW3dX1REPU6St7D6Y2oJXd/RALm07RyVqrIUWraY+9JZxOmjxuSXfnyT/3pt/i2eo1Gpjg02TI3TUHW/LrY8pB7JFX1lOjY8B+AUEZVIZIWnTHPgRk23KzRVnFP3LSqUTNxzpl3DO0duL2CU6D5TIapnPZpoJ7qzRQCUcyFyJSJrYxtenOVLhGzmlUGZeWFttgVY2THuKSpmhmx1exWlCmPpBithsEAkNitiLQbEdB1QlDYvgx0FmDHxK2NliHBz6M1cHApkpkBL4FTYbdCps0zElTdJim2xrVgOO3BvKRT4EZG2WJqkV8iNMik2yGgqINSA0RQTRFCJFBIgJKygdiW5dwlXDFsu440kKnDk9jnujkiaEC5cFTI9y1ldIpTdsAOLbZYx0irF0NjKiLAuRnQantyVFMNTFMDqx3nOViVImzXOeFaJsgizkzRIkGmAmdYrQYmT3e4ruOciOGY5+4LmLcwHMRmOTIchTmC5i6DXMhzEuYLmL2DnP3FSnYDmLcxAU5CZSs5y9xbYBzYUMlPkU2Qm0w4F6GS/I/HkM6E2mPhNisJpKdhKXuU4ZGh8ZpoiwH/ABH6shzb8gJnMnhJcmLbYTBbIuTC2A2EwGxSEt6LNT7Wzc0+S0jzGOThNNG1o81pbmHmzy9Xi/024OxiZWxTtIsRdoXh1zTWibtCM0bTLCVoCcU0ex472IrH1EOTNzQ5N3UY7sytRjpseozsZGaG/BVcGaeSFsrvHvwc2vVJVhjbZaxYuNgsePfgt48fGxhvyKkDjx+xbxQqjoYyxjhucutdXIZigW4RF447IsQVGfVjiqGoWg0yoY0zgLJTKMT4Bb3JvYBvc0zkqkZjVsUnbH4ludPjiKsxWxzJXBDO2NP6KnwIm9h+R7Mp5J8onXwqBu2Gt0V+7cdBnneT6QmgaGNWgWjC0PmD5Bq3RL5H6XTyzZEknufQ94wix0/RvNNN7Jbtm41SUI7JbIjBjWHEscVv5fqXdLp+6SbWxy70uTpWHSTyOqe5HU3
pumadZdRJuUm1DHHmb9vb1Zp63VYOmaOWozv5Y7KK5k/CXuz571DXZ+o6uep1D+aWyiuILwl7fvH4/H+vdXqzLtbrsurm20oY3xCL2S935EJ0BZ1nVJJORj1rdNzY5zWLPPtT4k/D9/Y159P1GOpY22mrTTtNezPKKTTtM0+n9Z1OjqCffi8wk7X3ehy+bwXXvKpZ/bYhqNThVNdyXryWtN1SKlWTug/VcEaTrHTtWuzK1ik/GRbX7P8AMt5Om4ckO+Oye6a3TPN3jn+2eNJL/S7g1Smk4yUl61ZdxzU1TSTPMz0Gp01zwybS3+V/wLWi6o01DOkn/vIy5Z8V3/raniafciFBT42flDceRZIpppprZrydOBUPhLg0/wC8hbOiZJqRFpcumOASTTtIRq9Rj0uJzyOl4XlkZtdDC+y05enoHhyafWUs2KE6WzqmvvOnxeOa+peV1euy6zLcl2wT2j/EiB6bWdAw5oOemdSrZN8/eYGXS5dPNxyQar1Q/L47lFlQgkCgkzlpJasTNWhwMlsEoqpumWcM6pMVNbnRdM6fHomnGVxJi/BWw5LS3HXTs6VSjewKe+5zdoCcu1N+QUVnncqXCESVoK222/J1WCVXLBNPYp5MdM1Jw2KuXH7CUoVTDQcoUClTIpDQ2PIpIYuTOg+BYiyvAfEx1AajjkcZhDBasOjqLlBTiA1Q5oVLg0ySvkRWyItTTK00b5STRDQVEM0AGCwmD5AOQ3HG2RCDb4LmHFxsUOGYYcFuMKR2HHSLCjSDiiWqIaGNC5OkxEraiVKii5bj9Tk3pFS7YxTVINSEJjE7FxJykMTEJhplQj0wkxKewSkUDbJsV3HOZNoN7iO4U5kOZINciHMU5gOQga5gubFtkNhwxuTIcgHIFyDgG5AOQLn7i3KxAbmC2DbIsOBLZDOJAAaBYxgtAA3Q3HMVW5y2Cku45FiDooY5lqErrcmhbi7D5FQaY1cEU3NC2hzQLRFJXYLdDZIU0SQWy3os1PtbKTsiM3CSafAaz+pwPV6bLaRfxys8/odQpJOzZwztJnDqXGm2b2LyaOfAuMg7O/w+aWCxXyrZmfnxppmlkWxUyRtHZ+pUWMjLjp8CHDfg08uO/BWePcw8ntMIhj3LGOBMYDoxPP8AJ2NIKEdh2NUBFDUqOdfDYjExSdBphAcmTYtS2OczWGZ3HKW4lzOjO2bZyD29iLBTtHI3zkhx5LWJFeCstY1sjfETPpy4IZKIlwdLUjK6RQzSpl3M6RmaidNka+JqE7Y/Gynjlb3LMGcHlhLKZLQtMYmclD5ljxuckkrs3tFp1gxptfO1v7IraDSqCWSat+EzRgnN0lye75NspDtPjeSSVbGr34dJp5Zs01DHBXKT8IVpMUcWNzm0klbbdJL1PIdf6w+o5fhYG1pMbuPjvf8AvP29F95jjH7rTv5it1nqmXqmr+LJOGGFrFjf+ivV+78/gZzZzYLO2TjG3qWzrBvcmxhNkpg2dbAGxk15Lem6hq9K70+ecPVJ7P7uCgmGnZnqS/Tlr0uk+1OeDrVYYZV6w+Rr+DLmfqXR9VgnlWoWHMlahKDTb9Ntn9x49cBJnNrwYt7xf6r2PSOrYlNY1kcovw9mvpZ6iE1OKaaaa2o+Uwk4tNOmbeg+0Wt0kFBqGbGuFO019GjDf8f33K5p7TVZIaeEsuaahjirbfCR5XV/aSWbMsWix9mNunkny/ovH3lPrHW59TwYsbxfC7W3NJ2pPx77bmXpd830Rfj8MzO0rpqx1E3O2223u2+TS0mplBppmPjTbs0dPBuib/j8R7eu6dqviQSb3LWo0uHVY3HJFO1yuUYvTpuDSaN2ErSadpm/i8ks5Ws9x43XaSei1MsUt0ncX4aEI9j1HRQ1unaarIk+x+jPGq1s1TWzXozl/keH/wA72fKjU4KznwRZDZypLmLsPI9iu5U+TbCas4p00W1O0mZsJOy1jntydk+HFhT25FTl3beBUslOrJi7GpL
VExJq0clQzS1aFZIWh0TnGyTjPyY/YQ4UzRyQvwV8mOiaKrpErkJqgVyZ2JPxssQZXxj4+DLUM5HApk2Z2AVo4hBUEAGhbQ5oBo2zCVpxK+SJcnB+gmWNvwbZLim0A0y28Lfg5YN+DUuKfY34DhhbfBdjg9hsMG/AHxXx4eNi5hw1Ww3HiS8DkkkM0RikiXRLdAsAhorZ2op7lmTSW5la3OraTAKeedzdC0C3bthIE0xBIFBICGmGmAkGVCokzlIE4fAOyLBvY6w4BNkNgtgti4Y7BsBshyDgG2C5AOQLmSDHIBzAcrIsQE2DZDZDYAVnWDZyYgJMNcC0GmATRzRKOJoLaBGtANB0gp0x+OYiqCTaYWE0McyzjlZnY5+5axz2RnTXE7RzVi4SsYTTBJCZIsNCpIikrSQtofNCpIqUjdHmePIk3s2ej0uW0tzybbTTXKZtdN1HfFW90Yefx9n6i83lejxytDU7KeGdpFqL2OOWytnSViMkOSz4BlC0dXj89nqosUMkLK8se5ozx+wiWPfg6pv9J4qqFPgYojFD2CUDHeenAJBUM7H6AtUc+vHVORKdAWQ3RExTNcgHP3FudC3M2zkz3PbkKMtr9So52Pwu2jpzkluPASQKXA2Ks24Dca4LUFQjGuCxHg0yJPYgJMNsXN0jRdVsz2MrUS3NDPLZ7mTqZfMTUJxumWYSKcGWMcjk8kC5F2hiZXg9hqZwaDy0FNtJfga2i09JOS3E6HTNu2in9oeq/AhLQaSVZGqyzT/UXovd+fRHr5l3Uz1O1T+0XWFqG9DpJ/yEXWWae02vC9l+1nnZMNpJUlSXCFvc7MyZnIzttoWQEyCiQQ0ScBoOs5o4QTe4SYByZNBqYaFRGIimYibATJsngS2N0jXx1b52ENjNOn8RVzYrPQjcw41fBo6eKikqK+GKci5BJHFr3Vxf07uq5NrTu4IxNI/nRvYUuxNehXhz7XDVweG1tLXalRVL4sq/FnuVweL6vgeDqeePicu9e6e/77Or+XO+OJ18VDmzrIfB5bMuZVmty1NWhM4mmaRadD4TryISDTo68Xog8rtJoLFPjcVN2heOdOrLsXGnFpo5oXilaHVYjQmGlYDQyKtAAuNickNuC1QLjfgXDZ08b9AFjd8F+eP2A+HXgmwuERg14GqLSGKHsEoGdyC6YSQzsCULIuDLUbGKFhxh7DVjHMAjsI+GWuxehzgvQ0mRxUeNegDxr0LjhZHw78GkhKaxW+Alp9uC6sVeCXBJFcCmsaXgJRSGSQND4HHHHUUbiG6W5zaStlPV6uOOLV7gXA6zUqEWkzDy5Xkm3e1kajUvLN77Ck7DnCtOTsZEVEdEhAkg0iEg0hwCSJOSJaLhIIslgtlcDrIbBbIbACbBcgHIFsRibIcgHIGxUCciG7IOJ4HWc2RZzEHWQ2cyGAdZ1kEpgBphpi0w4k0GImtgUGiAhrYFoZRDQugpoihjRyQ+k6Ow+EmhSQyKZFC1Cew+MtipBjoyI6DrtANEpkvcmwyJLcVJFpxsXKHsECpJDdFlePMlezJcAHGnaNJP1OHx6jSZbitzRxytI8/03O5JLyje06bS2OPXg1+uSNcnrdBUHHDNJNpkpNco6PF/A3v76FKcLeyDjo3JW0l9RmNpTTaLq4PS8X8HOf8Aa9DKyaRwfH3k4dL8S7aVF7NYiE1jbbNp/GxKXBR0WOnbbfsIz6FX8s69mPepaultQtZviNNO7Zp/44s+Gqrp2V7qmvqKy6TLjXzRa9zbxP5UE0mt0Z3+L47PQeWyQaK03TPT6rRY80G0kpeK8nn9Xpp4pNSTTXqcnk/jfn4Fbvt0X9Km6ZmwTc6NfSwainRlM8oWIxsdGIMIjYoqgzGhy4FwQ1cF5PLmIyPYa3SCeFONN7mucXXw6yNRKrMjNK5M29bp5pNpWvVGDmTUnZGs2X2kcJD4S3KMZpOrHwnfk5fJAvwnsNUiljn7liEzz/IXVLq2vXTdOsOBr9K
mtv8A6F6v39F9543Im222227bbtt+pc1GSebLPLlm55Ju5N8tlXIj3MyScRq9VmgGh0kKkadSBnBNAtB0IOokgXQg5nHUHTRRKQSQSQdAUqGJHKIaQgFBVsEok0IAou6DC55U2tluxGLG5ySSNrT6f4ONJr5nu0Rq+uBbw82WY+pGkwvJSHzxKM5Q8xOa44cHgyKMkze0mRTxqnZ5uDaZr6DM4tJvcM/46lXmtZGL9pNKp6WOpivmxOn7xf5OjaTTSaByY4ZsU8WRXGSaa9Uzu/P7z+f+qseBvc6x2u009Hq54J7uD2fqnwxB5Gs3N5WLnwLmhjAYoCWqIsOQtnT46SJPYR31Ma2ytlbTs6YqNLBPguwdoydNO6NTA7RFUY0FBbk1sFFUwMajaOcBkVsS1sBq7j7AuC9B7QDVAC+yvBKiHRKRPCCohKKCSolC4ExihiQMQ1yEgdR3bZKVhqJfABQ9jlBIalRDQzLaSFz3Q5oFxtDJVkqBobkVCm0luxhAM5qKtuivqNZjxJ7psw9b1RybUGVJ0NDW9RjjTSaswc+qlmk22VsmWWRtttkJ2X+eJuv+HJ2NiIiOgyNJWIDYioDYmYNihiQuI1IqElI5koiRZBbFthMBsoIbAbObAbAObBbObAsRis4EkmhJxxwqHEEkEhBxJ1AEUdRNHJC6HLkNApBJUTQZENARDRNAkdRKJJAKJSCSsJR3J6QVEYokqI1Im6AEgk6J7TqojoEmGmJuiVIqGejmrQtSDTtDgBJewpwse0N0OmlqtVDHBbtm3jn6vF591p/Z7pkss3lmmoLl+vseshjhBJRikl7A6fDDT4IYoKoxVIcevjxzMaW/8RQjLFLdFgFxT5RVhKij86otQ/VViZx7ZprgbF7Ch8RlTaTSuilqE6Vcs0LTK+fHbtcIYUJttKDe4eJNZIrwFOG6l6EK+5P0Yz40car8AwIO0n7BWvUCrm65EarTx1GFppXWzGzaa2ZCmkt2TZ0c9PLvD2Z3FqmmamGFQWwPUcKWrjONVJXt6jcaaVPlcnnbz+dUv7NSoOK3AtJW3SE5NSoLYWcXV9FV1NJW2kvcF6jGnSdv23M1znmaTbpl/DgUUrW5158Ek9rh+K5tSapLix74AikqS4oJqzbOZmchULgpJp7p8nmeq6f4eVtLaz1CVFPqOmWfA6W68keTHciPFTlTGY8t8MXrMbxzcX4K2PJTpnnbz1NauPJ7lmE+DNx5Ni1jnwed5cpeZk+RWTgNsVkkevCKkKYUnuA2UTmC2c9zkgDjqCSJoXQBIlKgqOr2F0OSCSORKYdAkg0gU9yUyegaRyVuiE7LeiwPNkSrZbsOku9NwKCebIrS4T8suRTnO3vZzSVQiqSL+k0/yKbX0Ea30zTu035E6jNHLr80sa+R7J+tKrLWryfoumUIusmRV9F5f8DKlL4eFyXL4MvJrnppJyDVqdP1LuC9pL9ZftMjS62GdrHkajkT2b4f95uaWPfFOOzS3KzJqJjS0moU40+Vyi6vYypQeJrNBbPlGhgmpwTs6PFfzfzVxifabSOUcergr7E4Tr05T/G/xPNXTPoWWCyY5Qkk01TT8o8X1Pp89FlbSbxNvtfp7M5v5fj7r9ROp/ajYMmc2BJnHIgMmLbObBbOjxwnNlfLuhzYrItjpgiNPOpUbGnlsYePaZraeT2J3GsaUXaDWzFY3sNsgzYPwN5RVi6Y5S2GEyoWwm2yACErDpJAp0S2xBzIT3BbITtgFiCsYo2KxsfFgBKIaRCexKaKDqBaC7kA5IOnxzRDpLch5KKmozOnQdBer1MMabbRg6zqj3UXR3UMs22m9jEyttsrPtF1xOo1U8rbbZWbsmT3AbN4i211hRAvcZEKDYjoCYIfBGWgfDgdETBDoozBqGIBBlwhJgtnAtlEFsWw2xUmMBbAbCYLAwsgmjqEHImjkiSehB1E0cT0Io6iTqEEUTR1E0IIo6iTkhByW4SRKQaQrQhINI5INIi
0OSOS3CSJSI6HJBpHJBpE2klINI5IJEWmithcthj4FzCAtshSIfIKtvY0kByb8DUxUVSCsrg4bdnpPstp125tRJb32Rf7X/Aw+n6TLrM6x41d8vwl6nt9FpoaPTRww3rdv1flnd/E8d7+q0zOTqyccQ3R6KnWRaIclVtoU8lPjYRydNmlJULuk0wXkd7FfPOSaSfLFxcn9LUJdttvk6c4tLdFVt1yA22ueB8P8y0xzSbXqApJSbS2ZFbpgzdJ+qDhnx1DjC74Fzy5GrUqv1Kzi+1NOt7a9RrmqS9APnBrNJw7W90KnlfYt2Lk6ntwxOadQX1GVgv0lSywvdxbdepaxNQxqU3u939TK0rbyZMj4WyLDm2lu/ozHfjm6Vwdm1Hc6XHhFffJNJ7r0Act6St+S7o8LT75bv0LmZmchTB2jwtSc5pJLZIvprwV03KSS2iv2lhUlYzs4mErlVeRguK+e/CQblVLywqKk7Zgykkvdg96Sq92LpceU69p+zM2ltZ52cqdo931jTrNpnNLeK3+h8/1UuzK4+jOLyZ5U6W8OVVyXMeTjcwoZmpL0L+HNdbnD5sekMuc6RXnNth5GxEmd3CQ3ZFkNg2PhjTJQKYUSaBpBJHRVjIoztHAqJziOUDnAn9BXao5WPcPYBxY5ogphIiqOQyNxxcpJI9H03SduNKqb3dmT0vT/FzptbLdnqs0Vp9A2tsmTZey8i+qk/tQwwU86S3V8+pvYlBKuIwVt+iRi6LJHFkUpK16F/PqILTKEZK8rp+y9PvFq+zyq6jNLUZ5Tlsm6S9F4Keok3slskaWTTuGD4q3TV7eN6/eVcmHt0mbM1+rBv8AYc/kvFV5nuanafk9D0fqUoNQyO/R+TzTfzl7SzcZporVuZ2M5ePoODNiyQptK1wxkYPDPbeL49jI6XmU4q6fhl3Xaieiw/HjcoJpSXon5L8P8ibn+TbjSUlJWhGpwQzY3GUVKL5T8/3lTB1TTZkn3JP2fBdx54T/AFZKX05Ou83ODry2v6NPG3PT3KHNVuvqv4mJmxzxv54te/g+h5MSn82NtS528mZmw6XPN480F8Rc9u0vq15OLfiuL7Rc9+PDN7kWeo1X2dhkt6aab5pbP8GY+o6PqdO3aa+qovObEXNigBNWh8tPlhzHb1EtGsIrHH5zTwKqKMF86NHCuCNVpFrG9hqYuC2GRIUjup8jsbtFVv5izi/VsfQa6FtpHTnS5M/UarttJhFcXviJPk55ElyjGetl6gvVzae4+E08mphF1aIhqot8r8TzWo1s/iUmTp9VNtblfn0nseux5k0tx8cifBh6TJJ1bNLE2zPqovKbYSk2IghyQv0YtyJEnMXTKkipnWzLkitn/VY0157Xrkw8r3N/XrkwM2zZt42NIkwGyZPcGzpgEuRkELiNghaBsEWIITBFjGjDRw2KGxQEUNiiCGgkCkGi4TgGwwGMgNi5BNgMOgLBYTBF0IomjjqFabqOJolIXQEmiaJJAaJokihB1HUTR1AEUEkSkEkTaHJDFEmKGKJnaAqISiGoBKOxF0C1HcJLcJqiUhdAUg0jkhij6k2hCQSQSj7BdpIKaAlGyw4A9ll5yrio8bbD7EkWVjrwLlE6M5VwhKjroNot9L0MtbrceNJ9qac36JcmufH28Ez16/o+ijo9DBdv8pJJzfm34+40TkkltwcetmcnFJFZZKMGE5pcbiMzck7GrM9ocqVgLc604e6ITfkGnEp+BWZ7xXuFJ9rvwKnJPJGnYAbk6QEN20S9myFSdryB/wBJT2a9CvqMvbS5bdDMmRQnTfIhLvk5tWk9gENul9EC222c3t9UC3UU7A0TlUUylnm5QbT2THZ51ibvgUsN6NSd3OSsVXnMvsWGPZplfMt2c5tJvz4DyOlXhKkVVJzypcK92EKTq3p4X8758I08KdexUwx2Sr6FxPbbhbIFanDMTubVDYq5X4QGGKScn52CUrdJ0vLBjr3TE0rfqA93s973Yme
VuVJeyDi2l/Fk0ucTOajty/QW8jjynb2JdR3bv3ASdubpbbX4Fwq7Uty0uRPntbPm/UFWqmvc+h55taXNK+IOj55r5d2pm/cw8rLXxTHYcri6bEtoFv0OfU6hExEuB8kJnsaJIlsQmFIDyMzE7DjuKix0FuRTOgh0EBBWWMaMNUxRiE8boOER/Za4MLrgUpQrwLcS5OFeBEoFzSVZoKELklQTRb6fp/iZla2W7L6Tb6LpVFRTW73ZY6nmU8qxxfyw2Qemk8buK3apexTyPvyuV2r2Kyu/ODwY++SXoW4YVLO1SlFpJNre1wl6O9vvMjN1HDppqNub4cEuV7vwb2gzYdRjWVNKLpbcp+G/2D+08iacenvTytTg4Sl6tP8AvK/VnDD0jJjW+TKlFJburtuvojQzwwT1eaLtNRjN3xT83+Ox5LqM88tZmlPE8EG6xwprbw/dvmzHyY/y7f6Vq8UI6XLKbaxyr6Mt4tPODTcGvqheDDKbttv7y/jxuKVWY+Tf9MuLnTszxzSbpHpYdmp08seRJxmmmvVM8mpSg+b+pudL1akkpNX5MMa/Outc3+nntZpM+gytStJSaUr59H+B2HqGfE1U269zc+02jWTBDWY182PadLmLfP3P955k6bbm+ka9Vs4ftBmhXem0gtT1XR6+ChqYyhNfq5Y7OL/ijDYDLnm1zlL9NXNPW6bF8XSa1Z8K3dNNr6p7orL7QdTiq+PFr0lBNftKDXItoeL/AMK1oz6xkyKsuj0k2+WoOL/YyjN983LtUbd0rpfS9xdBpWdEvou2/RQjvwX8KpFXHHdFzEqRlr6qHx4DQEeAyWhLfzss49oIq8zZaW0V9B0oTnl8rMfUNuTNXUOkzJzbyYRZD3ZDdRb9gntyJzTqL+hcRWXmleVv3LOl5RTk7yv6l/RRto236yiNzRJ0jXxKkjM0apI1IHLW0ixAckKgNXAlJohoLwQxEVIqah7Mtz2KGqlSY4msfWvZmBqNpM3NZK0zC1L+Zm/jZVWb3BOfJKOohRHwEx8D4IjQOgixjQiCLEEYUHRQxICI2JMAkiTqOKJD4AbDYuQWkBi2GwXyLoQC+QmRQdNyRxNHUT0Iok6jqDoccTTJ7W/AugNE0EoP0CWNvwT+oAJEpDFifoEsTJuoC0g0g1iYah7E3QRGIyKJUKDSM7QhBJEpEpEANEqG4SW42EL8C6ARgMURqx+wax+w5nqpClCwlD2HrH7BrH7GkwqRW+GcsfsW/hkOBtnJ8VHCkKlAuSgJlHlI1kNVWNzmoRTbbpJeWe06T0+Og0qhs8kt5v39Poil0TpSx1qs8fnauEX49/qbp3+Hx/mdp/HN0mxMsjaVbBZJbNIV+tHY3PMcnTrwDPendHS2V+gLaobSBeza9SF6Ml7oFyVApGX9Rp+oDgk1Jbex2SdwdcoCGTuSXqhFJUydtHJ7NegDadryg0+H6jMiaWTK1LelQUUowpcLYCLSlN+4baaaEHWtivOba7UMTt16C3vf1AynBzhXl7FucFCGGFbJNsDTx7pRTXmx+oV568JJEaqu+lHMm2l+JGHGnnS8IdOFu15GafHScvLdIfTzVjEqTf4DoKkr+ouMGkl6jafjkcLVHbpJeAMmRY47v7vVkOSSq1Xl+ouFZJd74T29wTw3HGvme7fj0Dcm3Sdv9gDdUuQo8Uk22SVQ1NtJNX6+hzgnty/Vj4Y+1O92zsnbixSyS4SsTOsfrerho9E4NrvmuPRI+fajJ3zb9WavXddPUauScm0mYmRnNvXay1UORFgWSnZmzNaEzRYaFTRaVaSFtDpoU0M0we4/HyV48ljHyRpS1jXBYxoRjVlrGkcu1HY43RZSVUKhSSGJ7HNoByR9itOJbbtCJKys3hELG5ySSN3QaR4YK1Tmrv2K3S9N8XOrXk1+pZEnjwaVXkTUJvlR2uvrRpm205FfUtNvHjnSX67T59vp6mBreozbePTyqK2c1y/p6Gn1vUQ0mFdP09PI1ea
flX4+vqedaN5eROvoUndnrej5lj6ZqZKClPHj71flJ7r8GzzWnwSyTSStHq8OlWHo2bIml3r4bk3tBPlv2SHn3TwjreaeTpE3pm28kEpPz2J219ePuswsKnPTwUpNpNtJttL6ehf0XUXly5KjeNSXbBr/AEeF/f8AVisWNRyLHH9WLaX0sx82uQa9+1jT4UoqkPcEg4QpJINxs8zWu0RXlFUL0+d4M6bezZacduCpqMXlIrNl9Ur369XpskNVp3CdSUk00/Ka4PHa/SS0WryYJbqLuLfmL4f/AL9DX6Pq3CShJ8Fv7R6T4+jWrxq54Vbrlx8/hz+J2Yv6zy/Yu/5R5Mhkt0gWxSMkMW0G2A9zXMCA4oAbjW5vPhHQRZxrYTBFmCMauGoJ8MhcHP8AVY1dJgrmW5bJfQr418yHzdIDirqHsZWZ/MzQ1U6TRk5ZbvcqKtBNlbNL5WMnLYq5pWmaZiKpLef3mtoI7JmXBXI2NIqijTy30MxsaZ1RpY3sjJ0890aOGWyOWtYu42OTK0GNTJUfaaBb2A7iHLbkAjI9jL1s6RozdpmL1DJTasqRnpmamfNmNqHcmaOoyWnuZWWVyZ0+OMqBkLk4lcm5GQQ+AmCHwRnoLEFsOghUEOitjGmbEdHgTEaiCGccmc2MkSYpsOTFNgEMhnWdYgiiaJW4yMLYreApRb8BrG34LMMO3BYhh9jK74fFBYGxi079DQjhXoNWBehnfKPyzVp/YYtP7GksHsGsPsZXy1X5Zy069Bi069DQWH2CWH2M75T/ACzvgJeDnhXoaLw+wLxL0F/6j8s54a8EfD9i+8XsBLHQ/wD0L8qfZRPbQ9wAcSv0XAUSkEo+wah7DnsuBhEsY4EQh7D8cTbOTkSoDI4/YOERyijWZXIUoewah7DVAmkjSZMrsAcUh0mhM2UCMjRc6NolqMrzZI3jxuknw3/cUcjPR9Hgo9NwtLeSbfu7Z0eDM1oL4MpU69QmKk7dpnacQ+GmKg9mvR0ht2hVU2m6vgao5z2piptL6HZLttsU5tbPdA0ju9p2uPQGb8pgSbW63XoLnkSi2ufQFOeRNOmm14IwpvGrfO6IxYU4fOt3uyxGKUa9OAV0tp7NeeQlfa0/AaVqmc1sAtVIN036smb7WrCUKWx0oprdWJNBjVycvUlQt16sJKkg41u/QVvEdO0eO8rdbJAZH3ZZy96RZ0ddkp+tsqt7P3dmNvs+ghBznSLGHGm0ktl+87TQpSnW9FrBjpWype0+8geyt2KyTq0ufLG5p22lwVckVk+RN+9Gh5/6BJ5Xz8vl+o9KkklREIqKSW/jYYov1peiA7XQhb9WWYQSVvn1F447quENSbkl48iZaoqbfojM6/qfgaJxTptWa3B5b7UZu+TgnslRHkv5yiV4nUTc8spPdtsq5GWM6am0VZs5WVDZKe4JKYJXGhckPoCS2KQqTQlotZI0V5oIotbMdje4l7MODDUVF7HKi1CaM+EyxCZzbybQhIYpFSE9hqmc9yDmyEnJpAdxOOaU03wTwNSM59P0L1CpTn8kG/X+4y9Nr9RpMryYppye7clab9XfLHdS1i1U8cMd/CxQqPu3u3/D7jPZrmcFvv0PU6l6huU8UFkk7c1abfm96K9bhMhKy+obPRM2jwzb1koqKTe/l1stin1HX5dbnyVOcdO5XDFeyS42/aVUgo423sg/fJw+3nFvpMnj1eNtNpumvVF7TNT1M5JUnJtL0VlTTpYUnXzvZe3uaeHGo5slKldr2tJ/xOfz3/E1mKDq2FCFoNQaPMtaFOO3AnNj2LjWwvJG0PNKxlqTwZlNOqZ6rp+ojqNP2tJpqmn5PM6qG1otdF1Xw8ig35O3x75f0nN5eMvqmkeh1uTA77E7g/WL4/Dj7ik3R6/7SaT9J0C1ONXPDbdcuL5/Dn7jxsmdVnvpanKhy3OsFsiyspGnuOx8iIvcs4Vua/0S1jQ+CAxrYfFHPq+1RKRD/VYygJLZhKpGJbo
LJwTiW5GTgfTjN1jpMycj3Zq617Mx8uzZpk6XOWxUzPZj5sq5Wb5iaHCvmRrYHSRk4uUaeF7IPIrLSwyprc0cM+NzJxS4LuGfCZz2NI1ISGqWxShPbkcp+5Cj3MjvFOYDn7jgOlLZnnOpZ08zSfBs5syhilJvZKzyOp1HfOUr5dmmM9rLd9l6jLdqym92FOTk7YJ15nGTjlycEluMGwXA/GtxEEWMaMtBYghyQuCGpbGVMS2GJgLgJcEgVnNg2c2BIkxbZ0mKchkJs5OxTnuHB2FgWMasuYcd0VcKNLBC6OfeuKkHDH7D44/YZCHsPx4/Y5NaXIXHH7DY4h8MaGxxmN0rhCxewaw+xZjjDWP2J6fFVYvYJYvYtKHsSobcEnxUeL2AeL2Lzh7AOHsA4oSxipY/Y0JQ9hUsYdsTxnvH7CnD2L8sYqUK8FzSbFVYw1Aao0w0kdOKXC4wHwjRyoNNI6skOC2GpC1JIJSNoqUy0gWwbsiQx1EmJm7GSYuQh1Xnyem6Pt0zBfo/3s81JHo+j5I5On40uYXF/VM6v43+1OLmSVJJcsVuMyK17oBb/U679PqLX0YGRpum6Yb2e6FZYd3zRe69RqhE522nyLaathZEpc2mvIptpU3a9UC5XJ2+Bbxp5rlumh8Y2ruwckKXd6CtPo4x8BqFnY1e5YUEHU/olQd2Q4Piiw0k6ZzirsXR+1P4dASjsWZqm0Kauif0XSWqQuU+xP3GZHSsqxTyZU29k7SMtbH1qwax6NvzVFTngZqZ9umjHy2DgXdJGf67VSLuGFYkq3Y+XyY6XLIxpNr2DyptbG+P+pt9qOVtp00vdgYla9vbyOlC21WxDagqVJF9a99OVJcBLfhWLU097tDsNybpUl5oE2jheySosKkgElFX+07uvgO8ZX2nJJQg5vwrPHdVbyTk3vZ6jW5O3A1e72POauFpv1Ofza76L5HkdbCpsz5+Ta6hj3boxsmzZllmWiUwUwolJaLRDWwVEPgaSMiKs0XMiKuTlgatM6LJmLTplGsRkPxzKcZjYyM9ZNfhMap+5RhMdGfuYXAWlP3O72V1MnvJ/IpzkQ2LUrJsOEl8hxQEd2NihUhKI/TzWPIpSVpPj1Aig0rMrTHqcyy6rJliu2Lfyr0Xg1dHleVuckk2kml7KjFmqaXua2hVRTRl573J5vtrwVJB0BB7Iat0ec1iHDYXNbUPfADSYBn6iCaM9N4cqktqZq5o2ypnx3G64N/HrnplqPQ9N1C1Gn7ZU7VNPyjxnVtE9Br8uCn2J3B+sXx+HH3G10fVLFl7Hw9i19qNH+kaCOrxq54d3S5g+fw2f4noeO/rPFf7ZeMZATVAs0lZijyW8PKKceS7p92jX+iX8S2RYhEVhVospbHLu+1wLQMlsNaAkhSm7Ggci5G41sBmVJh0MfW3TMjLyzZ1i2ZkZk9zbFOqU3TK2TdljKqYia3OvKUY9mXsM/BRWzLOJ1QaVGniZZhOjPxzqizCaMbGkq/DM0hq1CooKSCc0k23SRH5V1t9Dj+m9VUHFSw4oOc01abeyX47/cemyaPQY8cpz02JRim23BbJcmf9ldE9N0xZ8irJqX3u+VH/AEV+G/3ivtd1BaXQrTRlU83PtFc/i6X4m8zMwrXketa5TWT4aUFNtpJUkvQ83NtstavK8k6vZFZqysTkYa93oGC+Amjo455ZxxY03km1GKXlt0l+LNYT6J9jei6DP9ncOo1miwZsmWc5KWSCbq6St/QT9t9F03p/ScS02iwYc2bKkpQgk0km3v8AgvvPW9N0kdB0/T6SHGHGoWvLS3f3uzwf28z5dV1fHpsWPJLHpoU2oNrue74XpRPe1rfUeXgixjQMNPn/AOBl/qP8ixj0+Zc4cn9R/kZ6ZGQWw1I6OHLX81k/qP8AIYsOX/hT/qP8jKmhImiXCUNpRcW/DTR1EgDAbCySUIuT4R6SH2PzZIRmtbBKST/m35X1KmbRy348pOdCJTPXz+xOeXG
uxr/tv8xT+wuof+0MX9k/zLmKPzXkXMPFPc9T/wCAtT/6hi/sn+YUfsLqE/8A8wxf2T/MdwPzWJilUG090tj6B0bT6fJ0jR5Z4MTnLDFtuCbbow4fY7PGDX6djdqv5t/meo6fpno9Bp9NKam8WNQckqTpc0TjHLexpmc+q/UtPhho5Sx4oRdreMUnyZeOJv6rA8+F41JRbadtXwUl0yS/82P4HJ/J8O9a7ielqsIjYxosrQSX/mL8GGtHJf6a/A5P/pfN/wDr/wD4CEg0hy0sl/pr8A1p2v8ASX4B/wDS+b/9f/8AAQokqKGywuMW+5OvFAGe/Hrx3moYXEBxQ0FozBEoipRRYaFtE2ErSiInAuSiKlERKbW4O6Hzj7CmjXGuJsDbRykQ0C2dmNJpqmxqmVkw0zolSspnXYtO0EjQ+uYDVjKsFoDIki30nVLTar4c3WPLS+j8P+AiSETWzKxq512Kevm9wGk906fqI0mb42kwz5bgrfutmOq+Nmej0uuTa2aAk+3jgKm1T2Fzi99k17FKhWRRycOmValGbT5/eWm4p00kwZw7la5XAqcvHYkn4oPJjuDXhnY00knyhySaom3sK3qti2LMXaEJNZZKtkx0b2IlIOpdYXJPdKyvpdbDKnFtWhPWdU8OBwT+aZgYM2TC1NSbSe5hvzfnXA9XNq9vKFTaSRV0utjqFFJ/NW6GZpqKSb8lTySzqoVqZXUU92wcbqaS8IW59+Ry8LZEfEUE5t8cGd1/bSQzPl+JnjBPaK3LmlSScqMvBc25PmTs2NPGkl68kTXVWL2BPstrdjGrR0UkkkSejjPM8YW+1fKqVIrOr3/AtZbbpIWoJO+X6sjq++i4Qcnb2XoWYJJbVXsAkrGK+ECUtpIFbIlqnfLOatCtClqE5Nt/cZepx2mbWWFoztRDnY5ddpV5jX4U09jzmrxuEmez1eK07R5vqGn52JnpFYy5CiC4tMKJpUNMiRJEgIqfBVyeSxkexWmxhXnwJG5GKLhuuhkZi2cmPgWVMYp+5UjJjVIzuQsqZKmITsJPYi5JYUglIQn7jIvciwLMB8FZXxssY2YaM2KGIWmFZlQHK94/U1tC/lSMbM6nD6mvoJJxW5n5p/gM/Wtj8FiLK0GqTRYg0zzq3hnKIcdiUFVoAqzx27K+aGzL8olbMudhz0VjIV48yktqZ6nQ5I6nSdmRJpppp+U1ueb1ENrov9E1PZPsb2Z3+HfvrPN5ePOa/SS0esy6aV/I6i35T3T/AAKjR637WaTux4tbBbx+Sf0fD/Hb7zyjR131S1OUK5Lmm5KiW5b0y3NP6Q1cKtFpIrYPBcitjl39XAtAtDWgGtyIaYKkLyq0Pitgci2GcY2rhaZlZobM3NRC7M7Pj52Lx9FYeaFMrSW5o6iG5SnGmd2b6Iqg4OnRzRFUV9CzCQ+OSigpNDFkJsOVoLJ7l7o2ifVOp49NTeKPz5n6RXj73sYizeFu3sklyfS/sx0p9L6cnmilqc1Ty+3pH7l+1scz/a57a85ww4ZTm1HHBNt+EkvyPlXXeqy6j1DLqLai3UE/EVx+f3npPtx1vsj/AIq00vmaTztPhcqP38v2r1PCN7NsqzqdX+kN77si16o+lfZPommxdDw5NZpMWXNnvK3kxptJ8Lf2p/eU/tv+gaHpsNPp9Hp8efPPaUMSTUVu3aW1ul+IyufXXgGj1X2D6O9V1D/GWaP8hpm1jtfrTrn6JP8AFoyeidF1HWtYsOJOGGLTy5q2gvRerfhfwPqeHDpOldOjjh24dNp4ct0kly2/X+JXwZz/AGsZcuPDDvy5IQjxc2kr+rFfp+j/AObwf2q/M+Y/anrz63qljxprRYW/hxa3m/8Aea/cvC+phxxw/wB1fgLkh3b7X+naP/m8H9qvzO/TdH/zeH+0X5nxuGKD/wBBfgW8eHH/ALi/Ai6kH7fWf03Sf81h/tF+ZYTtWnaZ8gz4cawSagk69D61p/8ANsX9BfuQS9VL15T
7Yq9fpv8ApP8AeeerY9J9rleu0/8A0n+88/2GG/8Aao19U9Wv5Cf0Pqmn/wA2xf0F+5Hy/WRrBP6H1DT/AObYv6C/cjXx/Dwwuufaf/FGu/Rv0KWe4KfcsiXN7VT9DKf2/S/2VL+3X5FX7br/APG1/wBGP72eVyeSpr3wrq9eyf8AhDiv9kz/ALdfkcv8ISf+yp/26/I8M+TkX6L9176H28U+OlyX/fX5FvH9r5ZOOmtf95fkfPcE2mbGly1VnP5d6z8Oar2UPtJklx09r/ur8hq69la/zB/2q/I89gy7Lcu457HFr+T5I0la663mf+oP+1X5ErrGd/6g/wC1X5GdCfuPhP3Mr/M8prq6rnfGhf8Aar8h/T9c9a8qeF4niaTXdd2r9CgpjehO8uu/6i/cdH8b+Rvyb5oNbL/NS+hVTLGZ1hl9CpF2Z/zv95//AAGHEIk4TA1YDQ0FqxcIhxFyiWGhbQuBVnEROJckhGSIiqq0LkOmqYpo38ekVASBSDSOzGkmwGpWKiPhujeUOS2OaDrYhopZEkJlEstCpRALnSM1Rnglwn3L6Pk091una9GedhkeHLHLG9uV6ryjcxZo5ccZ43aa8Hd4tfrJHqaezf3HNOtt/YV3Rvfknv8AR2a9LoZpN21T9yYqvY5zi9mDuuHaFafRNNNMYnTBTTVHNpEd4Ez2afqEmq2FzaeN+24nUZ1j0rknvVEW8DD6pkebVSbdqOyKKfgbOTncm+WKhu2eXrX610LWjn8PIpL7y9mn8Zt3slt9Sjig29kVOo9bwaKDxYmsubdNJ7L6s08ct9RrnNt5GlKax41bSSVttmbrOqabGknlUmuVHc83m1mq1028uRtPhJ0l9wh6d+Wzqniny16ni/g71O16NfafFjpY8DdeW6HQ+2WSMk46aC+smeUenkQsM14ZrPHhvP4fPsezj9sdRk2ShD6L8w5/aLWONxztP6I8XGEk+GizGU6ps2lXn+Jj/wDV6N/afXxdPLFr3ijQ0X2n+I0s0I7+Yv8AgeNtvZoKKaaatbj9DX8Tx37l9Q0ufHqIqeOdp+hbuo2jy32ZxZpJZJSfal+J6lLu8bCs5ePG/keOeLdzKKNNWc0Sklwcws9OcrJG0Us8LTNFq1RXywtHNqcNh6nHaexha7Baex6jUY+djI1eG72M7EV43U4ak2kVkt6NvW4Kb2MnJDtnwVKzq42A2c2C2UReRlafkfNlfJuhmrzdsAKXLBNA5kEs4A5bBpgIJCBqYaYpDERSHFjYsUg0yKFmDHwZVix0WYahrSkFYhMNS9zKwnZd3H6mlorSTXBm3bXszW0dOCMvL/qc+tTDK4otYylj2ou4+Eebqe20NXAxC06GJ2I3SWxUytXRbyOo2UM0krYypGZJpoq4JvDnTXqOyZFXJSzZEnd+Tp8XZWWnr3CGv6dPDk/VyQab9Pc8BmxTw5Z4siqcG4te6PY9C1ayY+xsyPtXpPg62Opivlzrev8AeX5qvwPRz7ite515+ty5p1umUy5p/Bp/TJq6fgux4KenLsTl8n1pE0Q1sHQLIhpgtgcmyYyK2F5eBw4zs/JSy0y7qHyZ+V1Zpk2fqIJtspThvwaGZ2Vpxs6s+i4pONAtFmcRLRpKOFNEUG1QvJtBteEVEvXfYnoT1OddV1UP5HG/5CLX68l/pfRePf6Hp/tJ1vH0fRtpqWqyJrFB+vq/Zft4LnRUl0TQJKl+jY9kv/pQ3LodJqMiyZ9Lhy5Eq7p403XpbQ2knp8czZZ5ss8uXI55JtylJvdt8svdB6Y+rdXwaam8Kffla4UFyr99l959UWh0ONWtJp4JeVjiv4FiEYQVQior0SSQF+UpKMUkkkl44SPk32n6n/jPrGbNB3hx/wAni9KXn73bPbfbLq3+L+mPTYZVqNSnFU94x8v+C+vsfNGk1XgC1f6fRemdX6b0T7L6CWaUY5MmFTWLGk5zb5de/qzx3XvtDrOs5O3I/haZO4YIvb6t+X+z0Mq
q/Ci70npeXq+sek0+XFDKoOaWRtJpVdUnvuPvam230zktxkEbfU/sn1Hpeilq80sOTHBpSWNttJur3S2v95jwViqbOHY0XMUdivhjwX8MLOfVMGoh/k09vB9T0/8Am+L+gv3I+aamH+TT28H0vT/5vi/oL9yL8d9NMvNfapXrcH/Tf7zCcD0P2mV6vD/03+8xXAx3f8qVntRzYlkg4Phlt9a6zjiox1zSSSS+FHhfcL1FwxSkluket0fR+m5tDp8mTSQcp4oybbe7aTfkvx/q/BJXgeo6vUa3L8bVZPiZElG6S2XHH1MvImm0fV39n+kPnQ4397/MW/sz0R89Pxv73+ZrJz6VxXyV8nI+s/8AhfoX/p2L8Zfmd/4X6F/6di/GX5ll+K+Vw2ZoaadUfRF9mOhrjp+P8ZfmHH7N9Gjxoca+9/mZ7x+j/FeNwZKSL2PJseoXQulx/V0cF97/ADDXR+nLjTRX3v8AM5Nfxbf7XI85GY+E/c3l0vQLjTx/F/mEum6JcYI/i/zMr/C1/wBhsWEzsMdRgnklptQ8ayNOS7U7a+qNtaDSLjCvxf5hLRaZcYl+LHn+J5cXudBnYs2qprNqHkTXDil+5DoyG6zDixYHLHBJppXbKkJe5yfyM7zvm72mtp2HYiDGJmPTGccjhgDSFSQ5oXJCImS2EzQ+SFyQiVZxEyiW5REziKXhWEUEkEkTR1ePaLHRW4/GKSGw2OvNBvhENIlcHM3lUFq9hUojWC9xhUmgcOoyaedwdpvdPhjsiEKNzQTVz7gamPV48iXc1F+jGqaq09jIauaS9S5r29PpI9jp1yg8f8u6vLEz2t96fIcZbcniNR1HWqbS1M0k9qdEYPtFr9O0sjjnivE1T/Ffkdc3KT3amvUXkyKrvdGRoer4tfivG0siXzQb3X5r3GZcrq7tka0a89VBwac+TJ1fUF8H4La+Vv6lXNmcW2nSfj0MfPklLO3ezRjvfYuLuPUKSauqZYwuL5a9TKxqn9Sv1XVSw4Fig2nPl+xy5z+tch5naf1frjSlptG6hxKa5f09EeceRt22Kc23bYNnqYxMTkej4pMz0t49Q4cUPWt9UZ1hJ2O5j0Mefc9NbHqMcuXRYi4NbNGHGTQ6GRrhsi5duP5HfsazhFu9jo472KEM8o0220aGnyLIk0ON5rOvhixJO2Nx4VknFJbtpHJW6RsdC0L1Gsxuvkg7bo0z/wBZebyZ8eLqvW9O00cGmhCKpJIupERSSSXCJLkfIb1dXtccccNKGhc1aGgtWY7yFDNC09jM1OO09jbyRtGfqMdp7GFKvNazDaexhajE03ses1WHZ7GJq8O7dEVGoxrIbItgtmiATYmXA2TFMIZEluCxskKaNIEM45nFByCQKCQgNcBoWg0RSMQaYCJRFB0WNjIrp0MTM7AsJ7BKQhSDTM7kHJ7o2NE32oxse7SNjSLtSZz+af4nPrVx+CxCVOipjkqRYxu2jzdNYtw3QcU0xcHsNgQp2Z1jZjZ8m7NfMm4NL0MXLBubRpiTqdK2SbfBXyQbRe+E34JeBtcHRNyM7KDo+Z4dQk+Gzf67p/03o2Rw3njSyR+q5X3qzzfa8WVNbUz1vTcyzaZJ7ujs8W5aePnHzu97XDLumfAHVNL+hdRz6eqjGVw/ovdfl9xOle6Oj+kfK2NOXorgo6azQitkcnk+riWiGtw2D5IikrZCcvA7wIzcMIIz8+9mdm2bNDNyzPzeTbBqeR2xTWwUuQWdUMDViJosMVNFQlaSFySaafDHTQplwjI6vVwiox1moSSSSWWSSXotznqdU7vV6h3zeWX5imy50rp2fq2vhpNMqb3nNq1CPlv+HqyvZNf7I9In1TqC1WpeSel00k33ybU58pbvhcv7vU+h6rU4tHpsmo1ElDHjTlJ+wvQaLB07RY9Lpo9uPGqV8t+W36t7s8F9revf4x1H6JpZf5Jie7T2ySXn6Lx+PoFX8jG6x1HL1XqGXV5bXc6hC77Irhf
+/LZRCZAmaGO6drZdN6rpddFusU05JeYvZr8GxL4FzVppjn0n23Ljw6zSzxZEp4c0GmvDTX5M+R63RT6f1DNpMn62KbV+q8P71TPon2M1v6d9m9M5O54U8M/rHZfsoxvt109RzafqEF+v/JZGl5W6f4WvuROvTTU7OvMYY8Glghstilp47o1cEPlRyaqYXqo1pJ/Q+iYP5jH/AEV+5HgdXD/JMn0PfYP5jH/RX7ka+H4uMP7RK9Vh/oP95jOBudfV6nF/Qf7zJcTHyf7UVUzYlODi+GqYP6Z1LDBQx9QzxjBJJJqklwuC1KOxVzRtMedWfCV8vVerxuup5/xX5FLN1zrULrqmf8V+Qeq+THKVcKz1+l+zHSNRodPlzaaTnPFGUqyNbtJvyb4uqXLXhP8AxF1z/wBVz/ivyJX2h65/6rn/ABX5Gh9sel6LpWr02LQ4nBTxuU7k3e9Ln7zz6Rt2otsaS+0HW3/tXP8AivyPX/YbqGt1+LXPW6med45wUXNrZNO+DwKR7b/B1/NdS/pw/cxS9Vm3r1mulKGh1E4S7ZRxSaa5TSdM8BDq3VWlfUs/4r8j33Uf/wAu1X/Rn+5nzHG9lv4MfJb/AErVa0Op9TfPUc/4r8hy6h1F/wC0c/4r8jLhOh0ZnPdb/wCp60o67qL56jn/ABX5DoavXvnqGf8AFfkZanT5LGPJ7mWteT/p9auPUaiUHDNqZ5U3dSaLGOfuZuPJZbxz9zi3+tXulStCEh0ZFOEh8JGSllOwhUWMW5RpYDQZDQAhoBosNC2hEruIqcC04i5RJoVGqIodOIFU6Hm8TYFKhkdgaJR240RqZLFphWdOdBzAYTYMjSUFTAgt2/QOTISqDZO7zJV2mh36hLxZPW51HtT4Q7psLyt+hR63O5v60Yfx56tE+PM6h1JspT5LeodtlWR35+JAnKElKLcZLhp0195raDXazJDI82VzhFJLuSu/r9DKas0dOvh6PHFcyuTf1/uDd9FU5s+Sbdvb0QGGM8snSbo6rbsdppqD3OX/APoF8GcWm019TznVs0p6uafEXSR7KGSE1tTPK9f0jhq5ZYq4zd7eDb+Pyb9t/F9ZHccmBVMI73fi0Vkpg2SmDolGmGnQpMNMmts6PUti/wBNdyaszEzX6TBNtvgTs8Wu6a+HDbVnquguGCDxOk5O0/c8tPL2Ko+C90zqFzWPI6a4ZpOc4y/l+LXl8de6RxX0edZ8Cle62f1LBUr5qyy8rjjjhk5kMk4mwFSVoq5oWnsXWJyRuzn1AxtRj2exj6rDd7Ho8+PZmXqcV3sZWJrwZDVrYOjqDrJXkmgGWpQtCJwaZU10yGhckPaFSRpAUcc0cWHIJcghIQEhiFoYiKQ0SiESiKBBJgoJcEgSYaYtEoXCWcL+ZG5pN4IwML+ZG5oZ0kqOXzz0caeOPBZgqoRidpMsRR5m42h8XSGxdIRBjk9jNSW72KmTCnNtcFtK+QWkgCqsNeCXjSXA+gJKxwuMrVwq2aXQc9S7G37FXVwbi/YT0zI8erVulZ2+DTP5o/7Y6RKWDWRXP8nN/tT/AHowdNyj3XVdP+m9IzYkrk4XH6rdftR4bDynwejfg3PbX0vg0Y+DO0r4NGPCOXyfRBsGtwmQuTJTvAjPwWGtivn4Y4Izc3LM/N5L+byZ+bydHjUpS5BfAUnuC2dMMDFyDbFSZUKlTFMZNimy5E03SaXPrtXj02lxvJmyOkl+1t+EvLPqfQei4ejaJYYNTzTp5ctfrP29EvCPM/4OIxll6lNxTkuxJ1uk7tX9yPcThDJCUJpOMk00/Kfgd9elSPD/AGt+0yyd/TenZLhus+WL59Yp+nq/uPG2vVfifXYdE6TGu3pulVf/ALS/IfHp+hh+po8EfpiS/gIrOvjaTm6im36JWFkw5ccVLJinBS4cotJ/Sz7HPLpdMrnkw4kvVqJ4n7ea/R6vHo4aXVYs8oSm5LHNSq0qugKzkeNbBOs4EV7T/Bvqu3Ua7Qt
7SUcsV9Nn+9HrOvaNa3o+owpXJQ74fVbr91fefOfshqP0b7T6Nt0svdifva2/akfWOVutvoGvbTPuPlemjbTXk2MMKSK2bTfo3UdRgraGRpfS7X7KL+KOyODX0SE62P8AkmT6HuMP8xj/AKC/ceK1y/yPJ9D22H+Zx/0V+46PB8Ux+tq9Rj/ofxMto1+sq8+P+h/EzXEw8n+9NXlEr5Y7MuSWxXyrZkwqxtev5CaS5VH0rTw+Fp8WP/dgl+CSPA/A/SNdpMH/ABM0U/pdv9iZ9DOvx/6lHzf7eZfidehBf+Vgivvbb/ijziRrfabN+kfaLWzTtRn8Nfckv3pmYkXWV+pirPaf4PVWPqP9OH7meQjE9j/g/VQ6j/Th+5hFZ+vUdQt9O1SStvFNJeuzPmcMGdJfyGXj/cf5H1STUU22kkrbbpJCP07R/wDNYP7RfmFz1dnXzdYc/wDwcn9R/kGsWf8A4WT+o/yPov6bo/8Am8H9ovzO/TtJ/wA1h/tV+ZH/AJxP4fPVjzr/AMrJ/Uf5Dcayp74sn9R/ke9/TtH/AM1g/tF+Zy1ujv8AzrD/AGq/MV8Mo/H/AMvFY8lOnaa5T8FzHk9zN1eVPqOpaaaeWTTTtNWxuHIcHl8fEytjHOy1CZmYZ35LeOeyOLWeVpKvwkPiyljmWISJUsHARYYzdQDQZwwS4i5IsNC2ibCV5R2ETjRakhU0TSV1zTJRMo0yEaY3wkkpkHHXnZCIkdZDdo3mgVMhuoJe5MwZvZIjza5kq0OnJLHKRg9Ylc39T0GlXbpG/Y851N3J/UrweswX4wcztsQ0WM0GmxLVHZPiSntf0NNpRhCPpFL9hnSXJo5H8y9kv3Br4moUBuPTyljyZa2hVk4Y90kvU9F0/RRnppYmlU01deTk3ffII81p83dl7Gq97LOr0cdRgcZq01sxebSz0+scWnV0XsPFN7ejJvq9jXPqvC6zTPDlcbumVqPWdb6XcXnwq15XlHmMmNxdNUel4vJ+478Xs9E0TTJSJpmvXTmdckwkiUSkJvnIsatpG/0+Cx47fkwsezTfqbeF3BOLdPiybXX4Is5Xb2bV+ohZHGVp7pj1JNU0IyY3dx3QTrtz/wAej6D1r4eRYsz52v1PY4csMsFODTTPlUE001s0eo6B1dxnHDmlz6+TWXryP5/8GX/7njexOBjJSimnafARUrwHHHHAEMCStDAWjPUCplhaZn58d2a047FTNjtM57Cr5gkEokpDIwMbWIVADJiTXG5ZUPYL4dkzRsfJBxfAho1c+G1wZ+SDi+DfGukrSQFDWgGjeUwhLkiiVyAEhiFoNEUjESiESiKBolAoIRJRKIRKEDMTqa+puaLeKMPH+sja6e+Dn809HGvhtLYsxltRXxqqHJbnlbaw+G41ARVIO6MliT2Obs5cAvhsAhukC2heTJT5EvMl5LkK1OZ7NPyZqfZnTXhlnJlu9yjmnU0/c6fDOVlqvaaDJ8TTRp3seP6hpv0XqmfElUe7uj9Huv4/gb/QtQpwUb8Ff7TaesuDUpc3CT/av4no5vcqvvKjpvBox4RnabwaMP1Uc/k+lkxrYFLcPwQluYLS1sVc/DLjWxVzrZlZDMzLkz865NLMuSjmV2dPjUypvdgNjc0KbaEN0dcNDYmb5CkxGSexUiaCc9xLnuDknvyB3GsiVvS67WaJzej1ebT99d3wptXXF19S0ut9Y/8AVtZ/asykybGGlLq/U5/r9T1jX/Wf5iMmq1GT+c1Wef8ASyyf72VbZKZJDai3bVv1e5y22WwN1zsOwafPqHWnw5Mr9IQb/cIiziaJpkkPGmpKSbTTtNOmn6pmjhzaltN6vUf2svzKOGNtGjghbRjvVhxo6XunNSnJyb5bbbf3s1ca2Rn6KG6ZqQWyOO320hWuX+R5Poexw/zOP+iv3Hkdev8AIsn0PXYf5nH/AEV+46vB8qmZ1ZXnx/0f4me1saXVFebH/R/iUWjHyf701eSK2Rclyap
FTUSUIOTdJKyckZ0DT/H628rVw08G7/8Aqey/ZZ62Tai2lbS2XuZfQdG9LoFOcay5n8Sd8q+F9yr9ppqUXJxTTa3a8qzuzOThPjmZZXqcrzprK5tzT5Tt3+0KMT1f2y6M8ed9SwQvHkaWZJfqy4T+j/f9TzMIk1lZxMY8HrvsCqh1H+nD9zPLRjbPV/YRVHqH9OH7mPKs/XpeoJPp+pT4eKf7mfM46bFS/k48eiPpmu30Go/6Uv3M+eqGyFo9K/6Pi/4cfwO/R8X/AA4/gi12epHb7EdQqvT4v+HH8EC8ONO1BJ/QtuIuUQ7SLTaZZw5Cs1Ryk0ydZ7Ca+HJxuXsWQxcOWvJew5eNzh8njVK18c9izjlfkzMWQuY5nJqcaSr0JDUypjmPjImKORwKYRRuoBoMhqwBLQpoe0A0TSV5RFONFpoW4k0EkNUG40D9S874XAsi9wmgGjpztIJg8tBS9zsauaXuLza76JpJdmi+qPN67ebPT51WkS9jzesjc2dfj9SHr4yM0E0ynODT4NLJHkqZYJnTmoU5IvPdp82kU5xaZdwpbN8KNt+xWvia0en4XOaSW7/Yj1uiwqEEkvBi9Gw90VNqmz0mGNJGEx3XV5z/AGzdfoYS12HN2pxk3GW3qmjJ1OkeHO1F2k+D1mTGskK8ppr6o871rFOOfujs3TsflxydXScWNSTjKCaappnlPtB0h6XI8mOL+HLde3sev0MHlai3TfqX9V0p6rTSwZaaa2dcMw8W7L2NfHv83r5A4NPglLY2OsdLyaDUyhKLST2Mtw3PTzr9Tr1/FJZ2F1YaiSokpUU3zP8AqKpmp06bk/hv6ozkm2avTMDtzS42QcdPjnL1d7FVPZ+oKTTpqqHTg2rSprlAP5kk0OR0SuUIyV8NE/ClHtnG1TtNeCVBpWraH43tT4NJE61x6z7O62Wq0jhk/Xxun7m0eJ6Vq3o9ZGXEJbT+nqe0i1JJp2mrQf2+Z/m+L/z8nZ8ojjmQDjSQccIBkrEZIWiwwJqzDUD5XGI+EAYRHwicVrFyh7BdnsNjDgaoexHVKWTFa4M3UYLvY33jtcFLPh52LzrlKx52cGm00KaNHU4abaRRapnXnXUlNHJUMa3Bo06EoJEJBJE0CRKIRKIoEggUchUhpkrgElMQMjsza6c7SMRPg1+myWxh5f8AURvYnsiwkIw00h+1HleSNodBpoMTC0rHLizJaVfBGR9sGwoi9QrxNLkAzc2ZOb3K8sl+SckKm0LcNzfMjK0Mpsq5pOti6sTb4Olpk48G2NSVNlP6Dmay02ei6vh/SOl5KVyilNfVb/us8toW8GpT4t0eywtZNOk901T+h3eOyrx848pp2rTXk0oP5UZ8cbw554nzCTj+DL2N/KY+UoeuDlyCnsFBWzmtWN8FTOtmXHsipm8lZDPyrkp5lSZey+ShqHszp8Zs/IrbTKWaPY2/BbyTVlbO7TOzJqc5pXuVMuT3J1EnF8lOU22b5ymicrZydtL1aQuybdp+jTNOE+gr/B0/PVf/APR/eNj/AIO8S/W6pkf0wpfxKq/wkZWtulQ/t3+QEv8ACLrX+p0zAvrlb/gjPmj9NXH/AIPunJr4us1UvZdq/gy7h+xPQsbTlhzZf6eV/wAKPK5Pt/1ia/k8Gjx//ZJv9rKWb7YdfzJr9NWNP/h4or9rTYuX/o7H0jT9A6Ppt8PTtOn6yh3P8XYWq6l03QYpQzavT4KTSh3pPj0W58lz9S6hq/8AOdfqcqfiWV1+F0IhBJ2kk/Umwfr/AIJLdhpbnJDIxtkWoOww4NDTw4KuGPBo6eHByeTRxo6SFJF/Gtgui6Fa7BknHL2dk+1pxvek/X3NRdHa/wDPT/8At/vIni3ffGkY2vV6OaS3apL1Z6zGu3FBPlJJ/gUsXTMUckZ5ZPI4O0qpJ+teS+2oxbk0klbb8HV4sXE9mzep754L0j/EotD9Rl+Nmc1dcK/QTI5t3urTJycMjQaP9O1ic1/IYWn
L0k+Uv4sbj089XleLG6S/Xn/ur8zZS0+g0tbQxQXL8v8Ai2a+LH90harU49Jp5580qhBW/V+iXuz57j+0Oo0n2ln1HLcsOaoZca3qC4r3XPvv6mp1vqGTXT4ccMf1IP8Ae/f9x5XJHvyyTVqzab9s9V9YhLT67SKcHDNp80dmt1JM8X1n7OZtFOWfSReXTPelvKHs/Ve/4mb0XrGs6LNxxr42kk7lhbqn5afh/sZ7vpvWtB1OP+TZl8TzintNfd5+qsvk0frT5/jgen+xCpdQ/pw/czc1XSNBqpd2XTxU3zKHyt/hyR03pOn6Y836PKbWZptTadUq229ycyyiTlWdb/mWo/6Uv3M8Eo7L6H0HNBZcU8bdKcWr9LVGIvs1iS/zqf8AUQtS07HmqBaPT/8AhrF/zU/6iO/8NYv+an/URP5qeV5ZoCSPVP7MYn/rc/6iBf2WxP8A1uf9RD/NH5ryUkLZ69/ZPE/9cyf1F+Z5zq2iWg189NGbmoJPuapu0mFlibLFSE6Zdw5ONzPfIzHNp8mW8diW1hycF7FPjcxcOTjc0MOTjc8/yY4uVq45liEjPxTui1CRz8aSrcZDUyvF2NTAzDmQmSMwtWA0NYLWwuAloBoe0A0TYREoi5Rrge0C0SCK2IcRrjRFFZ1xNitkVKztOryr6h51SVHaSN5V9S+91CaOr2wpex5/VwubPQ6xfIl7GLnhc2d11zkGmTkx3exVyQaNbJj9ivPDd7GudoY2WF3sFnn8PBBJbySb+i4LmTTqwup6L4em00rT7sMXS8Xv/E3l6OPQ9CnGekxuLW6R6DHweI+zeqcE8De6e30PYafKpRTsqK8d9cXEZ/VtP8XA5JW4r9hdU1QM2pRae+3BW5NZ4tg9JjeZJ8pnolwY+jw/C18oeE3T+411wYfxs/noYH2q6dHU6N5or548nzTNjcMjVeT7RmxrLinjkrUk0fKus6f4OtyQqqbOn5Xp/wAHfe5rICjG3wGsbb4NfpPSZ6uadNRXLfk1kep6nuqmi0E9RlUYqk3u34PTrQLRwUEri+WWtJpcWHH8PHBKS3t87FrLkhkhWROMltT8o0zGG/P75n4x82BNXBb+fcqSwtO0vqjVlFqV02nw/AEsSTTqk+UV+W2PLYpwioJKStPkKOBzlUZUvcKWJqb7d0wkpwacVfsORV1/covgtNJ8rz6nq+kZJT0UVJ247WeewwU5rfxwel6dieLSpSVNuw1Hlfzd9zJVwgkhmdeW4444QcQ1aJIfBOvhvmONbItY4lfDukXMaPNrCGQiNUPY6CGqJmuF9gjNiu9i8oAZMdrgJTsYOpw3exkajF2SbS2PT58d3sZWqwWnsb+PbOxjNA0PyY3CTTQujqlIKTJSJokXQ5EkEiJyJBsmxASZKYJNiA0zS0EmmmmZaexodOdyr3M/J/qI9PpZXFW9yynZRwpqKaLcG20eT5GsWY8DFwLXAUW2YVoYuCJK9mEtkc1bDgZ2fD81oWsPqjRnBPwLUEnuhy0uKqwpPgNwSjuixSXgCaXay5SsZeaPZlUl6np+lZO/TJN+DzWq5NboWbbtb3PQ8GvSJ6pPVMXw+pzaW2RKX38P9xON/KW+u46eHKlw3F/fuv3Mp438pfmntV+nR4GQFwew2HBx6N0nsVM75LOR0U875CUKeZ7NmZqZ1e5oZnsZOqfJ1eIKc5u+RGSTaDabYEo2dsNRzwU7KE4OLNecL8FXLiu9jfOiUKJS3GvHTJUDTsICQaj7BKFBqFE2gCgF27hpBKJNpBUBiQahYax+xnrRBSHY47kKDHY47mOqFjDDg0dPHgqYI8Ghp47o496VGl0rVavp+PLDBixTjkn33NtNbJVt9DRXV+oP/VtP/WZRwrZFmCKz5tycXFhdS18tvhYI++7Ilkz5d82Vz9kqS+4GKCod8uterTQ0A1uMYEiTL6Xq46Setlkttzj2xXL2f4L3K+u1OXVT7srpL9WC4X5v3G5ElbS3KuXybfu
2cTWbq9ov6GMo3Jv3NjWuoP6GXFWy8s79HCF+Bv6NCbTaprhrlBY47FiCK6Z2m1fUtMksGvyqK4jNqaX42X8fXOrxSTnp5+7xtfuZQihqQ/1TX/8AH3Vf9zS/1H+ZH+Puq/7ul/qP8ynRDQfqjtXX1/qv+5pf6j/MF/aDqq/0NL/Uf5lNoXIf6o7V1/aPqq/0NL/Uf5gP7TdVX+hpf6j/ADKE2Jk9g/VT2tN/anqqf83pf6j/ADMrX6vLr9VLU51FTkkmoJpbKvItsU2O20rbQkp0yLORNSs4Z00aGCd1uZUdmi9p220c3kz0Rr4ZN0Xcb4M/Bwi5jZyXDWLkGOiyrB7Dosi5UsJhWKTCTJ4ZlkA2c2SHMBolshskBYDQbAZNAWgGqGPgFkjitm8DtAryoTmW5Z6cryGvi97ib9WtWtvuMnJG5M1tW+foZ1W2dnkv+QqtKG1sr5FRcyFecb3KzS4oZY87FbU5J5Eu+TdJJX4SVIv5IFXLjuzozRxnYsr02oWWNquV6o9ZoOowyY01LlHlc+J77FfFqc2lyfK24vlG0rO9zex9DWsVchw1abqzx2Hqjmldp+5e0urlkyqraRF8nPR/t6iEE86yrzGn9S2ijosjap+UXkaYsaQSR8967pnk6lkUIuTcqSStvc+hIUtPhU3kWKCm+ZUr/E3/ADbyujweb/x1+uPGdO+yefIoz1SWKHLV2/7j076biwYYxwRUVFUjRohukO3h+T+T5PJe2sLLjnGpdqckqdeRkcK1WFJqsiVqzQy44Td8P9gl4WuE0/DW6Lz5M1U83Z/8sx4GsSUopO655FTwzqPbFNPzfBswwxap7+toL9HiqpUayytZ/I4wYadKbe9+gxYF37Ld+DXjpF3tpNt8+g+Gijdza+iH6PX8qKOh0Clk+JNVFftZspUqISUUklS9CSXD5PJd3tccccKoQcccQHEPg45vYi30b5lhfBexblDC+C7he6PP0xi5jWxYghGLwWYIyqxqIM4bDoomUdiTZubHtwUM+K72NnJC0ynlx8lSpsed1Wn3exnyhTo9HqMNp7GTqNO92uTpxv8A6zs4oUdQbi06ZDRt0gUSSwWASRZxzAnWSmDZ1iA7LvT51kKCZa0TrKvcnc9G9dpacEXIpJooaGdwRoLg8fyTla5+GrgZBV4E20MhK0YVoagktgL3Di7QAE+QGtg5cgN0ADwhU2dPJu0txE8rW7TKkTaTqIppitFqJYcqUXSsPNkTRUxNvMkt9zv8HWd+vRavPLPo2pb1TX3FXE9h2PHKWmVp01RWxulR0eafKc7fqxjdj48FbG9y0tkcO/qwZGUM73ZdyPYoZnuxQKmbgy9SrZpZWZ2fefsdXiCo4WC4FiiGtjo/RqcoewmeO/BelCwHjvwObDNlht3RCxVyjSeHbgXLHSNJslJ416HfDPTaPR9Ix9AwdQ6hi1M5Zcs4Vhl6N1t9ER3/AGXf+qdS/rL8zT2Hmvhv0Djjb8Hou/7Mf8p1L+svzJWX7MJ/5p1L+svzEOMGGP2HRx+xuLL9m/Gk6j+K/Mt6HT9C6hPNi02DWQyY8TyXllS2+jflozubRx5xYm/AePDT4LcMdpOh0cPscmtp4XhxtVsX8EN0WOkaXHly5c+oinptPG5X/pSa2X/v2DxxU8jkoKCbtRXCXoY7lklv9qh+KNJD4qgdPF5NZhwRVpu5+0Vz+X3l3Ngm80vh4JKCdKk6fuVnF/P6UQgqDWDL/wAKf4BfBy1/NT/Af51/xRTFyHThOFd8XG+LVCZFfARkexUyvZljUScccmuUrO1Ci+k6DKoJTyQbk0qbfua5z2WprD17qJSxx9i1r3vRb0PT9Jk6Zj12p1602Oc3BKULVpvzfszXMt+I4pwSSHRLi0vSFx1zF/Zv8w1p+kr/AG3i/s3+ZX4p8VYjUW4aHR5dPny6TqMM7wx7pKMK+nn2KSZNln0GEN7DOm6jCta9JrIReLUKoZGlcJeN/R/vA1uOekn
kx5FU4efDXhj/AD6BbYqTNWebS6Xpugy5OnQ1E8+Luk+6qe35lSXVdEk2+hw2/wD3v7h/n/5JQmyvNm/k1PT49H0/UF0iEvjTcPh/Eqqve/PBnvq/TVz9n4P/AL39xX5KxlyYts3seTp/Uej9Sz4emQ0uTTJJPvcnb8nnm9ws4mivySgLCTJpGx5Rd0q3RShyi/peTDZxqYU1FMtQYrFG4IbFNMysaHxY1MREamZ3KjkwkxSYSZlcg2zrF2dZFyBNgtguVAuZjTG2C3YHcc2SBNkNgNnWLgLybtFzp6+dspS5L/T1yzbwz/OJ/tOrl8zRTSuyxrXUmUseRW0zp3P8hfqJ8ipKxmTkW2aZhkziV5wbLcqYHZe7NYfFJ6ZS3ZVz6WCfCNXI6Wxnaht2awXMJx6dJqkafT8aWQo6fIm6fJpaVqM01wc/l7Kz/L0OlSSRfRQ0k00qL0XaN/FVwaJSIRPg78lUMVkdIayvltIx814CJZKYWPJb5Ks21Ibhe55/7v6T/a/B2NQjG+B64PS/j30pxxxx1E4446xdNxxxxIQQSQRQ5gNkt0A2RSr5phfBewvdGZhfBo4PBw6ZRfwlzGuCphWyLmNGNaQ6KGNWgYoYkSavOGxWyQsvyjaEZIAGblx3ZnajDzsbU4XZTzY7vYqXibHndRhptpFNpp0buow2nsZeoxNNtI6ca6ysVGQ0E0QakFkEtEMYQcccwDkx+mlWWP1K9h43U0+As9G9f0+ScE0zUg20jE6XO0kbePhHj+ecrXJ3KDilQpbsarSo5Vwa5GWKTaYadoDQ/NlXNOtkWZNUUc0vmYyqO9IXkmn6FfJN2xTk75NcxFp04pp7iMEezOnzuF3NrkBSamn7nb4kX69ho0paZKk9jGzx+HqcsPCk2voafS53BK/BT6rDs1aktu+Kb+q2N9e8tL8Jwu2i54KGnfzl5vY4vJPYhWTgoZvJfyPZlPKk7FIbOzPZlDLuzRzxKWSDbbR04gVyGw3FrwLa3L6aHuHGKZCVsao0gAHEVKFpllxs7sVU2lY5TamizafT/ZLSPVaJauL1E0oOfbTt72J/xj0r/wCX4f27/IHQ9T1Wj0UdItPpM2OMnJPLFt23frXksf461H/p/Tf7J/mb3c/6RD6j0r/5fh/bv8jl1Dpf/wAvw/t3+RYXWdS/9ndN/sn+YS6vqX/s7p39m/zF+5/0ELqHTXx0CC/77/I0ej6rRZ9Tqoafpi0uVaeTc1k7rW21fh+AiPVdS/8AUOnr/tv8x2LqWp7pf5JosfdFxcscGnT97J/9cz7QzYYqS28DHBpKMI905NKKXlvZFiOPY0Oj6b4monq2rWK44k3Vy8v7uPvOLGb5NcIOpxrSaXF0/G7cfnyyX+lJ7/8Av7iNLp3PFkyppQg6bfl+iHz6dq8km7x983bfddXy6GaiCWBaXTuoY1Sfq/LZW89t3ucn9KK0d4tJqNa21LJ8mNrlJctfff4BRw6ieDHmx6/PPHJJ2p8PymMxqb02PFkUagqSSpFbHDPodQnpYvJjyOpYfX3Xo/c1zuW/iUCcdRFf57qG3skpbt+hZwrNo05Z9RlzZpLbHKdqC9X7lmWKOJzy4UpzWy3T+HtuVGnbbbbbtt+WXda8c932YZylKTlOTk/LYnvxfGWKeWGO033TdIbIRlhCf60U/qjLN99puzYNNkg4/wCMtKm1W81+Y3VaGGHp+j02bWYMTxRce6bpS44soQ02PP1HTadQVOffLbwt/wAjR6itP1LJoscpdqy/E+G9nbVbffR14k/PqJYWq6Zp8ltdX0TaTaSlu9uOQMST+x2ktf6zPn6sy9ZjWPU5IOCjKDaarho29A9Evsnpv0/Jlhj/AEidPFG3dv2e1WVmSz0lkqK9F+A1RXoi4pfZ/wAarWf2X9wan0D/AJnWf2X9xP5oF0NVp+sV/wAKP8SqmanT1039D6nLp+bPkk8S71ljVc1Wy9zGnNQi5NWkr2HufALPBZcbi+f
D9C5l1ePqPR3+kZIw12mXa+508kfDXq/4/UPBpun5njhHq+H4mSkodu9vxzzZR6np1pM2fB3KTht3JVe1/wAQ9z6GrkzaHH0bpf6fDUTbw/L8KtuLu39ClPVdAaalg6h+z8yz2abW9I6dD/GOlwzxYalHJNJ262q9uCrPpWnkmv8AHOgX/wB/95V734Ku5c3RofZ3SSli1j0jyyWNJrvTt3e/HJlvU/Zrzg6l+K/M08vTMX/hzSaZ9T0ijDLJrM5/JO29k75V/sMt9E07/wBudP8A6/8AeO/fhXq3pZ9Ln9nusPpcdTHaPxPj1bfiq+88ze56LFptN03ofVsX+M9HqcmoUXCOKab2fFXvyebvcWk6GmHFi0w4rczqT8fKNDSrdGfj5NHSLdGOjjZwr5EPSsTh/VQ9ENY5KhiIStBpBw0olM6iCbkJs5shgyIuCdJinLcmbpCHOmYbwOndxHcJczu8z/A6d3e5HeJcwe/dFzxjqxyzS0CqLZmxdmpo1WJsPFP8xPqlr505GSstSuy71OdOX3mI8tM67jt6m321VkU0c2Z2PPTW/wC0tRy2i5nipTA6VClNWF3qi5FwrN5KOZXZbyOytk4KhqbuLtFvTanhN7lPKJU2nae4az+oz09joNUmkr3NjDkUktzwul1rxtNs9HoNbHJFNMxxbm8qJrlb6YSZXw5FJLcensd/j20cxOVbMcKyLYnze4GbmVOw8L3RGdbnYXued/8Akj+1/G9kPXBWxPYsx4PR/j1f9JOOOZ2E44g6yOhJDZxwrQ4hs4hsi0BboTOQc5UVpz3ZnaT5vge6NTTvZGThe6Zp6eV0cmkSNTCtkXMaKOGXBfxU0jGrPghiQMUNihGFrYVOFlhoBoVClkgVckLTNGcLK2SHOwiZWbHzsZmpw7N0b2XGtyjnw2nsaZrPUeazQ7ZPYUzS1enaTaRnuLXKOrOuxmBkNBNP0BZYCcyWQMBZKdM5kLkYei6Zka7a9Dfw5LSPNdKlaX4HpcCVJnmfyJ7aZW8avcamqFxdIKrOCxrBqmyX7ApNHOVCMGVuMWyjJt2y1mmmqRXSsaap5Iu7FNF+WO/AiWF3wazSeK9bENNNMsrC/QjJClwdPj2mxsdHnfbv7DutxdYp+jaf3/8A/DO6RkSy9t0a/V136GTV/K0/21/E7c+5xc+MfTO5mg3sjN0r+dmhexx+WexC8itFTInuWsjK83Ysw1DMrXBWa9jRyQUnYl4fY6JOQlFwsB40y68PsB8Fp8AaqsNboLsa5Ra7PY7sF01VQY7T67V9Nx5pYY4ZqbTfxYXVLxuG8e+wvVQrTZNvA87svo251HqOrwLSvT4tNWXBGc+7He75qnwVF1fqT/8AK0n9k/zLms1ut0uDRQ0uWEIvTQbUoJtuisuq9Vf+sYv7NG2/JJb/AJA/RdS1efp+uyzxadZcMoqFY6Tvm1e4qPUuqzVrBpmvVYG/4jtLrdTrum9Qjqpwm8copOMUtnuI6bny48k9HLXz0974W0mrb3TbXkV3bqSUjVr+qv8A8jT/ANg/zGx1nVH/AOTp1/2X+ZGaXVtPJrNrppeH2xpr60WdLPW4ks+s1c5pr5MNJOXu9tkTNW2z9X1/8GTknmzKL1EIxaTXyxaTK09HgSc5wSrduzQnLJlm55HbfC8L2QpS071cYanLGGOHzNS/0n4X08nH78nk5KBaHTR0WneVR7c+oXHmEPH3/wDvwPxwpcEz1GknklOWsxNv67L0JnlxRxxeLLGffJRTS4vyaeTG9X/4n/8AA6m5KEF3TfC9PdjEliTWN903tPJXHsgkkouOO+1/rTfMn+RVeoyafJJZdPLJidU4Va+4vEmf8c33/wBNXyYs+izvU6Nt3vkg3an9ff3LePJi12J5dPtJfr43ymTDVaLM6jnWOT/0cqp/tK2r0mXBlWq0klDLV7O1JejRrJZOb+BMtthORj/0iGrwLL2PHmTqcPf1RU1E1DFKT2SVkXPLwI0cvhw
6hrvOOHwoP3e7/gU+oZJ4Oh9H1OJ/ymFd696otalPT9CwYHtPKnlmvruv3r8Cp1F//DnTE+Hjf8DqnqcTVX7R4oS1GHqODfDrMammvDpWv3ftJh/+jtJ//cz/AHsT0/UYtX9m9T0/UZYQzaSfxMCnJJyTttK+fKr3Rc0Oo0+n+yOlnqtI9VB6iaUFLtp297L59JlJ+waa9C6uqdJ8dCn/AG7JXVOl/wDoc/7dkfn/AOQPob/ybrP/AEY/xKLafJr9P1ei1Gg6r+h6B6SccK725uXdzX0rf8TCcg1PgN09LqmgpJf5RD96LP2iyLH1bVOXHev3IpaWV9V6ev8A+TD96D+1Mv8A8X1a/wDrX7kOT1C/poa3pXR9Fm+Dq+qPFkpOnhb2fHBVnovs60761X/Yf5Dvta1/jl7f+VD+J57I9nt+wLZLwWvV59L0dfZnRYp9TrSxyyePN8Jvudu1Xirf4GQ9B9m3/t1r/wDx3+QWqf8A8EdM2/1ifj3kK+zmLTdShrOl6jFBanJBz0+Zx3TXKv04f0sr+y+1Z0vROj6yOf8AQOrvNkw43kcVhrZfX3PPxlaT9Tc+yMZY9f1bHki4zjo5Rkn4adNHn4PZfQWomrMXsNgIg7HwMaR2Nbo0tGt0Z8DS0a4MdVTXxfqofETjWyHxCNIYkGkDFBjNxD5JBfIwhugGyWxbYuEHI9mVZOmPyPYqye7IuSF3Ed4DYDZP4Brn7gqdtCnI6DbmvqV+PRNHG7o2dKqwGPhXBt4dsH3HN4p/9xWWD1Z1f3nnps3urvZ/eedmz0MxGvofiNMt6fUp0myhPkX3uDtM0/PS7xu9+1phLJaoysOspU2WVnTVpon8rmlqTtFbIyfjJrdick0/IcX0nI7sqydMfkklyylmypWky5EWrEZ+5d0eqnhmpJtryjEx5X31ZewzTSMfLjjG+3uenayOWCafJs45ppbngumap4cyi3UX+xnr9Fm70qdi8e2mNf1WlYue6CT2BktjfV7GihnQvE6Y/OitB1M4N+tIrQxPZFmJVwvZFqD2O7+Pfap8EQ2S2C2dmqHNnWC3RCkY/oDs6wbIch/odE2kLnOkDKfuIyZElyTdE7Lkq9ypPJudly3e5Unk9GZ2l14bC+DR0890ZOGXBoad8GGkxtYJbIv4ZMzNO9kaOF8GFWvY3ZYjVFbGPixKG0C0GnZDQETKImcLstNC5REShkhzsVMmPnY1MkLRUyQrwBVk58KaaoysmmSk9j0OSCa4KebCm7o0zrjOxhz0/sIlga4RtywewqWn9jWbTxiSxteAGjXnp9uCtk03saTyQM9oGi1PA1whMsbW9FzUpL/TZ9rXsz1WkyXBbnkNE2ptM9HopvtW5yfyM9XmtiMxsWVoSQ3uPN1GsNlOlYmeSkdOdIryn3bWQLXNuTHYoXVgwjZYxrYCgHBAvGvQsdtsGSoUVxWcK3QjMqTLrWxVzxVM38d9ppOiyKGoX1PTahfG0ORLdyg1+w8jFuGZP3PW6KanpFe9cnp4LLz+le9+peTtFHHB48s4PmLa/BluLtGXlns4jI9hLGTYpuycw0NEpWQtw0jUI+Gn4IeFeEPig0k/AGovD7APE0+DS7E/ALxIVgZyg14FauH+TZNvBqPD7FfJjwSmsOonLHCSdzUbr2omT2F3U6vU6fBo4YMeCaeng28kW3deNxC6jr/+Do1/23+Y16nR9mOPwdTqXjioJySgqXHoctXlX8xotPiXrO5v+Brvd7/tJDHo9TqNbodbHJhxxlGUUvhRa7vzKuo0acezPjabVpNblrSa3U5sGrhn1CjkTSx9iUWl5onR5pJTyZFKedSahKbtRXr9eTPy/nVl7/QTp8j6Zo1HqWR5U5J48bj3SgvVv/3RZnjWX/KMeRZsc91NeF6Cfh97byPvcuW97ExxZ9DN5dG7g954nw/yfuRfLnyz8X1AuKBPwot24pv6DITx5sccuOLja3i/DF4smdKSenUpW6cnSrxsYTH
+Vlpjjhg3Sgn9EO+AlHeCSXCYqtTP9fN2L0xqv28h48McbbVtvltttmnPHJ6ttPiFlxvM8TklNK0m6v6DJY2uVsKz6XDn/nIJv18iFpdRgX+TaqaS/wBGe6/aXn/zs5fRGZtPiyJrJjT+qFrHHHBQgqS4Xoc9ZqMe2q0imv8AfxOn+D/MnHqNHqJKOPN2TfEMipl/+d//ABvQRJJXSKeoxvUZsOmXOWaT9ly/2JlzJs2n4dFPLFrLHLGcoThdNOmrDF5fYD1vKsmbMl+rBdiS8Jf32UOrT7fs50r3xv8AgHq23jm27bTtvyU+tZ8cvs/0qEMkXOMGpJNNrjleDpz7lqawk1bdKzei/wD4L0j/AP5U/wB7PPRez+h6Dp+o6Zn+zmDQ6zqC0uXHmlNr4bk9264+ppJ2UozlILuL60vQv/X1/YM56XoX/r6/sGL80GdAlej62/TAv4mW5mtpsnR9Bouow0/VVqMmpxdqh8Nxpq6397MCWRJNvwPUKtDpOny6zrekjiVrFkWWbfEYpptv933lj7W6TJHUy6hjay6XUNOOSDtJ0lTa+mwK1mn6b0F4dLnhl1utV55wd/Dh/u3671979il0brC6b36PWR+N0zNtPG1bhflL96+/kcn9D0t/aHqOLUdYx6vQZ1NQjBxnTq074Ymf2s62ntnw/wBghmbpXSZZJPD9otJHG3cVPdpejd8iX0bpj/8A6k0H4P8AMfNF7R/4u65x+kYP7BHf+LuuLd6jAv8AsIj/ABL0z/5l0H4P8yX0TpjVP7S6D8H+Y+aHtudH0HU8ep6l1PqMMKWq0jqeKSak6u6XqldniYcL6GyuidMim4/aTROlwk239FZjpb7ceNidlT8bLGPwVsaLONHPolrH4NLR8ozcfg09FyjDRtfHwvoOgJhwh8Co0hseAgVwEM3PZAvglgvZDgAxcmMbEz4YyIyySRXbTYeeVWVXPfkRHPgF7AqZzkmhgLZOJp5EJlOmHp3eVIWviWzgVtI2ce2D7jH0quSNjjA/ocfg/wBq0y8z1iW0jAk7NzrLpP6mA2ejhlfoZPkTJhTl4Qt7msSFtp7MmOaUfILBZXAsLVOt2BLVP1K7AYvzB2mTzt+SvKTbJZDKkCE6aaLunyXW5SoODcGmidTsJt4LklR6vouR/DSk90eZ6fDvxKXqej6cqSPOuvzvh5nXoYO0TIXidxQx8HVNdjf+lTMrRT4mXsvBSmqlZzeX6iruB2kXIcFHTvZF2HB0fx77ip8GwWwmwJukdnkvDKnKkAsm4GaVCFlVuzn6nq13+4Msi9So8tcMXLN7h+i6sZM3uVsubZqxGTN7lTLm9ybsunZM3O4h5LfIh5LJi7ZnddJ47C72NLT8ozNPyjU0ytorYjW0/g0cPgztOqSNHC+Dmq13HsPiytjY6LEZ6ZICYaGENAtIMhrYRK8kIyQTVFpoVKIuBn5INWVpL2NHJCynkhTAuK/YmC8SfgbRND6XFaWFPwInp1vsaNAuCfgctTxkZNMvQrz0r9DbljT8CZYU/Bc0XGJ8L4U06o2tA7girqsNY2643GaDIqSHr/LJT1WxjdVuMc68lZT2s74ls4dZadNyTb2Bxptg3bQ/HDdGN9D6fjhsNSpkQVIJO2QuD8APkLwQ2kEMuSK2VXZackyvlSdm2U1mzSWQ9N0md6dx52PN5dpG10XLdJ/Q9Lx31EZ+q+uh8PqGXalKpL71+YEJlrrUHHPjyeGnH8Hf7mZ8Z7leSHDZMW3vRzlYN7kScUbEbFCoeBsRmYkMSAQ1LcDSkT2hJE0ALr2IeNPwhrRFEUEPH7HKPsPqwXAw1ACMEnaSt8sdGCIjEdFGNDox9hqR0VsGkBoSS4VBpHJElcPjkiaOSJLikUQwgWMgSWxVy4sbabgm1w6LcuCvkHLxKrNlTK+S1kZSzM0ySnnaaaZ5zqLSnSSX0PQaiVJ/Q83rZd2Z+x1YTSU6GJr0QlMKzYju5eiIcl6IV3EOYAblQLm
LcxbmHCNc64FOd8gOTbIsfCE6fgGl6I6yLAJpegSSfhAodjg5MVodGF8IdHG34HY8O3BZjhS8GVqplXhj4LUMfsHHF7D44/YztV+QY4bo0dJFporQhT4L+ljujGp4vw4HR5FRWwyPJUVD0EDF7BFGhgsJgSKBbYqb2GTaEZHsMlXUPkouVMtah7MoSluHCM7/AHOeTYQ5AynS5HImpyZN3uWNBPuymXkm2y90l3lZPknMVPfb0+jVtGtkdYH9DL0S3RqZnWnf0OP+P9raPIdbnSf1MFybNjrj3r3Mbk9LHxhfoWQEQ1sakCQDDYEgAJC2GwGMBZATIoAhKw8cG2kdGNssYYJ5EiNXgb3TIdsEvFG7pV2tNcGToMbpJI3dPjpKzxPN+rrsaZ+NHDNOKHN7FbFshzex1ePyX8+1k5mU8j3LWVlObI3squaZ8F6BnaR2kaMDq8F+Hn4YLnwMFz4O3yfDihqJNWU3Pd7lrVcMy5zak9zi1riKbLIJnk9wZTvyKk7Mr5E9DPK9yvKTkxrVgqG5H76Awi3yWccOAIQLEFwXKHhcKqjS0zVoz8KujS08ODfZxq6fdIv4uChp01Ro4VaRzVSxAfEVjQ5IQNQSBQSGEnNWccMAaFyQ5oCSAK8kV82NNXRbkhU1sIM5waOSGzjTYpqmSTqOaOTOsABoFwsazqH0cU9RjUsTVcoytHkcJ0+VszfnC4s8/wBjhrMkPSTNce5Yz1GvCdrkON2IxXSLWONmG5wQ7DG2WYRpoDDFIsJVTRyaaRPBKVbktWia2M1Id0JnJt0hz4EtXNsAVNuPkTPNapssZINop5cTbs6McTScsk2Xul5FGa3p2ZuWMo+BmhyOOVb+TvxPTOX29J1iHfo+9cwal/B/vMFOmelSWo0Tg91KLi/vR5VtxdPlbP6m19xp/ZrlsRGVsRPLSJxZLMzXoMdBlXHMfB2I4sxGxExHRAzEEkCglwI3Uc0EkTRNICVEpIKjktzKhHaHFbnJWElRjqAcQgYoNIUU5BJEIlFw0kkElw0MEJgsCoJ8FfI9h82Vcr2JSq5WU8z5LWZ7GfqJUmbYTVHV5KT38HnM0u7K2bGtyVF/QwpStt+p2eOIqWyLBb3Is14B2C2RYLYcDmwGzmyCgIhs5ENgSbIvcGwo7sKZuOPc6Ro6fDsthOkxXTrdmrjx9qRjqtMZBDHS4GqASW4aRlWvHRgMjE5IYkZ0cTGJc0y3RWii5pluiEWLcUGiEgiokcHsMFxe4dlBz4FyYbYtlQFyZXyPZj5FfJ5KKqOpezM5vcv6p7MzZPcciUt0hOSfgKUhT3LkTSsj8mn0dXJszJq0a3RY7WZ+f/8A50p9ep0S3Rf1TrAylolui5rXWCjj/jtp8eK627ml6syUafWXeVL3Zmno4+MP7RQLWwYL4NCLYD4DYEgBbAYbAYw5kJWzg4rcAOCpXRa0UHPOkkVvBpdKSUnJ83SMt/Dek0OFRitvBrYkkkUNGrijQxujjvjjaT0dFbB3sBFkyaoxuOfARllyVMj3HZpFZu2c+u9Jb0bNOD2Rk6R/NRq43sju/j34MnATWwS4Ilwejv8A1VGdqlszFzJqbN3Uq0zG1C+c8zzXiKrOyK2G9p3acfaRXaSojO05RLlCEh2NboFIZBU0bZoeHwR4NTTxWxm4FwjU0/g6NiNHDHgv4VwUsC2Ro4UtjCqPhHYYkdDgKhBKCRCRKRQEkccjqGHAtBAsYKkhUlsPkhM1sIKuSJXmi5NbFea5Jqaqt06OUgcyp2hSn6iHT7DTK6mNiwM2rTMDVrs6jLarSf8AD+BvxdoxOrLs1uOS5aa/B/3mvj+8Tr4uadWkXoQqilomnBGhAx8hZh2OI1LcCAyzk00FewSVoBRb5GqkiFFTtbLkFRa3Y1K3ZzQ5CLaQuUExrTBpmuSUs2FNMpwj2ZbXqa2RWjPzKpWjr8WuI1HounT7sFL0swOq4/g6/LFKk33L6Pf99mp0nI9l4K/2lw1
PDmS5Tg39N1+9nXPcP+nn8srnVh43wIyJqV0MxvfciiL2NlvHIpY5bFrG0T1cXYOxyK0H7liLJ6ZyCQCYaYGNEohBE0kUdRJxnQKISQMQ0RYEoJEBLgmKiUSQiS4pJBxxQcwWEwWKlSplTK9i1kKmV8iQp5nsZmplszRzcMytW2kzbESxtfk+VoyWXtfK3RRZ34npCGyLOfBDZZushsizmwCCLObIbGEtgtkNkWPgTdjcKuSQpK2W9LC5oWvgjY0GJVb8IuOgMMVDEkTds5q6czkEg0gUhkURVDig0iIoNIikJIt6ZboqpblzTLdEIq4kSQiXwUlye4aewryGnsMkti29gmwJPYcBc2V8rHSZWyvkuJUNXLZmc3Ze1b2ZQbKiQvcitgkrJosiJLY2ejR+QyZI2+kxqC2MP5N/wKfXo9Et0WNe6xCtEt0H1F1iOf8Aj/G39PE9Wd6ivqZ9F3qjvUv6FM9DPxggh8EsF7Isi2A+Q2BIYLYD5DYDGHVYaQKCTFQKy/oMna0r8lAZhm1JURZ6N7bQ5E4o0IzR5rpurTSi3ujax5k4rdGLXOvTQU0ldgTyFR5vcGWbbky3BTMk7E3Yl5LfISZybyXVzSuppGvi3SMXTupo2cXCNv49PP09ES4CXBDPU3/qpS1C2ZkZ185tahbMyNQvm+88nz1NIrY7tCSDrY4yKo6hlAtFQBSGQ5BoKJvmk8Tg8GppvBlYXVGlp3wdOxGrg8GhidIzsEuC/idmClzG9hqQnGPSGEpbk0dRNFBCJOOGEMhok5gCpIU0Pa2FtCBEkV8iqy3JFbLHYmlVDOtrM/LNwbd7Gnmjs1Rl6mOzEiphmT8lnHkT8mL3uE6LeHPxbKueCVsQlaMzrMbnil6Nr/3+Baw5U0tyv1Rd2GMvRpjx6qr8Hok1BGpidmZoqcEaMNuDLy/U5WYBq26Fwe1jYcWctaGrgmtgU2wr2JU5W2S0SkqIdvYcAHyCwnswHJI1yVDNJoo54b8F1tNciMqTTN8fUaH0yfblo0us4fj9MyNK3BKa+7n9jZjaV9udfU9JBLJp+18NNP6NHbg8/HiZY7XApwcHwabwOM545LeDaf3MTkw+xnfpK+NlrHKkV+xxY2L2Jqou48nuWsc7XJmxdFnHNog19MYmVcc7HxYdM1MNOxUQ0xWgxHAp7hEUCXIaAXIaJMdEohEgcSiSESVFOOJOKCHwC+CWQxFSchTylyZTyrkUiKpZuGZGsezNfOtmY+t2TOjxorz+rVzZSexoZlbZSzRabOzPxJTYLZzZDZoaWwWzmyGxhDZDZ17gsZus7yRZMVbGDYK3waWhh86bXBTwwto09LGjLSpPbRiriSkdj3QyjF0QKQ2KBSDSJpmJDELQaM6mjXJb0/JUXJc0/JCKtol8EIlvYZUDYV7C73CTCJS2BJ7BSFSZUIubK+R7MdNlbI9i4Shq3syg3uXdW+SinbLiRpUjjr2IbGSHu6N7pkaxowVvJfU9H02P8mjm/lX/ABGfre0a4B6m6h9w3SLYr9UdRf0I8E/xa348V1B3qpexVHa2V6mb9xFnfn4wcDIIGTKhFsWxjYtlQAYITBYBBKZBxIGmNxryKW9D4qkiaZ2ObhJOLaaNjSa1TilJ00YsRuNtO06MdG9D8bbkF5vczsed0k2M+JZjqn1dhNt8liDszcM22aOE5thcwbTX1NvB+qjDxbSRt6feCNP431efqylsQwlwRI9jU/xNVzr5TI1C+Y2M6+VmTqP1vvPF/kFopIJLYFINHJCRQLQytgWioANEpEtHLY1zSeEx7F3Tzaa3KsUPxOmjr0Gxp5XW5p4HdGRpnwaWB8GFU0cbLEGVcTLUBwzKOJXBzKCAWSyHyMnEWQ2Q2AS2LZLdnCAGIyKyw0LmrQqSjlhszO1GPk18kbRR1ELT2EmvParG020JxTp02aOqx7N0Zck4T9jXN7OJ+NPT5OEO1b79M17FDBOvJccu7E17C5yqhnTZXBGrFWj
E6fJp16G1jdpGXlnsodBNv2HpUhWN7Dk9jlrQcSWwYslbuyTFbOs7aiHwOQy5ySVlPJlbdJjdRJ1SKptiIo/iOuSHO0wAWbRIsc6yp+56TQz78SXqjy6dOzb6XltLfbg6c0ZB1DD26tzS2yK/v4ZSnj9jc1+PugpVvF/sZmTgTucqqz54/YW4UXpwEuHsR0dIWw7GwHGmTF0yaa1jdFmDtFOD4LEHsQcWYsNMSmMTEZqYSFphJkgyIxC4sYmBwSJRxwzSiTjhxSTjjioQWQwnwCxAnItirkRbmVcqCIqhnWxk6yOzNnMtmZmpjaf0N8VNedyQ3ZVzQtPY0ssPmaK2SGzOiaSyMiabBstZ4clRqmb5vQ5sFs5shssIbIbJbA5GaVux+KDdAY4Wy/p8VtOtidUQzBjpJ0XsKpgQgklsOgqZla1k4tYx6SaEY+EOTM2sEkSjkyVyKmJBJgphIipo4su6fkox5LunZnWdW0RJ7AuVIBytiTRBJ0LTCTAkti5Ow2xci4CsjK2R7MfNlbI9mXCZurdWU09yxrHyU09y4k5M6wLOsZGQVzS9z1HT4/JH6Hl8G+aK9z1mhVQX0OT+VfUh5+trSrYodWlSf0NDTfqmV1me0g8P+rS/Hi9TK8837i7Oyu8svqyGzunxzpIb2OsCTKAWwGEwWUAsFhMF7CCDjmcSBw3aRYRWx/rIsoi04NDIC0MjyZ01iPAxMVBjEzDRrOn5NPF4M3Trc0sXCOfYi3je6NrSu4IxcfKNnSP+TRf8f/Zc+ri4IkSuCJHt6/1NXzfqsyNQ/mZr5nUWYuqfzP6ni/yC0GLDXAmLGpnHwhkMiyE7ZUCSGiTjSB4VD4JpWKirZYhG4s69GuaZ7I1MD4MnTukjU074MKGliZag9ipiZaxvYIZ6exLIjwcywhgtktgsZBbBbJbBYglMkFHWBOkA1sGwWAJmtipmjaZekitlSdklWRqIWnsY+pg03segzxuzI1cFbsvN9oqjhnTLsJ3GjOT7ZtFnHPY01Di3omviNe5t4kqPPaefbn+pu6fImkZeWCfVxOhqtio7jVSOSxoNOtg1wLW7GLgXDcd4Is5ukOQ1fKrsqSjTLzV3YqWNMuekVUaBe5YeN2d8IuaLipNNFvpebtydrfPgDJj2YjBL4eoTfqb+Pafleua+JgaflUZkobGhpJrJgVO6K2aHbkkvc28nuSrUpwEzgXJRESiYEqSiLaplmcaETQh1MJFjHIqJ0xsGRVLkXY1MrwdjkyFGphJ7i0w0yQbFjosQhsQhnLgnkFMJFQ0olEI4qKSccjioSCGSyGKgqasrZEW2hE0LqVDMrTM7UR2ZqZVyUNRG7NM1NYOoi1NlfJG0X9XCnZSkjeVKhnhszPyRps1syTTM7NHdm2NEpyIsLIqYps6oEtkxVsFbj8ULa2C+gdgx20q3NbFjUIpVuI0eHiTRdUTDVbZiEg0mSkGok9aJgxqYCVHXQjPUglIr9xPeILCkEpUVlMLv9yKi1ZU9y3p5GYsm6LmmnaMqztXnK0SmKTsYuBEJE2DZ1gBN7C5PYKwJsqAnIyrmezHzZUzPZlwmbrJblK9yzrHuylZrPiT1Kwk9hKYSYwt6NXqEeu0aqK+h5Ppi7tSj2GlVJHD/ACr7PLW06rGYPWp1Gf3m/i2xfceY67Osc3fhmni/1itfHk5SuTfqyLAu2zrO6MRtgtkWRYyc2QySGARIFhMFkgLOOZwqBY9pItIqLZlqLtJmejhiDQtBoz6ZsXQxPgSmGuSKbR0+6RpYuEZum3SNLEtjl2ItY+Ua+jfyGRjXBr6L+bH4P9lz6vR4IkSuCJHuX/Q1bUP5TE1b+Z/U2dS6gzC1b+b7zxvOWwwYxMrwkGpnLxJ3cEnsJi7GplSAZxyORcN4rGty1BbblfGWcdHXozMCp17mpg8GZjVTNLA9kYaDQwvYt42VML2RaxhFLEeDmzlwQyicwXwEwGBBaBfITBYBB1nENiCbBbOsFsC
c3sV8nDGt7CZvZioU8y5MvVxtM1MztMzNTwyomsTM+3IFCYOq/XFwlR087EruGX8qm/J6HSq4o8xjn86fuei0U32K9zHyT0GjB0PTTQiG6Gwo460hsXuFYtbB2Sp1pAt7k1ZyivQqEFtENoY4r0IcV6FADojbwE4bgOLXAgDItmZmeThks0sjdNUZWre5XjvNI09J0XP340m+UXdTH5k/VGB0DUVNQvez0eoVwteNzu+5sOe4oyQmcSxJbCZIwNWmivNFrIivMRENBQYLBumhURcg6HxdlPHK6LMGY1cWE7DixUXYyLJM6I1cCosbEZjQaYCCTKMSJITJRUUk4gkqBwLCBYqQZCciHPgXkWxKapZEUsytMv5UU8y2Y5U1kaqFpmZNVZtahbMycyps6M0lLLwyjmXJeycMqZVaNc32ln5kV/JayrdiO3c7M30ToRtl/S4nOS2EYcbbSRsabEoK2tydaXmdPxwUYpLwMSIQaMXRI5IJI5BoRhoiQbAbGKFsBy9yZMW2CaYpk9+wmyHIms6cp78mjo3aMdT3RraF2jDTP+2hF7BpiosamJQ0Q2RZDewB17gTZLYE3sVARN8lXM9mWMjKWd7M0hMzWPcqWP1ct2VLNp8SYmEpCVIJSHwNjoy7s9nsNKtkeS6DG237nrtKuDzP5N/zPDTusL+h5Dr8/wCSmeuyvtwN+x4b7RZKg16s6fF/R7+PP2TYtM5M7WRiZ1g2dYgKyLBsiwIdkPgGzrJDmcdZxIcizidxRWQ7C969SNGsINAINGdppT3GJ7gErlEU2ppuEaWIzdNwjSwnLsRbxrg1tF/NmVj8Gro/1EivB/sufV2PBEiVwRJnt2/4K/tT1b+VmBqnv95u6t7M89q3Tf1PH86NAjIYnuVYTdliLOeJPgOQiI1MqGYmSgUEuC4bxMJUWcT3KMWWMU9zs1DX1tNe5fwPZGYpqky9p5qkYahxq4XwXMbM/DOy9iZKlpcEM5O0cMkMFktkAQWCwmQxEF8EEsBsA5sCTObAlLYQDKQmcjpzoRkmBFZpbPczdTLZouZppJ7mZqMnO5eYms3VO5iUFmdzbBR1T4kyDaaPQdPncUrMGELNXpk6dPlGXkno69Bj4Q+Lor43cUOXg4bOKhqZPkGPIaIUklEElQ0nHJkNl8JDAbR0ntZXyZaDhGTSaZma3FdtFqWZ0VsuS00ys55eo1S+lyePUq3W57PE/iYE/LVHicTrKmvU9h06fdgSu9rO3J4LmhM0W8yqbrh7lWaMdT2pWmVplrIVZk8Ihi26DmxTYWA7HLgt43aKEHuW8cjHUUtxYxMRF7DUzNR8WNiyvFjYtAZ6ewSYtMNFQzEyUwEyUyjMOIOKNJD4OOfAUAYE+BjFz4Iqaq5EU8q2ZcyFTLwxSpZ+dcmTqVTZrZ/Jl6pcs3xUVm5irNbMtZSvNWb5JTyRsWsdstShZMMbb4OnNEHpcKW7RoRTYGLHSSLMIk29b5nHRi2NUAowoYokrK7SWqHdvsQ4B0yXwKk6HyQiSGOlti2w5CZOhpqXJIW5+4E5iZTt0KstVYg7kmbehfymFhe6+puaF/Kc+2caEWMXApMYmSsRDZIMgAGxc3sHIVJlQE5HsUM0tmXMz2KGoezNckytXLcrKQ3Vvcqpm+Z6SbZKYCZNjKvU/Z+H8kn6nqdKt0jznQ4Vpo+6PTaNW0eP5r3yLz8WtW+3Tv6HgPtFO2l6s951B1gaPnnX53nSO7xfYXkZFhJi0wr2OtmZZ1gJk2ICsiyLIsAmzrIOJJNnWQciTEmMxumhaDRNJbTtINCcLtUORjVQSDjygEHD9ZEUNPT8I0sPgztNwjSw+Dm2cW8fg1dH+oZWPwauj/UK8H+y59XFwdIlcESPZv8Aor+2frHszzesl87Xuei1r2Z5nWP+Vr3PK8zPQIclnGyrFjsbOYluLGJiIO0NTHKZyewSYKexKLlDwSY2EqZXsOMtz0bFNCM7iW9Nk43M2E9uR+nyU+T
LUDfwT43L+KZjafJdGlhnZhYqNGE9g7srwnsMUgMbdENoFysFvYCG2gWwHKgHMRcG2KciHP3FSmIhylXkRKZE57clfJkryAFkn7lXJkryRkyc7lPNl9ypOpdny87mbqcmz3GZs3O5n5MjnJ+h0YykN72wo7sBMZBWzawLOFbFjA3jzKvIrGqQ5xqpLwZU+em/p8lxVluLsztE1KCdmhFKzh3PYh8OLDTAjxQS4MqsRyZB1bFQC8EMhWiXuiwXkexRyJ2y/KNoTkxX4DpWKE7SETtsu5MTXgS8Lb4D98TYrLZpno+k5rgt/ZmHPFS4LvS8zx5VFvZnT49dic+q9DqF8qfpsUps0JL4mB15Wxm5GVue+tScjKeRlnIynmZCaTN7i2zpSoU5pvkfCPxljGytjdss4zHUWtQY1Mrx4GpmNhw6LGxkITGJiUsRkNi7K8WNi6ZUM5Epgp2ghmNcEgolMYSQySGOmh8C58DGKnwZ1NV8nBTy8Mt5GU8z2ZMSoZ/Jl6ndM0875MzUPdnRhLPyCGh2R7gVZ0xJShb2RZw4kq23CxYqVsdGO5pGmYKER8IEY4j0gaxyQaRyQSEbkgWkEC2ALkkyvkVD5OhM2VBVeaKuV0WZuilnmkmOM7VfJkraxeNtuxc33S2DxqirPTGruHwbmh/UMPDyjc0X6iOXYi+hiFxGIlQwZBIhgZbQmaHtCZouBWyooZ42maU1aZTzQ2ZpCYOqhbKnY14NPUQuT2K/w16G0vpKpTQUN2l6se8Xsdiw3mgq8odvpNew6TFRwQXsej0a4MLp0KxxXsb+jXk8a3vka5+A6pKsVex8661O9VXoj3/V5VBr2PnfVJd2rl7Ho+L6nyfVDyEC1uSdTNKZJCJEHWdZxAgk444RJJIORNAkGgUg0RQODppllO1sVoplrDFvZmejEk2OxQ+ZErG/QdihTMrTXNOqpGjhKGFUX8KObRxbx+DU0n6iMzH4NTSfqI08H+y59XFwDJ7ErgGb2PW1f8VM3WvZnmNU7yno9c9meb1G+VnmeZnoEWPg6EIbBnLSWYMcmVovcfBpomUHxYaYpMNOi+m8EdYfYC4NHrqMhPYPHkqbViUqBcu3ITYG7pMlpbmrhnsjz+indbmzhnsjDUONTHPYap7clLHMfGRnYo/uOchfcR3CCZPYRPJToOb2ZRzZKb3Diae8nuKlkt8laWZeomeevIcT1ZnkpclXJm9xGTPtyVcmf3LmSpuXNvyVM2bncVkze5Vy5W20mbZwTs2VzbSewqyDrNpJCEixhVsrxLmGOwqaxjRYjG016icaLGNGdUs6DI18rfDo18bbS3MPH8mdPhM2dPJNLezk8s9p+VciHEXDdBrk51pXNhEIlclQJq0C00MSOaNAWwasY4gNNE0FSgn4FShT4LDFMzv0K8o7bicT+HmT9y3NWipOLU7NvEzr1Ojyd+Bb26KOqXw8so+LtfRhdJyJwSv2D6rClDIvo/4HX9yufGXkkVMstxuWZTyzomZSXkmIU7nyRlnyKi9zT8+iaGKZbxtNGbjnVFzHPgx1lUq9BjUVsc+B8XaOexZiYxMTYxMimsQY1FeDHxkq3FFQ6LoNOyvdvYdjexUBiCBTJTGBHEHAYXwKyPYbITNmdTVbK9ilmezLeZlLM+RSFVHUPkytTLc0dQ+TJ1Et2dXjiKrvdh4sdu2RCLlL2LMUkkjpOTrkhkFbQKQ2CG1hsUNSAihqQKdRxJzAIb2FthSfgXJjBc3sImxuRlXI6KTaVkmUNRO9ixmnSZSm23uVGeqWkOggEtx0EGkH4dmjd0S/kzExLdG3o/5tHNoRdjwNQqPAaZCzUcwUyWMBfIuSGMBlQEyVlbLDZ7FxqxOSFouEx9RjuT2E/D9jRzY7kL+F7GkpcUfh7cB6bDeoht5LTxew7R4f5dOuCdX1SsbmkjUFt4NvSKomTp1SRsaZVA8zxzu2kZnWZbPfwfPNa+7VZPqe961LaR4HUb5pv3PT8X1
n5P8AZXa3ICaIqjoQ4444QccjiUiaTkiSaJSJtCKJSJolInpuS3Gwg2TjhfgtY8fGxndAOPH7FvHiqtgseP2LWPH7GVo4GOO1wMjCmNhANQIp8TiiXcK4K0I0WsSMNKWoco1NKvkRl4+UaulXyI18H+yp9WVwBk2TDXAvK/lPU8n+qmRrnszz2f8AnWb+uezPP5t8jfueb5PdZ0KGRAiEjn1CNTHQlRXTGRZlfQW4u0GmJxy2DscpvHWiaTOaOo9lQXC+CvmTUkXEJ1ULipLww6VO0cnaNvC9jE0a3Rs4VsjLf1UXoPgsRkVsfA+PBlVG2c2CcLhum9mY2uzdk+TWyOos811XLWRehcz1npMtQ/ViZ6i/JQlnt7C3mZrMI6uTz+5XyZ72TK7m35AtmkwByyN+Qbsg66L4STkRZKe4+AyCtl7EqSKeFWy9jWxnpUPxosQQnGh8DOqHOLeO1ytzR0T7oJ+pSx7osaKfZJwb4exh5Z6TfvWrDYalYrG00mNTORSUrYxLYBcjEXAlIlI5Ik1AXEFrYY+AG9hWBXy7K0VJZKe5Y1DpMz5NtkXPsqcsya3F5JpimmBJNMvOeM71r9KyVOrNjWY/jaOcVzVr6o81oMvZlSflnqME1OHrsdOV5+PJZJ+Slmye5d6tB6bW5cfi7X0e5kZJttmsym/QydslbEUSiuA2DplrHMpJ0OhKjPWTaWOfBYhIzsc+CzCZz6yqVdTGJlWMxikY3JrKnQyEm2ivHfyPx7GfFSrMOB0BEGNixwzkSD4JsZiRxCfgkAGTETY2TEZGRSV8zKGd1ZdyPZlDO+RyJrO1MqTMjK7mzS1bdMzHvJnZ4p6SZBUhqFRYxM24uGxVjooVAdHgFmRGLgWgkxGMhsiyGwCJMTJ7ByYmcqGXS8kinmnyOyzpOilklbLkRaTkbbFMZIWy0VCW42AtDYE1Kzi5RtaTbGjFxPdGxpnWNHPqexPq6mGmJixkSVnIJLYCIxAAtANDqIcRyghoGStD3AFw2LlClOFvgD4a9C1KG4LiUFZw9h+ix1kbolxQ/Sxq2Ru/4hfwrg18CrFZlYVbRrQ2w/ccXhn+ao8/1h2ps8Rlg3Nv3Z7TqzuM/vPKZce7Z6GGWvrPaAa3LOSFCGqN0AZxLIYByDSAQyK2IoSkSkElZKRnaEJDIQt2Co20i3hgRq8MWPHxsWsePjY7Fj4LeOCRjafHY8ZYjAmEB8YEq4GMPYLtGRic47io4iC3HwQpLcfjRjoLGNcGpp1UEZeNbo1sCqK+hv8Ax5/kqfTnwJzP5WOZXzv5Weh5PimPrntIw5q5M2ddLZmO+X9Tz9/WdAkEQd5MakaDiLTGJmWoZ0HQ1PYQmNi7RMN5aiKGUc0j2ll1udNd0GmEyGhFR6NUzYwrZGXpFUzWwrZGWvp5Wsa2HRWwrGth6WxFWmqIYVHNBARm/VZ5TrH84vqerzfqM8r1bfMkaY+stsuiKDaBaOhIGC2G0LZUJ1nEWRZXAKwlyAHDdhYFrAtrLkEVsK2Rbxrgxq4fjWw+KFQQ+KM6ZuMmX8nkjO6t02djXA3Lj78TrlcGevfoWdjQ0824J8lmLszdBk7saT8bGlF2tjis5ShqCTFph3sXDGmSmAjrZpAJuhcmw0rBktgClqHyUm9y/njdlKcGnwTamo+pDVkbhJWwmuFxEPkmmvDPQ6DMppepgODqy503O4ZVFs2zronpH2q07vDqUtv1JfvX8TzTR7vqmFarpmWKVvt7l9VueKlFPwdeb6GiCUG4EdtFEgNOgaYcYNk2A2EnsWsbK0ItD4OjHZxagxqZWjIZGZhYqLcGWIMqY7ZbhwZ2KixBjouivBjk9ieKOTJTFJ7hphwDT3Cb2AT3IbFw0SYjIxkmImxcJXyPcp6hclrI9ytldplZiax9WtmZr5ZratbMypKpM7fHPRCQcRaYxM0WdBjosrxY2MhGsJkpiVIPuEfRNkOQLYDkPhOlIRkYcpbFbNOk6Gm0nNP
wVm9wpu2xbZcZ9QxbCbAbGSU9xsRKY2AqSzj5Rq6eXyIycfKNLTv5UYaEq9jdj4MrYuCzAzV06IxCoDkI0pE0ckFWwwGiGkHQLRUpkzjuKaofJWLaKlMposadVFe4posYVSSI8v8AqFzAt0aj2wfcZunW6NLLth+45fB7tOPN9T3T9zByY7XBv9Q8oypxO7KKyc2Or2KeSLT4NnLjTTKGbHzsaSs7FBrcBodODTFMskIbDdCfI7GTr4DUgkjkgkjC0JhG2X8MCrhW6L+GKpGWr7OLGOHBZxxF40WMaM1wyEaQ1IGK2GxQGlLYhoYlsBIKEJbjoIUluOgjGksYV8y+pq4lUUZmBXJI1Ma2Or+NPa4N8FXUOky0ylqnUWdnlpsTWy5Mxsva2e7XuUG9zh19ZBbIs5s5GdhDQaAQyJFhjQ2LpikGmZ/kPPtEMJ7IFnrRqA45nN7CqVjSLezVwrYzNGrNXCtkZVUWoDoicY9EKSQySBwE6j9Rnk+qO856vUuoP6Hkte71L9jXP1jr6ptAtDGC0awiWhUuRz4Ez5NYA2RZzZDLAkxuNWxUSzgVsnQW8S2RbxoRiXBZgqMKs+CHxVCYD4GdM2C3RZilVCID4uiDIi/0fUtf6Mt0aWLImk0zO1cXOCceVuM0ea4pN7ow8mf7Z31WqnsMT2K0J2kPT2MpVGRdnAphFdAgZHWC2P8AQJyKyvKFlpqwHHcz1QrfCTOWFrgsdgSRHaXFPInFCMORwzpr1L+SCaKGSHbktG/ivtN9PU6WayadX6UzxepxvFqcmNquybX7T1PScndiSb8GN9oMDxdSc0tsqUvv4f7jvxfR33GU0jlGwlCxqgkVbxBccd+BqgkEtjm6JttNDSOtIFzITsX5M5Mdj3ZWgWcaIsVFrHsWsfBUxsswexjYqHp0GpCUw0yeKOg7Y5PYRjdDUyeAdgNkOVC5SFwOlL3E5HZ05iZz25DgKyPcrzfIycrYmbNMwlTUbpmVkjUmaed8mfkXzM6cJJWwaYLVHWaGamEpUJTJ7hKWFOie9lbvryd8SvIcLqz3gufuV3k9wZZNuQ4Vps8iS5KmSdsjJkvZMS5lSItdJgNkOQDZRObAb3ObIW7KSZDdj4LcTBDomegfDku4HsilAt4XwY0NHEyzBlTE+C1Axqj4DkJgNTJ6ZkQgEwkx9NIMibIbLhltAtByBe44A1uOxrdCvI3Fu0R5r/iF/TK5Ivah1ia9inpVc0WtW6xmf8efVR57X7tGfJF/W7zKckdcRVXIirlhfgvTRXnHkpNZmWHNop5IU2amWHJSyw5LlTxTfI3G9xc1TDxumGvhLUeEEiIO0F5OamfhW6L+HwUMbovYHZlr6cXYIsQRXx+CxAmLPgthqFRYxOgBl0gHuybsEWgKKGxFpDYIyoWtOrmjTgqRn6VfMjRjwdv8WKjpcGdq5bMvzexl62VJm3kp1h6yVzoptjtVK8jK7Zy1i6wkAnbDRPANcjIoWg0yeGYg4gJhJiuTYLAbO7iG9jvWhsFvagXIHuuSQVNaWkVJGniWxm6RUkaeLgwq4tQHJCYDkSaSGccxwKuqdQf0PJ6t3qJfU9TrXUH9DymZ3lk/c1yy19LYD4DBkjWAmfAjJyWJFea3NcguiGE0RRYFDkuYFsVMa3L2GOyM9CLWNFmCEY1wWYIxqjYIdFCoj4ozqjIOhvckrsUuBOoy01FPkkLWF98/VAzh8HUWtoy3+8LSKoJvlh6pd+JpcrdCs76K/FzC00mWE7MjS6h0oy2aNHHO1ycevVTKsJ70GuBKe43u2I/RisiQLe5F2H6Nz5Oo7jc60LoRR2yObIF0ByVRR1CLs3SKWd2XjXKjS50jLU0mx32iw9+LDmS/VbTfs+DL0Obszretz0Gsh+ldMyJK322vqtz0cUs3seVqjjmwHIsht+4ty9wXNC3KypkhuRKluKsKLK4cWYMsY2VIMt4jLSotY2PgIhVD4Mxq4amGmAmGkTxRuNjHLYU
nSOb2JAnL3FSkRKQqc6XIcCJz3EznsBOe4qU7HITpS9xU5HOQqctjSQFZnyUZ8ss5ZclWTu2bZiSpMCyZ8gN0XAKzu4W2Q5DBrkC50KcwHMfC6a8gqeR+opz9wHIcibRufuC5gN2C2PieicrBbIsixgVhRQC5DiKg2I2L2EobAzoOg9y3h5RUh4LWHlGdDQwvgtw8FPC+C5Dwc+jWI8Bpik9gkzPqjEye4VZ1jlBykTYnuonvKlA2wWwXMhyLiktljCrVlRy3RcwcIy899BpaNfMN1jqBGiW9+xGtfyleCcyr+mBq3eQqssah3lYhnTElSQiaLMkKmhpqlljdlLLFbmlkjsylmW7KiazcsabFxdMsZo8ldKmX/RLmN7DBOJ7DLOez2DsbLuCVFCDpljFKmjLUDWxS2RZgzOwzLsJ2iFyrSYakV1INTsDPTJW7Fp2g47kaI2KGwFRQ6HJkcXtKty8uCnpVsXPB6P8AHnIqF5HSZj6+ez3NbM6izC6jOk9w3S1WJmneRim7InK5v6kWYshLkamKQaYjMTCTFphpiMxMYmJQxMRvMuZDn7iu87us7+KG3Z2LfJYDlUW7D0yuSJ18JsaVbI0sS2Rn6ZbI0MfBzVcWoDUhUBiYjSQyfoC9yoFDqDrG37Hl57zb9Wek6pKsT+h5trc1yyv0NANbDaAfBpCV5iJ8lmZXnyaQwUdQSRyRXTHijckXsSpIrYY72XMa4M9U4sY1wPiJgh0TKmbBD4CYD4kUxN0m34KCbzaj2THazL2Ymr3YOihUO9rdhITQxvsVEZJqm2xTnTEajJSSTFIQ3d98eS/pcvelvuZuHJ4fA5SeGSnH9V80ZeXH6hWNmD3Q26KGHUqdNMtqdqzgs4IY3Ss5StC3NPYlNUIzE7RDRCdcAt7h0Oba9zlJVud5OdNAESaaKmZWmPm2hMnfJU9FVKFwypt+T1fTprJp0nvZ5jJFKVm50fKmqs7vHv1EZ+vPdRg9Nrc2JukpNr6PdFN5L8m39q9O4Z8Opitprsb91x+w87Z25k50ans7uOsT3EqRRHWFEQpDYSJpxZgW8a2KeN7l3DwY6VFmHA6IiA+BnVw2IxC4vYNOiFDboFs5sXJgESkV8k6Qc2VsstnuOQi5z3e4ty9xcp7gudIqQDcxOSewMpiZzvyXIQZu2Ikw5MTJ8mkIE2KbJnITKW5pAPuBcgHIBzHwhOYDnYDkC2PiKJsGwWyGyiE2RZFnWAScRZwASDiLQxMmgxMZEUmMiyKD4FvC90U4FrC6oyoaOF8FuDVFHC+C3CVJHPo1hMKxSkHZkYrIbBbBbGBORDnXkW5C5ToqA/4hPeU3kp8krIaQ+rXdc0rNHT8Ix8U+7Kja0/COf+RffDla2iXytide6H6RVjsq6+XJ0eKcyr+mFldzf1FsmTuTfuCzYkNCpDXwKkgBGRbMpZkXshTzIqIUMy5KjVMu5VyVpLcuFU42PTEY9mORlr6Qovcfje5XQ6BnYOL2GdUXYT2MyEmi3jnsRYa8phxluVVMZCe4uH1cix0HuVsbuizj3Zjo1iK2HY1uhMUWMatog1/Tr5VsWfAjAqihzex6Xi9YXPivqHSZ5zqc6TN7VSpM8v1TJu1fkz3UbrNbtkim6YSdkMjUw0xDkHCQlLCYSFxYxMRjTCTFoNDN5DuJTAslM7uGKb2SLWkVyRTW7svaRboz2TY062RoY1sUdOtkXsfBzVpFiHAwXDgMRuOOYLezKgrJ6tKsT+hgG11iXyUYprn4xv1wDWwYLNICZorT5LMxE1uXABBJW6ISGQjbGpYxLZFqC4E44liCMrTNgh0QIoZHkiqNihqdIXBHZprHicn4RIqlqpvNqVjXFmjFKEEl4Rn9Og55JZWuOC/Jjv8AxIZSpNmfmyXPktaifbBmRky/NY8zpL+PJVbl7DkUlT3TMOGbfkuYc3G47DX2p6efdFt43+wv4dV3xSRSxZVONPdPwQ8U8L78e8fTyjl8ni/XuFZ
/xsQk2rYxMzsGri1Te5YWZS4ZxWWF1aUjm0yu8nuT8RBwdPvbYByYvvI716jmR0U26ETbDc0/IDaZcynpGSTLXS9R2ZUm9rK84pi8acMia2pm2JxPfb03WNP+m9JyRirml3w+qPBNuz3/AE3OsmJJ7ujyPX9E9F1Caiqx5W5w/ijt8OuzjS+4zLO7gGzrN0GqY2EyqmNgyaa/iluX8L2MvE90aOF7Iw0uLkWNiytFjYsyWsJhJiEwkxGbYLYPcC5AA5GVczqLHykVNRKoNlwqqOe4tzAcgHL3KkLo5SFORDkLci5CdOQqcjpSEzkXIA5JCXK2TKQps0iaJsW5HNi27HImpbIsiziuEmziLOsAmziDgCTjjgCUw0xaDTJoNQyIlMbFkULEGWcTKkCzjfBnQvYnwWYyKWNliMtkY6hxaUgu8rqQXd7mVgOcwXMW5gSnsLgE5ipzBlMTOZchOlk3OWX3K8p7gd5rmDrV0Mu7LZ6HTrZHnekq52ej064OHz/78Xn42NOqxIzOoSpS3NPHtiX0MbqU6TOvx+sxd+Mi7dkgJk2aklsVJhNi5PkAVkZVy8FiZXyDiaqZEV5ItzQiS3LhFRVMYkCkMiiKEpDYoiMRsUQExVD4NoWkGtmLg4epsbiduyqmWsCsnU5CXcXCLmJFXCuC5jWxy36Z8SxhVtCIotadXJBPql/EqQU3SZ0OAcjpM9HPrC2drZ0meU6lO517npNfOkzyetleV+xlplukNnRlvQDdsG6ewuIObJjJpi7tWSmLhrcHfA5MqY5U0WL2slUMUqDUiv3hRmAeYaaBbrYbHdUDODT4O5VRBWzR0i4KGNbmnpFwZbEauBcF3HwVMK4LmNUcyz48BAoIRuYEnSCYE3sy4VYPV5W0vcy2aHVHeVIoNbm2fjFAMg2gJFwyZrYrzW5amtivPZlwBSHYY27FRVst4YbBTPxosQQvHEfBGNUJIYkRFDUiVJiqKXUcjUI448yZeeyMtJ6nqNcqL/cGfvSrQ0uP4WnjHy1bDkyW62SFZHSbF/ZKOvyVGjInO3yWeo5rnSfBnOTOjGfRHwyU+S3hyu0ZsE29kXtPBt8BqF1q6ebdGlhk9jN08KSs0cLrk56fR5tLHL82N9kv2MR3Z9OqyQdeq3RdUkg1Mz1ma+jnVGOsTfIxalPyhuTS6bK7niV+q2f7BM+l4mv5PLOH1pozvglT+a56lJ7NHfpKe9iJdLzreOeEvqmgJaLVx4UX9JB/4cLlWP0pJ02MhqIvyjKy6bWR3eKT+lMSsmbG/mjJfVD/APKp9vQKSfoFVsydPq7aTZq4ZqaVMy13P040en5HCa3LX2g0X6b0yU4K8mJd8a81yvwKWnTU19T0WnXdhSluq3N/DrtaZj5c2A2Xer6daTqeowLZRm6+j3X7yg2d6LOXgk9xkGITDg9yaIvYnuaGKWyMvFLcv4ZbIw0qL0ZDVIqQl7jkzNawpBKQlS2J7hcM3uAlIBzFzmOQJnP3KeryfI9wsmSvJQ1WS4vcuQrSnMFzK/ffk5zNOEa5gOYtz9xbmVIBSmJnPYiUtxbZchObAbo5sW2VIlzkDZDdnFlU2dZBwEmybBJsAI6wSbEE2dZxwB1hJgnIAamNixKYyLIsCzBliD4KsGWIvYzoWsch0ZFSEqHRkZ2BZUwu9UV1ILuM7Ac5C5T2BchcpC4BSmInImUhM2VIQZSA7tyJyFp219TSB6Po8agn6nodOraMPpMaxI9BpVbR5nkvfI0z8ab+XD9x53qc6i/VnoM7rE/oeY6pPx7nfn4vSipE9wpM7uNCMbFyZFgtgAzYmW4yTFNjSVNCZLcsNWhTQApLcOETqoJcioGqQSYBKJ4DU6JTF2cmHAcnbRf0y2RnY92jV0y2Rn5PhLmJcFuCEY1sWYrY5KZkUXNOt0VIIvaZeS8TtUuJUhWZ0mN4RV1MqTPQvzi2L1KdQe55fPLum37m91XJUWedk7bMfrn1fYWwWSyGMkxdbMK
xbCTfkVgOg9x6e1FfG02htkWGKyU2iFRIKYGN7lpw7oWVYLcv4F3QpnXa0k6qKNOjT0i4Kc4VP7y/pFSRjupaOFUW4FbEti1AxWcuCSFwEotiAWwMj+Vje1+gGWD7HsaQq811B3nr0KtbF7V4ZSztpFR45R5TRrKyJaBaoc0LmioCJK0Iktyy0Jmty4Ycatl7FGkivhjb4LmNbE6OGwQ5IGCpDYozqhRQ1IiC2GJEKJ1E1jwZJeUtvqVOlY2lPK+XsmH1Ob7YYly3bLWDGsWnhCt6tlf0TpFXUT7YNlmbMzqOTtxvfwPM7SY2pyd+VsVCDm+A4Y3OVtcs0tNpOG19x03UzE2k6fSt1tsaWHTqKW33j8WFRXAxtRW5za10cRGKiNWRLyUsuoS4ZWeq35J5aOtdZlfIyOa/JjwzNtblmGSx8NqRyX5GKe3JnxycbjVkvyBrimT3lZT9yVP3KkB7khWSEZqmrAc9jlNFcTVPUaaEXcVT9huhyOM1Fuws7TiVITcZpp00yPJ4puIj1mCGyfqrNrA6xpGD0nUQ1MFCU0pJUrNfKs2PDJ4kpzS2V1Zx4zvNdEnp4j7UTUutZmt1ST+tGI2XOpTyy1mV5045G22mt0yjJnp5npjv669xkXuIT3GRY6S3je5dxSryZ2N8FvHIw1DaGOY9SKMJUWIy9zKxUWVI7v8AcSpHOYjNcxGSdI6U6RXyTKkAcuTncztVktcj809uTO1E7fJtmBHeQ5iu4iy+A1zBcgGwWyuEJsW2c2A2PiXNgNnN7g2USTrIOKCbJsElcgEnJNhKLY7Hib8E2yESothKDLccG3AxYaXBP7Pij2P0OcGvBeeFehDxL0F+hxRaa8EFx4bXAuWFrhFfoEobFguDXgJIVI7GyxF7FaBYjwRQamNTFIKJFI5Owu4UmTZFgG5bANnN7ANi4ENipsNsVNjBUmdDeaXucwsCvNFe5XyB6vpkaxL6G7o1c0Y3T1WOK9jc0S+dex5X3bWfFnVusTPJ9Tnc0r8nqNfKsVHj+ozvPR6OT0R3HdwrvOUrNCNcgWwLIctgCWxbZDkA5bjKjBaJTtHMQLZ0TpcnRACRNEpWxijSEC0mwlAOkTYgPDC5I1sEaSM7TK3Zq4VsjDyUlnGth8RUFSQ2JzmdjW5o6dUihjVtGlhVRRr4Z/kqfTW9mZ+rlSZem6TMrWzpM69VVec6tku1ZjNl3qeS8le5QsiOe/RWQDZ1jAiLRFkWAMjKmqG997laxkG6omwLEW2Ngre4nH4LCdE1UefSqRe0r3r1KdfOW9L+ujp01z9MyxqfBb0q2QnKk5lrTR4MNF/a9iT2LeOFicMS7ihsZqg4Y0kthqgq4CjEYo34HIZPw/YXlx/K9i6oC8sPlexpIVYOTAnN7CZ6dNcGvLDbboVLD7Anjz+fSVulRSyQcdmj0uTDzsZup06d7FypsYjQuSLGaDhKmhDVs0hQzCi3BCMMdkW8a2IqoOK2HQQMUOgiKqCitgiUtiMjUMcpvwiTZ0l+kdRrlRdGjP0KnTYNueVrd+S4029x1KvNGRrovJNRRuZIWnSKbwLvbaKzeFVLS6RRSbRoQxqKuiUkkJzahRTSYrbRwzJkUEUM+pSvcRqNVzuUJ5HN87F5x0j8uocm0mKjN3bYCOWxpyQl3HPgtQye5nQlXkfGdeSLDaMcj4sbHJ7lCGUasnuLgXlk9yfie5TWTbk74nuVIFz4nuSplRZAvie4yPnO1RUc6YTyFecxyJaGj1bxZE02qZ7XpmtjqsSt3JLf3PnCyNO0zZ6P1B4cq+bz5J1n+22Nd9PQ/aLokeoYHmwJLUQVp/73sz55kTjJpppp00+Uz65p80NRhU4tNNceh4r7YdHcMv6fpoNqbrLGK4fh/eXi/wBHvPY8qnuFF7i97GRHWKxjZbxsqYy1jdGOjWYMdGRXiw0zMz1LY5zFWc5CMcp7FbLPZjJPYq5HyXkE5JbMoZm7LeR8lHM9zbMCLIsGyLL
4BWQ2C2C2PiRNgNnNgjJxxxww4445DCUrDjGzoxtljHC2iNa4HYsd+C7DGkjsWOktixGNHPrYAoV4C7EMUQ1C2R+gR2ex3w/YtrH7BfCoc0FL4fsA8XsXXBLwLcUXKFGeH2EPHXg0pR24K+SBc0FWKpliHADjTCjwFI1BpgIJE0DOIOJpJsFskFkgLFyDYuQAp8jtGr1CEvktdOjedMN3maHq9EqgkbmiW9+xjaVVFfQ29Eqi37HmeP3ttCupOoM8X1Cf+UM9d1SWz38HidbO9TI9LJb+h77CUhCkT3GhHOfuC5+4ruIbAdG5e4LluC2Q2MjYSDfBXhKmOTtCCJBQTIYyKoQGkkS2RZ1iN2x1kWQnbSANDSLg08S2RQ0saSNHGuDl8lI+C2HQFwQ2KMTixhVtGnBUihp1bTNCOyOnwT+15BldRZidRnUXua+olSZ53qmSoP6G2hqvNa2ffne/BXsPI7m37ixRg6zrOYNjArRFgtnWAEtxuPkSmOxioWobINMVFug0yLFRj/6TLelXzJlWMbZewR7VZvq+mkHN3M0NMtkZ0V3ZDV00aSMNCL2FcF7EqRVwrgvY1sTFw2CHRiDjQ+KKhhUNgckLXA9R2ImtiwoPHyLljLjgLlEOBQyY/YpZ8S32NacCnmgt9g4mx5vW4OXRlNVKj0mrx2mYWaHble3JcrOzlHiWyLWNWJwrgt4oWKqFFD4RCxwQ5QRBwuqRU1rbxKC/0mXpRFrB8XOk+EIWu0mHs08VVN7sc4VyW3jUI/RCG7d+AIqUVGDbKGaajbY7WalRTVmFq9Xs9ysy0j9Rqkk0mZefVNtpO2Vs2pc20vxFJ3uzoz4+fSMcnJ22SgUg0i6SUiTkjiQlOg1IWTdCsCwphrJ7lVSJU/cXAuLJ7hKfuUlP3CWT3HwLqyE/F9ymshPex8C08linP3Fd4LmORJrkHhzOE1v5KzkD30x8OXj3/wBnOoJ/yeSVJqt/B6HLCOROE0mmqa9T5fodbLDNNNqj1+g+0EJQUMrTpbO90ZWcby9J6r9lIZJSzaOVNu3F8GVj+yutb+aUIr1pv9h7LDr8U0msir0boZPW4Irea+4f/pP7P8yvI4/stqYQlPNnhCMU3sm2zKS7ZNXdNo9T1frMJ6aeDE1vs3e55ZO3Znq9TqSHR4DQuLGIis02c2cQ2IwyezK2R8j5vZlXIzSEr5Hsyjle5cyspZHua5AbBbIbIbNAlshsizrGTjjjgJxxBww4OKsFK2PxxJ1eAeOBawwQqEdy5iVHNvRmwikhyQMEMey2Oe0OSoZBWxCdui1hjdB0H48dpByx7cBY0lQyk0KUKGSFCWi9kgV3Dc1miV2hM4F74doXPFsXKbNlHchbFrJioQ40y5SrkGgEEhEJEkIkRIbBYTBkIBYuXkNgSECnyXulxvLfuUXyafR43O/cny3mKHp9OqSNvSKsTZj6dbI2tOqwo8/wT/NvPrK6tKkzw2pneebu9z2nWJVGR4bLK8sn7npY+o1fYkwrFJhWakOzrAs6wCWyGyLIAkp0x8XsVw4ugHVlOw06QnGxjZJibIsGzmxATYWLeaQpsfpVc7FfhNbTqki/jRTwLZF3Gtji3TPih0VuhUUOgt0ZnF3TIucIRp1sPk6R2+L1lpFPVSqLPL9YyUmrPR62dJnkerT7p17jtRtkvdnUHRDQ5WRbQDVDWgXGxgo5DHAGhhyHwQpIdBcCB8V8oaREF8oaRNNnY4bj+I0RGG/AxQbKtaC08Lkma+COyKWmx00aWGPBjVSLeGOyLuNFbEtkXMaCKOgh8ULghyRpFJS2Ikg0iGi+AloVJFhoXNUgJUmtipmWzLmQp5mkmIqzNQtmYuqglK0bGpnVmRqJWxxnUYkti5hRTxuki1CdBR1ci6Di7YnHbLEFRnTGkmFjqDvyQuAGm37EkfKbl9CtnnUXQ6nVFbUNKDb9Bw3nOpal45NN7mFlzSyS3exc6tJzztv7jOR3ePMk6VGg0gE
hkUXUmJBJAxTGJGdDqJo6jqJCGgWG0AxgLZHcQ2C2VIB9xKmJbOsrgWFMLvKykT3C4Sw5nd4juOUg4R7kDYCbOsD4YpNOx8NROL5KtnWLip6a2LqWWCqM3X1GvqOWapzb+8xosdBmdzF/qr3xpTe7Y2DKuN8FqBnU1YgGhUBqIAgWwvADewEXN7FbK6ssTezKuVlwlXIynke5ayPko5HuzfIQ2C2Q2dZoE2dZB1gSbOsg6wCbIs44AbjVsswRXxFvGtjLdM7GixAREfBHLoLMEMa2AxoelaOe32ZMY07LeHwKUNxuNUL9A9OhkWJQSdDlI1pMD4dsOO41R2LlLiu8VKxcoFxx2EziXNGo5IL0KuSFmhkgVskKNJQpuNEJUOlEW0WmoRJCJAkMFhMGRIAwJBsXLgQLZsdGjwZDNvo8dkzPz/6CfXo9OuDZxbYV9DI063RrrbEvocngn+TfP157rUqhN34PEt22/c9f12dYsjPHPk9DDLX0SYVgIlM1IVnWRZ1gE2cQShBxyOOAjYOhndsJiFYqY+47uAsFsAZ3e5d0St2ZydtI1tDDZbGe/UJq4Vsi5jWyK2FbItwVI4dfVmxQ/ErkhMUWcC3FPqov4VSDyOkzsapAZnSf0O7M5lpPjL106T+h5PXS78r9j0vUclRZ5fM+7K2Rb7ZaJSOcQ0vYLtDqSHAjsLHad2Ico4R2APGW+0FwK6SooUxsE0NcF6HKFB0hY+BqQEVQxLYQBHHvwMji9iysVPgNY/YVrYOHHTL+GO6E44UuC5ijwRVLGNFjGtxONFjGtxwz4IckLgNRpDEiGSuAWywGQqb2GNiZvYAr5XRl6rJ2+TQzPZmNrW5WkJFUs8+663M/MqtsvbRTvkoaqVscRQQnTLWFt1ZRxptpGnp4LZi18C5iWyvYcmvHgUltQSXhGVM2Lt0PhBPkRBNVsPi2lYjFKCrYzNfFrG6NFzpGfrZppplQPG9RVzZnpGt1OCttIy63O7Hwq5IbBAxQ2CHSEkGkckEkZ0nUdQaR1CBbQDQ1oXJBASxbGyQqRpAW2dZDYNlgdhWKsJPYANMNMUmFYgOyUwEyUwA7OsGzrEo2L3HwZWi9x+NkaC3iZbhwVMXJbhwYUHRGIVEZEkJsFsJgNgReR7FTK+SzkexUyvY0ySrlfJRyP5i5lezKU3uzfIDZKYIRYcSDZIiSdZBwBJy5IOTGD8bLeN7FPG9y1BmGzWYblnGivjexahwcuzPgOiJixqZz6M1Uw0hSYxMyoMT2OshMJLYcopmN7lqLTRUiqHQkawj6TQqcAlJnN2ioFecLEZMe3BdaFTiXKTMyQa3K8lTNHLDZlHLGm2aykUcccNKGDIJgsABgS4DYEgBT5PQ9JVQR59btHpOlKsaMf5H+p5+t3TLdGtN1if0MvSq5L6mnm2xP6HP/AB42jyfX51imeTo9P19/yTPNHfj4w19QSccaB1nHEpAHJBJWEo2EoiAEjqG9p3aLoLSJoOgWLoQwZBMFjCcS7ppe5vaKFJGNpI92VM39LCkjDzUT6vYlsizERjVFhI4v7WbHkt6dWypBbov6ZcF4napcSpFbUSpMsPZFHVSpM7L8XfjE6pkqLVmClbNPqeS20mZyXkyZOSColKjhhFHUvQlEpWOBCR3YGkEojSV2Hdg9Rvwd2ACVAlR2HKJPYPpLyx78BLHvwPUA+z2JroKhClwOxqmEobBY1vwSZsFSHY+RaVIbjRUCxENARDRpDFewDZLewDZQQ2IyPYZJiJvYCVc72ZlZt22aWZmZqHSYIrO1E0m9zPyO3bLedtypbgQ0s57tOg6z72l4cdtUjTw46XAen0lJNqh8oqKpIzt6qQtypUhmJb7im0nbZyzK6TFVLdpukMTVFWM/NhLJ7iA8rpGPq81yas0c2RdjdmDq8m7LzOhQ17tNmQ+WX9VktNFF8nZj4Qo8j4IRHwWIBojEg0gYoYuDOkijqJo5oABi5DWKkOGVIVLgbJCpI0g
IkCMktwEi4bkiSUiaAkIJEIkQScjjgNNnWQdYA2DLGPwVYvcsY2Z6NdxFqD2KmFlqBhQemGmLXAaZKUtgNhN7APgIReR7Mp5mWp8Mq5eWXkKeZ7MpT/WLuXgpT5OjIQcccUBHAnAE2dZJwEiyTiEANgy1B2U4umPhIy1AvY5FuEtihjkWYT4OXcNci9h0WVYSsdFnPqGemGmJUg0zOwzkw1IQpEqZPAsqQcZleM78jYs0hLEZJoNbiIMamXIBNASQb4BZUBOSNooZocmjJFbNBMuEzWqZFDskab2FVRaaBgsYwGhkBi5DGLkABH9dfU9P0xVjR5nGryr6nqOnKsaOf+T/AKnn629IrmjQ1LrEylol86Leqf8AJMy/j/G0eM6+/lr3PP0b3XnbS9zDaO/Hxz36E4KiK3LDkhkINs6EW2PUe1IVoCopBUFRxKg0cSQ+BAL4BYTBYwBsBsJgPdpe4yX+nwtpvyb+njsjJ0GOkjawqkjj8t9nlZxrYfFCsa2HRRguGQVtGlp40ihhVtGliVRNfHPap9FkdIytbOk9zRzSpGJr8lRbOi1WmDrZ92Vq+BEeDsku7I2SuDNkJsghsixgcQ4oCIxABpBJEIJDJKRNI6MXJ0k2/YfDTTfNIVvPoISCUG+E39C9j0a2b3LUNPGPhEXy5g/KEhkYkJDYLc1boaqPAONbjci2oCC+Ymj+zq2Q3GhaXAyA4DYhIFMlM0hpbpCmw5vYRJ7FEichGR7ByexXyS2Aqr5pcmZqHyXc0+TN1E+UFZ0nFjUp21ZqYMKmltsZuGVNGrpsiSRPTzxdhpl2UlRQ1eGWO2tzSx5kluxOoaybeCbYvU/48xnzzc3FppEY8teTbyaTHkVNJlLN0vl4217BLGZCzbck/G9ypm0+owXcW0vKKstS1s7TLmen1e1GqqNWYupz23uTn1F3uZ2XJbe5rjBoyztiU9zm7ORvJwjI8jsfgTEdAnRHRGIXEYiCF4IaCOYjLaFtDWC0MENCpIfIVMuGrzW4NBTALNNHIk4CdRxxwBxx1ggE2dZ1kWMzIvcs4m7KsHuWMb3I0F3E7LUGVMT2LMHRhSWU9gkxUWGmRSE2C2SQwhFT4KmXllvI9inle5cCpmfJTlyy3mexUk92b5CDiCSg4444A4444CcECcAEmNgxSJTJoXMcixCZQhIfCZjrIaGOY+M7M+E6HQyHPrBrymGplRTvyGpmVyaz3E99CFM7vJ/IWYZNyxCdmcp0x+LJ7h+Q0oPYcinjndFqD2KkBiex0jkE0UCmhOSNostAuNrgcDLyw3ZXcaZo5se9pFOcdy00imA0NaBaBJTQpoe0LaH0Awr+VR6jQL5F9DzmCP8AKo9LoFUEc38m+lZ+tvRL50P1jrExOiVy+4brX/JE+D41nx4rrbvIl7mS1bNTrL/lkjMaO3HxhfoaJSJoOEVZZDxxpWyW9zm6VIgRus6yDhGmzmyCGwDmA2E2C2ABJg413ZEjpMbpI92W/cd9Qq2tFCkjWxrgoaWFJGjjR5+72qh8OBsULih0ERFRY06tmhHaKKmmjvZbk6R0eOemmVXUzpPc871TLUGrNnVzpPc8t1TMnLtvk0qN1TTt2GnSExmF3bC4gbZ1i3MhT3DhrEWMTK8XwPwwnkdQV+/gXwGJlzT6aWSnJUvQbpNEo03u/VmljxJKkjDfmk+HwjHp1FUlS9ixDGl4GdoSVI5dbtOQKSXgKjjrI6ZaQ2C3FxQ6CPXaIyIXFVIbMWv1iak5PgOIpPehqYRRiJsBM6zSBGSVIS5E5Zb8iZSGSJy2KuWdIPJLYqZph1FqvnnyZmebcqLOfJdpclKabdk9Z326OTtZbw6pKjOnsxfxHHyH5tOXj0C1ardhLWQe1o869Q0uQP0p3yyL46f6ephnT82PjJM8ti1ji07f4mhp+oJ0myLnWTlbbxQmqaRna3o+HOm1Gn6rYs4dXGSW9/eW4TUlswnksPkrwXUuj6nStygnkgvRbox
JXbTTTXKZ9YnhhkTTS3PP9W+zuLUJzxrsyeq8nX4/PPlTZY8KwkWNboM+jyOOWDrw0tiukdMsvwS9MiPgIjyPgTQahiFRGIikYcQmSIwMFhMGQ4ZUhUxsnsJmXARPkgmXJBZuRxy4OsYcQ2dZAE446yANzZx1HDAo3ZYhyIjyPhyRoLmJ7FmDsrYixAwpHxYxMVEYuCKQmQ2TYDYEDI9inlfJayPYqZfJcCpmZUlyy1lZUlyzfIccQuCSjScQSInHHHAHHHHAQiUCggAk9xkZUJQaZFgWIz2GxnRUTGKXuZ3JLiyDFkKamGpmdwfVxZAu8prIEshP4NZ79xmPJvyUviDMUraFcBtaeV0X8b2MvTT2Ro45XRnzhrKCAjug0gDmjqJZFiCvlhaKWWFNmlJJoq5oJlSis6Spi2WMsaK7GktgsNgMZGYF/Ko9Jol8i+h5zT75Uel0a+RHJ/Jqs/WzoVu37E691jYWiSSf0A17+Rr2K8H+rX+niOsO9Ql7Gei91Z3qvuKK5OzPxzpSGLZAIKywJsghs6wNJxFnWHA6yGziLEHNgNhMBjh8BJl3p8Ladc7lCVtpGz07HSRHkvMp/tr6eNJF3GivhVJFqCPP0syCHwQmJYxq2giou6dUkxmZ0mdhVRFaiVJnVmcjSeoytdkpPc8dr83fqHvstj0nVMqhCTvwePyz7srfqypO1hu+z4TGqVopwk7GxlsPhQ5uzopykkk23wkO0miy6lppOMPV+fob2j6bDCk0rflvlmW/LnClDR9OnOnktL0NvT6WGNJJJV6IsY8SiuKQ1RXg4d+a6VwEIJDEiUqJRiYWqIbJkA7AOs6yDgCYjsa2ERH4+D2FomKvcZkYi9yaR6YaYlMYmEM2yG9gLIb2Lh0rLK2JlLYnJNJttlLPqVFNWPqLRZsqinuZubM5NpEZMzm+dhTaSJ91AWt22+RU2kHKaS5K2Sd8F5yVLyNFTK3ZZnwVchrIkptsG6CYLKNHe0xkNQ0+REgGxfmU2rh1rg1v+01tJ1JNpNnknNryHi1Ti1uY78HfhyvoWDVRmluWlUl6nitH1FxaVm/o+oRnSs5NZuGkq1rNBi1EGpQTT9UeR6p9nZ4W56ZNrntZ7jHkjNbNEzxRmmmrL8fmuU3EvuPlTjKEnGSaa5TQyPB7fqnQcOqTko9s/DXJ5LV6DPosjjli68SS2Z3Z8k1Ed59KQxC0HEZmJnMFBAAsCQbQLQAqXAiY+SEzLhkS5IJktwWizSCccMJsizrOqxkjkmiapHNgaGRZzIAGR5HwK8SxAnQXMXBZhwVsXBZgc9I2IcQIhoikmyGS2AwBeR7FTL5LWR2irkfJcJTyvkqvdss5Xsyq3udGTScccMOJIJAnHHHCDjkccBJRIKCQBJKIJQgJMJMFBIkhphJgRJFwC7iVIE4XAYpe47FLcrIfi5QrPQaumlSRpYpcGRhbVGhhnwcuopp43sh64KeGV0Wk9iA6TBOe7IewG5sVkVoNsFuxhSzQKc1TNLIrTKOaNFQldi2G9hcnsUk3Su8qPT6NfIjy+kd5V9T1Wj/VRxfylZbWjXyv6CeouoP6D9J+oyv1L9Rl+D/Vr/Tw/VHerZURa6k71ciomdufjnEjrIshsoCsiwWyLGY7OtAdyBlPakPhwbn4RHcKs67Hwzu4BsFM5uw4BY1eRfU3tDGkjE06udnoNEqijn83wue2liWyLMUJxLZD4rY4aocUWsCtorxRc00dx5+nF1KolHVzpMuzdRMnXTpP2OtpfUec63mrHJJ7s8wmzW63m7snZfkzdPhyajLHFii5SfhePcrPqdc190WHHPJNQxxcpPhI9D07o6VTzrul6eEXOl9LhpcabVzfLa5NiGNRWyOLzfyPfMrkKw6eMEkktvYsKCRKVMOtrOK21YFsSmQzooQGdwddENgEN7Arc5s5eBQOaBewbewtjCYofDZMTHkclsz2Vl5HyITtsbkfIlbszpHJhJi09iHkUfJUM26
3F5syhF7lXNqkk6exl6nWubcYsfUXR+p1e7SZQnNzdt7AW3u2C5pLZjk6gbkkhc8iS5FTyVshdtvcuQCc22CccaRNLnwVspanwVc3JQJYLCkBIZgkBINgSGZOR7CW6G5GKZcODhmcHyaWk1zi1uZDIU3F7Mnfjmob3Wg6ldJs39PqI5Et1Z820mrcWtz0vT9e9k2ed5PDc30vr10UpIr63puLVYnCcE016C9NqlNLfc0MWRNclYosl+vn/VOh5tFNzxJzxenlGUtj6rn08c0Gmk7PG9c6FLG3m08fdpeTqzv/AKys/P8A/HnkwgUmm00014YS3KHUNAtBsFgCpLYRNFpoRkWzKlCs1uC0Ma3BaNFFtUQE0C0UEUEtiDrGEtkMg4DcR5JIXIwZAsY1uivDks4zPQW8fBYgV8fA+BhSOiGgIhoik58AthPgBgCsnBUystZHsVMrNMhVy+Sq+WWMr5K7e5vPgcuCSESUHEkEiJxxxwg5nHWcBOCQISAJRKIRKEBIJcAoJCAkSRElCJJxxIg5IsYUJirZYxqqJ0FrHsXcMnaKWNXRZg6Oen1pYZU0yzGZnY8niyxCd+TI11NPc5ioT2Du0AC3uQ2c3YLZUCJK0VM0bRabE5FaGGbkVMRN0W80aso5tmaQqsaJ3mX1PV6T9VHkunu8q+p63R/qo4P5f08NvSKoMrdS/Uf0LWk/UKvUv1H9DTw/6tb8eF6l/ncirexY6k/8rmVLO3PxzisFyBbIbL4BWQ5ANguVIrgHKfoBYDluSmVxcHbZKBvYJAoSJIRIhxZ0kW5Weg0iqKMTRR3X1N/TRpI4/NSXsaHxQnGtixBHGDIIv6eNIp41ukX8bUIW2kl5ZeJ7VPqczpMw9fNtOi5repYcaaTTfueb1vUZZpOMHsb2jepPTK1GH42qdLvbdJHouk9OWnxptLve7aX7BHStFbWbIvmfFrhHoIQSVJHJ5vNefmIzP7DGFLgKqDpHVaOKrCgm6VA1RFN7jDuWGkAtgkxBL2AbsN8Cm9xBz3J4IXKOY58DnwA2E+AWOB//2Q==\"]}" http://localhost:8866/predict/ocr_system
diff --git a/deploy/fastdeploy/README.md b/deploy/fastdeploy/README.md
index 3c157b4cf..db02fd23c 100644
--- a/deploy/fastdeploy/README.md
+++ b/deploy/fastdeploy/README.md
@@ -1,12 +1,12 @@
 # PaddleOCR高性能全场景模型部署方案—FastDeploy
 
-## 目录  
-- [FastDeploy介绍](#FastDeploy介绍)  
-- [PaddleOCR模型部署](#PaddleOCR模型部署)  
-- [常见问题](#常见问题)  
+## 目录
+- [FastDeploy介绍](#FastDeploy介绍)
+- [PaddleOCR模型部署](#PaddleOCR模型部署)
+- [常见问题](#常见问题)
 
 ## 1. FastDeploy介绍
-<div id="FastDeploy介绍"></div>  
+<div id="FastDeploy介绍"></div>
 
 **[⚡️FastDeploy](https://github.com/PaddlePaddle/FastDeploy)**是一款**全场景**、**易用灵活**、**极致高效**的AI推理部署工具,支持**云边端**部署.使用FastDeploy可以简单高效的在X86 CPU、NVIDIA GPU、飞腾CPU、ARM CPU、Intel GPU、昆仑、昇腾、算能、瑞芯微等10+款硬件上对PaddleOCR模型进行快速部署,并且支持Paddle Inference、Paddle Lite、TensorRT、OpenVINO、ONNXRuntime、SOPHGO、RKNPU2等多种推理后端.
 
@@ -14,10 +14,10 @@
 
 <img src="https://user-images.githubusercontent.com/31974251/224941235-d5ea4ed0-7626-4c62-8bbd-8e4fad1e72ad.png" >
 
-</div>  
+</div>
 
 ## 2. PaddleOCR模型部署
-<div id="PaddleOCR模型部署"></div>  
+<div id="PaddleOCR模型部署"></div>
 
 ### 2.1 硬件支持列表
 
@@ -27,62 +27,62 @@
 |NVIDIA GPU|✅|[链接](./cpu-gpu)|✅|✅|
 |飞腾CPU|✅|[链接](./cpu-gpu)|✅|✅|
 |ARM CPU|✅|[链接](./cpu-gpu)|✅|✅|
-|Intel GPU(集成显卡)|✅|[链接](./cpu-gpu)|✅|✅|  
-|Intel GPU(独立显卡)|✅|[链接](./cpu-gpu)|✅|✅|  
+|Intel GPU(集成显卡)|✅|[链接](./cpu-gpu)|✅|✅|
+|Intel GPU(独立显卡)|✅|[链接](./cpu-gpu)|✅|✅|
 |昆仑|✅|[链接](./kunlunxin)|✅|✅|
 |昇腾|✅|[链接](./ascend)|✅|✅|
-|算能|✅|[链接](./sophgo)|✅|✅|  
-|瑞芯微|✅|[链接](./rockchip)|✅|✅|  
+|算能|✅|[链接](./sophgo)|✅|✅|
+|瑞芯微|✅|[链接](./rockchip)|✅|✅|
 
 ### 2.2. 详细使用文档
 - X86 CPU
-  - [部署模型准备](./cpu-gpu)  
+  - [部署模型准备](./cpu-gpu)
   - [Python部署示例](./cpu-gpu/python/)
   - [C++部署示例](./cpu-gpu/cpp/)
 - NVIDIA GPU
-  - [部署模型准备](./cpu-gpu)  
+  - [部署模型准备](./cpu-gpu)
   - [Python部署示例](./cpu-gpu/python/)
   - [C++部署示例](./cpu-gpu/cpp/)
 - 飞腾CPU
-  - [部署模型准备](./cpu-gpu)  
+  - [部署模型准备](./cpu-gpu)
   - [Python部署示例](./cpu-gpu/python/)
   - [C++部署示例](./cpu-gpu/cpp/)
 - ARM CPU
-  - [部署模型准备](./cpu-gpu)  
+  - [部署模型准备](./cpu-gpu)
   - [Python部署示例](./cpu-gpu/python/)
   - [C++部署示例](./cpu-gpu/cpp/)
 - Intel GPU
-  - [部署模型准备](./cpu-gpu)  
+  - [部署模型准备](./cpu-gpu)
   - [Python部署示例](./cpu-gpu/python/)
   - [C++部署示例](./cpu-gpu/cpp/)
 - 昆仑 XPU
-  - [部署模型准备](./kunlunxin)  
+  - [部署模型准备](./kunlunxin)
   - [Python部署示例](./kunlunxin/python/)
   - [C++部署示例](./kunlunxin/cpp/)
 - 昇腾 Ascend
-  - [部署模型准备](./ascend)  
+  - [部署模型准备](./ascend)
   - [Python部署示例](./ascend/python/)
-  - [C++部署示例](./ascend/cpp/)  
+  - [C++部署示例](./ascend/cpp/)
 - 算能 Sophgo
-  - [部署模型准备](./sophgo/)  
+  - [部署模型准备](./sophgo/)
   - [Python部署示例](./sophgo/python/)
-  - [C++部署示例](./sophgo/cpp/)  
+  - [C++部署示例](./sophgo/cpp/)
 - 瑞芯微 Rockchip
-  - [部署模型准备](./rockchip/)  
+  - [部署模型准备](./rockchip/)
   - [Python部署示例](./rockchip/rknpu2/)
   - [C++部署示例](./rockchip/rknpu2/)
 
 ### 2.3 更多部署方式
 
-- [Android ARM CPU部署](./android)  
-- [服务化Serving部署](./serving)  
+- [Android ARM CPU部署](./android)
+- [服务化Serving部署](./serving)
 - [web部署](./web)
 
 
 ## 3. 常见问题
-<div id="常见问题"></div>  
+<div id="常见问题"></div>
 
 遇到问题可查看常见问题集合,搜索FastDeploy issue,*或给FastDeploy提交[issue](https://github.com/PaddlePaddle/FastDeploy/issues)*:
 
-[常见问题集合](https://github.com/PaddlePaddle/FastDeploy/tree/develop/docs/cn/faq)  
-[FastDeploy issues](https://github.com/PaddlePaddle/FastDeploy/issues)  
+[常见问题集合](https://github.com/PaddlePaddle/FastDeploy/tree/develop/docs/cn/faq)
+[FastDeploy issues](https://github.com/PaddlePaddle/FastDeploy/issues)
diff --git a/deploy/fastdeploy/android/README.md b/deploy/fastdeploy/android/README.md
index 19ff8a019..f419ee264 100644
--- a/deploy/fastdeploy/android/README.md
+++ b/deploy/fastdeploy/android/README.md
@@ -25,7 +25,7 @@
 
 | APP 图标 | APP 效果 | APP设置项
   | ---     | --- | --- |
-| ![app_pic](https://user-images.githubusercontent.com/14995488/203484427-83de2316-fd60-4baf-93b6-3755f9b5559d.jpg)   | ![app_res](https://user-images.githubusercontent.com/14995488/203495616-af42a5b7-d3bc-4fce-8d5e-2ed88454f618.jpg) |  ![app_setup](https://user-images.githubusercontent.com/14995488/203484436-57fdd041-7dcc-4e0e-b6cb-43e5ac1e729b.jpg) |  
+| ![app_pic](https://user-images.githubusercontent.com/14995488/203484427-83de2316-fd60-4baf-93b6-3755f9b5559d.jpg)   | ![app_res](https://user-images.githubusercontent.com/14995488/203495616-af42a5b7-d3bc-4fce-8d5e-2ed88454f618.jpg) |  ![app_setup](https://user-images.githubusercontent.com/14995488/203484436-57fdd041-7dcc-4e0e-b6cb-43e5ac1e729b.jpg) |
 
 ### PP-OCRv3 Java API 说明
 
@@ -47,7 +47,7 @@ public PPOCRv3();  // 空构造函数,之后可以调用init初始化
 // Constructor w/o classifier
 public PPOCRv3(DBDetector detModel, Recognizer recModel);
 public PPOCRv3(DBDetector detModel, Classifier clsModel, Recognizer recModel);
-```  
+```
 - 模型预测 API:模型预测API包含直接预测的API以及带可视化功能的API。直接预测是指,不保存图片以及不渲染结果到Bitmap上,仅预测推理结果。预测并且可视化是指,预测结果以及可视化,并将可视化后的图片保存到指定的途径,以及将可视化结果渲染在Bitmap(目前支持ARGB8888格式的Bitmap), 后续可将该Bitmap在camera中进行显示。
 ```java
 // 直接预测:不保存图片以及不渲染结果到Bitmap上
@@ -58,13 +58,13 @@ public OCRResult predict(Bitmap ARGB8888Bitmap, boolean rendering); // 只渲染
 ```
 - 模型资源释放 API:调用 release() API 可以释放模型资源,返回true表示释放成功,false表示失败;调用 initialized() 可以判断模型是否初始化成功,true表示初始化成功,false表示失败。
 ```java
-public boolean release(); // 释放native资源  
+public boolean release(); // 释放native资源
 public boolean initialized(); // 检查是否初始化成功
 ```
 
 - RuntimeOption设置说明
 
-```java  
+```java
 public void enableLiteFp16(); // 开启fp16精度推理
 public void disableLiteFP16(); // 关闭fp16精度推理
 public void enableLiteInt8(); // 开启int8精度推理,针对量化模型
@@ -83,13 +83,13 @@ public class OCRResult {
   public float[] mClsScores; // 表示文本框的分类结果的置信度
   public int[] mClsLabels; // 表示文本框的方向分类类别
   public boolean mInitialized = false; // 检测结果是否有效
-}  
+}
 ```
 其他参考:C++/Python对应的OCRResult说明: [api/vision_results/ocr_result.md](https://github.com/PaddlePaddle/FastDeploy/blob/develop/docs/api/vision_results/ocr_result.md)
 
 
 - 模型调用示例1:使用构造函数
-```java  
+```java
 import java.nio.ByteBuffer;
 import android.graphics.Bitmap;
 import android.opengl.GLES20;
@@ -119,9 +119,9 @@ recOption.setCpuThreadNum(2);
 detOption.setLitePowerMode(LitePowerMode.LITE_POWER_HIGH);
 clsOption.setLitePowerMode(LitePowerMode.LITE_POWER_HIGH);
 recOption.setLitePowerMode(LitePowerMode.LITE_POWER_HIGH);
-detOption.enableLiteFp16();  
-clsOption.enableLiteFp16();  
-recOption.enableLiteFp16();  
+detOption.enableLiteFp16();
+clsOption.enableLiteFp16();
+recOption.enableLiteFp16();
 // 初始化模型
 DBDetector detModel = new DBDetector(detModelFile, detParamsFile, detOption);
 Classifier clsModel = new Classifier(clsModelFile, clsParamsFile, clsOption);
@@ -135,14 +135,14 @@ Bitmap ARGB8888ImageBitmap = Bitmap.createBitmap(width, height, Bitmap.Config.AR
 ARGB8888ImageBitmap.copyPixelsFromBuffer(pixelBuffer);
 
 // 模型推理
-OCRResult result = model.predict(ARGB8888ImageBitmap);  
+OCRResult result = model.predict(ARGB8888ImageBitmap);
 
-// 释放模型资源  
+// 释放模型资源
 model.release();
-```  
+```
 
 - 模型调用示例2: 在合适的程序节点,手动调用init
-```java  
+```java
 // import 同上 ...
 import com.baidu.paddle.fastdeploy.RuntimeOption;
 import com.baidu.paddle.fastdeploy.LitePowerMode;
@@ -151,7 +151,7 @@ import com.baidu.paddle.fastdeploy.vision.ocr.Classifier;
 import com.baidu.paddle.fastdeploy.vision.ocr.DBDetector;
 import com.baidu.paddle.fastdeploy.vision.ocr.Recognizer;
 // 新建空模型
-PPOCRv3 model = new PPOCRv3();  
+PPOCRv3 model = new PPOCRv3();
 // 模型路径
 String detModelFile = "ch_PP-OCRv3_det_infer/inference.pdmodel";
 String detParamsFile = "ch_PP-OCRv3_det_infer/inference.pdiparams";
@@ -170,9 +170,9 @@ recOption.setCpuThreadNum(2);
 detOption.setLitePowerMode(LitePowerMode.LITE_POWER_HIGH);
 clsOption.setLitePowerMode(LitePowerMode.LITE_POWER_HIGH);
 recOption.setLitePowerMode(LitePowerMode.LITE_POWER_HIGH);
-detOption.enableLiteFp16();  
-clsOption.enableLiteFp16();  
-recOption.enableLiteFp16();  
+detOption.enableLiteFp16();
+clsOption.enableLiteFp16();
+recOption.enableLiteFp16();
 // 使用init函数初始化
 DBDetector detModel = new DBDetector(detModelFile, detParamsFile, detOption);
 Classifier clsModel = new Classifier(clsModelFile, clsParamsFile, clsOption);
@@ -192,10 +192,10 @@ model.init(detModel, clsModel, recModel);
   - 修改 `app/src/main/res/values/strings.xml` 中模型路径的默认值,如:
 ```xml
 <!-- 将这个路径修改成您的模型 -->
-<string name="OCR_MODEL_DIR_DEFAULT">models</string>  
+<string name="OCR_MODEL_DIR_DEFAULT">models</string>
 <string name="OCR_LABEL_PATH_DEFAULT">labels/ppocr_keys_v1.txt</string>
-```  
-## 使用量化模型  
+```
+## 使用量化模型
 如果您使用的是量化格式的模型,只需要使用RuntimeOption的enableLiteInt8()接口设置Int8精度推理即可。
 ```java
 String detModelFile = "ch_ppocrv3_plate_det_quant/inference.pdmodel";
@@ -214,10 +214,10 @@ DBDetector detModel = new DBDetector(detModelFile, detParamsFile, detOption);
 Recognizer recModel = new Recognizer(recModelFile, recParamsFile, recLabelFilePath, recOption);
 predictor.init(detModel, recModel);
 ```
-在App中使用,可以参考 [OcrMainActivity.java](./app/src/main/java/com/baidu/paddle/fastdeploy/app/examples/ocr/OcrMainActivity.java) 中的用法。  
+在App中使用,可以参考 [OcrMainActivity.java](./app/src/main/java/com/baidu/paddle/fastdeploy/app/examples/ocr/OcrMainActivity.java) 中的用法。
 
 ## 更多参考文档
 如果您想知道更多的FastDeploy Java API文档以及如何通过JNI来接入FastDeploy C++ API感兴趣,可以参考以下内容:
 - [在 Android 中使用 FastDeploy Java SDK](https://github.com/PaddlePaddle/FastDeploy/tree/develop/java/android)
-- [在 Android 中使用 FastDeploy C++ SDK](https://github.com/PaddlePaddle/FastDeploy/blob/develop/docs/cn/faq/use_cpp_sdk_on_android.md)  
+- [在 Android 中使用 FastDeploy C++ SDK](https://github.com/PaddlePaddle/FastDeploy/blob/develop/docs/cn/faq/use_cpp_sdk_on_android.md)
 - 如果用户想要调整前后处理超参数、单独使用文字检测识别模型、使用其他模型等,更多详细文档与说明请参考[PP-OCR系列在CPU/GPU上的部署](../../cpu-gpu/python/README.md)
diff --git a/deploy/fastdeploy/android/app/build.gradle b/deploy/fastdeploy/android/app/build.gradle
index de19b87c0..eda2d47b6 100644
--- a/deploy/fastdeploy/android/app/build.gradle
+++ b/deploy/fastdeploy/android/app/build.gradle
@@ -122,4 +122,4 @@ task downloadAndExtractSDKs(type: DefaultTask) {
 }
 
 preBuild.dependsOn downloadAndExtractSDKs
-preBuild.dependsOn downloadAndExtractModels
\ No newline at end of file
+preBuild.dependsOn downloadAndExtractModels
diff --git a/deploy/fastdeploy/android/app/proguard-rules.pro b/deploy/fastdeploy/android/app/proguard-rules.pro
index 481bb4348..f1b424510 100644
--- a/deploy/fastdeploy/android/app/proguard-rules.pro
+++ b/deploy/fastdeploy/android/app/proguard-rules.pro
@@ -18,4 +18,4 @@
 
 # If you keep the line number information, uncomment this to
 # hide the original source file name.
-#-renamesourcefileattribute SourceFile
\ No newline at end of file
+#-renamesourcefileattribute SourceFile
diff --git a/deploy/fastdeploy/android/app/src/androidTest/java/com/baidu/paddle/fastdeploy/ExampleInstrumentedTest.java b/deploy/fastdeploy/android/app/src/androidTest/java/com/baidu/paddle/fastdeploy/ExampleInstrumentedTest.java
index 0efacb790..42725022d 100644
--- a/deploy/fastdeploy/android/app/src/androidTest/java/com/baidu/paddle/fastdeploy/ExampleInstrumentedTest.java
+++ b/deploy/fastdeploy/android/app/src/androidTest/java/com/baidu/paddle/fastdeploy/ExampleInstrumentedTest.java
@@ -23,4 +23,4 @@ public class ExampleInstrumentedTest {
         Context appContext = InstrumentationRegistry.getInstrumentation().getTargetContext();
         assertEquals("com.baidu.paddle.fastdeploy", appContext.getPackageName());
     }
-}
\ No newline at end of file
+}
diff --git a/deploy/fastdeploy/android/app/src/main/AndroidManifest.xml b/deploy/fastdeploy/android/app/src/main/AndroidManifest.xml
index 8493c0379..bcd2ba816 100644
--- a/deploy/fastdeploy/android/app/src/main/AndroidManifest.xml
+++ b/deploy/fastdeploy/android/app/src/main/AndroidManifest.xml
@@ -27,4 +27,4 @@
         </activity>
     </application>
 
-</manifest>
\ No newline at end of file
+</manifest>
diff --git a/deploy/fastdeploy/android/app/src/main/java/com/baidu/paddle/fastdeploy/app/ui/layout/ActionBarLayout.java b/deploy/fastdeploy/android/app/src/main/java/com/baidu/paddle/fastdeploy/app/ui/layout/ActionBarLayout.java
index 099219fa9..5e1ff4b58 100644
--- a/deploy/fastdeploy/android/app/src/main/java/com/baidu/paddle/fastdeploy/app/ui/layout/ActionBarLayout.java
+++ b/deploy/fastdeploy/android/app/src/main/java/com/baidu/paddle/fastdeploy/app/ui/layout/ActionBarLayout.java
@@ -30,4 +30,4 @@ public class ActionBarLayout extends RelativeLayout {
         setBackgroundColor(Color.BLACK);
         setAlpha(0.9f);
     }
-}
\ No newline at end of file
+}
diff --git a/deploy/fastdeploy/android/app/src/main/res/drawable-v24/action_button_layer.xml b/deploy/fastdeploy/android/app/src/main/res/drawable-v24/action_button_layer.xml
index a0d2e76bf..2c00151f0 100644
--- a/deploy/fastdeploy/android/app/src/main/res/drawable-v24/action_button_layer.xml
+++ b/deploy/fastdeploy/android/app/src/main/res/drawable-v24/action_button_layer.xml
@@ -11,4 +11,4 @@
             <solid android:color="@color/bk_black" />
         </shape>
     </item>
-</layer-list>
\ No newline at end of file
+</layer-list>
diff --git a/deploy/fastdeploy/android/app/src/main/res/drawable-v24/album_btn.xml b/deploy/fastdeploy/android/app/src/main/res/drawable-v24/album_btn.xml
index 26d01c584..1174dc927 100644
--- a/deploy/fastdeploy/android/app/src/main/res/drawable-v24/album_btn.xml
+++ b/deploy/fastdeploy/android/app/src/main/res/drawable-v24/album_btn.xml
@@ -4,4 +4,4 @@
     <item android:state_focused="true" android:drawable="@drawable/album" />
     <item android:state_pressed="true" android:drawable="@drawable/album" />
     <item android:drawable="@drawable/album_pressed" />
-</selector>
\ No newline at end of file
+</selector>
diff --git a/deploy/fastdeploy/android/app/src/main/res/drawable-v24/realtime_start_btn.xml b/deploy/fastdeploy/android/app/src/main/res/drawable-v24/realtime_start_btn.xml
index 664134453..b17efa784 100644
--- a/deploy/fastdeploy/android/app/src/main/res/drawable-v24/realtime_start_btn.xml
+++ b/deploy/fastdeploy/android/app/src/main/res/drawable-v24/realtime_start_btn.xml
@@ -4,4 +4,4 @@
     <item android:state_focused="true" android:drawable="@drawable/realtime_start_pressed" />
     <item android:state_pressed="true" android:drawable="@drawable/realtime_start_pressed" />
     <item android:drawable="@drawable/realtime_start" />
-</selector>
\ No newline at end of file
+</selector>
diff --git a/deploy/fastdeploy/android/app/src/main/res/drawable-v24/realtime_stop_btn.xml b/deploy/fastdeploy/android/app/src/main/res/drawable-v24/realtime_stop_btn.xml
index 8869a1b2b..d671530b6 100644
--- a/deploy/fastdeploy/android/app/src/main/res/drawable-v24/realtime_stop_btn.xml
+++ b/deploy/fastdeploy/android/app/src/main/res/drawable-v24/realtime_stop_btn.xml
@@ -4,4 +4,4 @@
     <item android:state_focused="true" android:drawable="@drawable/realtime_stop_pressed" />
     <item android:state_pressed="true" android:drawable="@drawable/realtime_stop_pressed" />
     <item android:drawable="@drawable/realtime_stop" />
-</selector>
\ No newline at end of file
+</selector>
diff --git a/deploy/fastdeploy/android/app/src/main/res/drawable-v24/result_page_border_section_bk.xml b/deploy/fastdeploy/android/app/src/main/res/drawable-v24/result_page_border_section_bk.xml
index bd068f169..8c53dd753 100644
--- a/deploy/fastdeploy/android/app/src/main/res/drawable-v24/result_page_border_section_bk.xml
+++ b/deploy/fastdeploy/android/app/src/main/res/drawable-v24/result_page_border_section_bk.xml
@@ -9,4 +9,4 @@
                 android:color="#E5E5E5" />
         </shape>
     </item>
-</layer-list>
\ No newline at end of file
+</layer-list>
diff --git a/deploy/fastdeploy/android/app/src/main/res/drawable-v24/round_corner_btn.xml b/deploy/fastdeploy/android/app/src/main/res/drawable-v24/round_corner_btn.xml
index c5dcc45d5..c40d4fcf1 100644
--- a/deploy/fastdeploy/android/app/src/main/res/drawable-v24/round_corner_btn.xml
+++ b/deploy/fastdeploy/android/app/src/main/res/drawable-v24/round_corner_btn.xml
@@ -7,4 +7,4 @@
         android:topLeftRadius="25dp"
         android:topRightRadius="25dp"></corners>
     <solid android:color="#3B85F5"></solid>
-</shape>
\ No newline at end of file
+</shape>
diff --git a/deploy/fastdeploy/android/app/src/main/res/drawable-v24/seekbar_thumb.xml b/deploy/fastdeploy/android/app/src/main/res/drawable-v24/seekbar_thumb.xml
index 96bd95e0a..fce691ed4 100644
--- a/deploy/fastdeploy/android/app/src/main/res/drawable-v24/seekbar_thumb.xml
+++ b/deploy/fastdeploy/android/app/src/main/res/drawable-v24/seekbar_thumb.xml
@@ -6,4 +6,4 @@
         <item android:state_focused="false" android:state_pressed="true" android:drawable="@drawable/seekbar_thumb_shape" />
         <item android:drawable="@drawable/seekbar_thumb_shape" />
     </selector>
-</selector>
\ No newline at end of file
+</selector>
diff --git a/deploy/fastdeploy/android/app/src/main/res/drawable-v24/switch_side_btn.xml b/deploy/fastdeploy/android/app/src/main/res/drawable-v24/switch_side_btn.xml
index b9b2edfb6..7dd5f2f52 100644
--- a/deploy/fastdeploy/android/app/src/main/res/drawable-v24/switch_side_btn.xml
+++ b/deploy/fastdeploy/android/app/src/main/res/drawable-v24/switch_side_btn.xml
@@ -4,4 +4,4 @@
     <item android:state_focused="true" android:drawable="@drawable/switch_side_pressed" />
     <item android:state_pressed="true" android:drawable="@drawable/switch_side_pressed" />
     <item android:drawable="@drawable/switch_side" />
-</selector>
\ No newline at end of file
+</selector>
diff --git a/deploy/fastdeploy/android/app/src/main/res/drawable-v24/take_picture_btn.xml b/deploy/fastdeploy/android/app/src/main/res/drawable-v24/take_picture_btn.xml
index 4966675c3..f85032b04 100644
--- a/deploy/fastdeploy/android/app/src/main/res/drawable-v24/take_picture_btn.xml
+++ b/deploy/fastdeploy/android/app/src/main/res/drawable-v24/take_picture_btn.xml
@@ -4,4 +4,4 @@
     <item android:state_focused="true" android:drawable="@drawable/take_picture_pressed" />
     <item android:state_pressed="true" android:drawable="@drawable/take_picture_pressed" />
     <item android:drawable="@drawable/take_picture" />
-</selector>
\ No newline at end of file
+</selector>
diff --git a/deploy/fastdeploy/android/app/src/main/res/drawable/btn_settings.xml b/deploy/fastdeploy/android/app/src/main/res/drawable/btn_settings.xml
index 917897b99..adc761cbc 100644
--- a/deploy/fastdeploy/android/app/src/main/res/drawable/btn_settings.xml
+++ b/deploy/fastdeploy/android/app/src/main/res/drawable/btn_settings.xml
@@ -3,4 +3,3 @@
     <item android:state_pressed="true" android:drawable="@drawable/btn_settings_pressed"/>
     <item android:drawable="@drawable/btn_settings_default"/>
 </selector>
-
diff --git a/deploy/fastdeploy/android/app/src/main/res/layout/ocr_camera_page.xml b/deploy/fastdeploy/android/app/src/main/res/layout/ocr_camera_page.xml
index 6f31c2c7e..43a027e33 100644
--- a/deploy/fastdeploy/android/app/src/main/res/layout/ocr_camera_page.xml
+++ b/deploy/fastdeploy/android/app/src/main/res/layout/ocr_camera_page.xml
@@ -157,4 +157,4 @@
                 android:orientation="vertical"></LinearLayout>
         </LinearLayout>
     </RelativeLayout>
-</android.support.constraint.ConstraintLayout>
\ No newline at end of file
+</android.support.constraint.ConstraintLayout>
diff --git a/deploy/fastdeploy/android/app/src/main/res/layout/ocr_result_page.xml b/deploy/fastdeploy/android/app/src/main/res/layout/ocr_result_page.xml
index 958a85940..08e5bbf61 100644
--- a/deploy/fastdeploy/android/app/src/main/res/layout/ocr_result_page.xml
+++ b/deploy/fastdeploy/android/app/src/main/res/layout/ocr_result_page.xml
@@ -157,4 +157,4 @@
 
         </FrameLayout>
     </LinearLayout>
-</FrameLayout>
\ No newline at end of file
+</FrameLayout>
diff --git a/deploy/fastdeploy/android/app/src/main/res/layout/ocr_result_page_item.xml b/deploy/fastdeploy/android/app/src/main/res/layout/ocr_result_page_item.xml
index 6a2b09ebf..d05b7850c 100644
--- a/deploy/fastdeploy/android/app/src/main/res/layout/ocr_result_page_item.xml
+++ b/deploy/fastdeploy/android/app/src/main/res/layout/ocr_result_page_item.xml
@@ -23,4 +23,4 @@
         style="@style/list_result_view_item_style"
         android:layout_weight="0.2"
         android:layout_width="wrap_content" />
-</LinearLayout>
\ No newline at end of file
+</LinearLayout>
diff --git a/deploy/fastdeploy/android/app/src/main/res/mipmap-anydpi-v26/ic_launcher.xml b/deploy/fastdeploy/android/app/src/main/res/mipmap-anydpi-v26/ic_launcher.xml
index eca70cfe5..6b78462d6 100644
--- a/deploy/fastdeploy/android/app/src/main/res/mipmap-anydpi-v26/ic_launcher.xml
+++ b/deploy/fastdeploy/android/app/src/main/res/mipmap-anydpi-v26/ic_launcher.xml
@@ -2,4 +2,4 @@
 <adaptive-icon xmlns:android="http://schemas.android.com/apk/res/android">
     <background android:drawable="@drawable/ic_launcher_background" />
     <foreground android:drawable="@drawable/ic_launcher_foreground" />
-</adaptive-icon>
\ No newline at end of file
+</adaptive-icon>
diff --git a/deploy/fastdeploy/android/app/src/main/res/mipmap-anydpi-v26/ic_launcher_round.xml b/deploy/fastdeploy/android/app/src/main/res/mipmap-anydpi-v26/ic_launcher_round.xml
index eca70cfe5..6b78462d6 100644
--- a/deploy/fastdeploy/android/app/src/main/res/mipmap-anydpi-v26/ic_launcher_round.xml
+++ b/deploy/fastdeploy/android/app/src/main/res/mipmap-anydpi-v26/ic_launcher_round.xml
@@ -2,4 +2,4 @@
 <adaptive-icon xmlns:android="http://schemas.android.com/apk/res/android">
     <background android:drawable="@drawable/ic_launcher_background" />
     <foreground android:drawable="@drawable/ic_launcher_foreground" />
-</adaptive-icon>
\ No newline at end of file
+</adaptive-icon>
diff --git a/deploy/fastdeploy/android/app/src/main/res/values/arrays.xml b/deploy/fastdeploy/android/app/src/main/res/values/arrays.xml
index c7cf12378..cbe18cac7 100644
--- a/deploy/fastdeploy/android/app/src/main/res/values/arrays.xml
+++ b/deploy/fastdeploy/android/app/src/main/res/values/arrays.xml
@@ -36,4 +36,4 @@
         <item>true</item>
         <item>false</item>
     </string-array>
-</resources>
\ No newline at end of file
+</resources>
diff --git a/deploy/fastdeploy/android/app/src/main/res/values/values.xml b/deploy/fastdeploy/android/app/src/main/res/values/values.xml
index 156146d9a..940e97fd8 100644
--- a/deploy/fastdeploy/android/app/src/main/res/values/values.xml
+++ b/deploy/fastdeploy/android/app/src/main/res/values/values.xml
@@ -14,4 +14,4 @@
 
     <dimen name="result_list_gap_width">15dp</dimen>
 
-</resources>
\ No newline at end of file
+</resources>
diff --git a/deploy/fastdeploy/android/app/src/main/res/xml/ocr_settings.xml b/deploy/fastdeploy/android/app/src/main/res/xml/ocr_settings.xml
index 692b74b4c..f09fcb114 100644
--- a/deploy/fastdeploy/android/app/src/main/res/xml/ocr_settings.xml
+++ b/deploy/fastdeploy/android/app/src/main/res/xml/ocr_settings.xml
@@ -42,4 +42,4 @@
         android:negativeButtonText="@null"
         android:positiveButtonText="@null"
         android:title="Enable Lite FP16" />
-</PreferenceScreen>
\ No newline at end of file
+</PreferenceScreen>
diff --git a/deploy/fastdeploy/ascend/README.md b/deploy/fastdeploy/ascend/README.md
index 3e13de3ef..3827c21de 100644
--- a/deploy/fastdeploy/ascend/README.md
+++ b/deploy/fastdeploy/ascend/README.md
@@ -2,7 +2,7 @@
 
 # PaddleOCR 模型在华为昇腾上部署方案-FastDeploy
 
-## 1. 说明  
+## 1. 说明
 PaddleOCR支持通过FastDeploy在华为昇腾上部署相关模型
 
 ## 2. 支持模型列表
@@ -18,6 +18,6 @@ PaddleOCR支持通过FastDeploy在华为昇腾上部署相关模型
 | ch_PP-OCRv2_server |[ch_ppocr_server_v2.0_det](https://paddleocr.bj.bcebos.com/dygraph_v2.0/ch/ch_ppocr_server_v2.0_det_infer.tar) | [ch_ppocr_mobile_v2.0_cls](https://paddleocr.bj.bcebos.com/dygraph_v2.0/ch/ch_ppocr_mobile_v2.0_cls_infer.tar) | [ch_ppocr_server_v2.0_rec](https://paddleocr.bj.bcebos.com/dygraph_v2.0/ch/ch_ppocr_server_v2.0_rec_infer.tar) |[ppocr_keys_v1.txt](https://bj.bcebos.com/paddlehub/fastdeploy/ppocr_keys_v1.txt) | OCRv2服务器系列模型, 支持中英文、多语种文本检测,比超轻量模型更大,但效果更好|
 
 
-## 3. 详细部署的部署示例  
+## 3. 详细部署的部署示例
 - [Python部署](python)
 - [C++部署](cpp)
diff --git a/deploy/fastdeploy/ascend/cpp/README.md b/deploy/fastdeploy/ascend/cpp/README.md
index ed8d63a30..bd5d317de 100644
--- a/deploy/fastdeploy/ascend/cpp/README.md
+++ b/deploy/fastdeploy/ascend/cpp/README.md
@@ -52,7 +52,7 @@ wget https://gitee.com/paddlepaddle/PaddleOCR/raw/release/2.6/ppocr/utils/ppocr_
 
 运行完成可视化结果如下图所示
 
-<div  align="center">  
+<div  align="center">
 <img width="640" src="https://user-images.githubusercontent.com/109218879/185826024-f7593a0c-1bd2-4a60-b76c-15588484fa08.jpg">
 </div>
 
diff --git a/deploy/fastdeploy/ascend/python/README.md b/deploy/fastdeploy/ascend/python/README.md
index 13a0fb644..f4fb48996 100644
--- a/deploy/fastdeploy/ascend/python/README.md
+++ b/deploy/fastdeploy/ascend/python/README.md
@@ -41,7 +41,7 @@ python infer.py --det_model ch_PP-OCRv3_det_infer --cls_model ch_ppocr_mobile_v2
 
 运行完成可视化结果如下图所示
 
-<div  align="center">  
+<div  align="center">
 <img width="640" src="https://user-images.githubusercontent.com/109218879/185826024-f7593a0c-1bd2-4a60-b76c-15588484fa08.jpg">
 </div>
 
diff --git a/deploy/fastdeploy/cpu-gpu/README.md b/deploy/fastdeploy/cpu-gpu/README.md
index 69a8e3e7e..be0e8ea9e 100644
--- a/deploy/fastdeploy/cpu-gpu/README.md
+++ b/deploy/fastdeploy/cpu-gpu/README.md
@@ -2,7 +2,7 @@
 
 # PaddleOCR 模型在CPU与GPU上的部署方案-FastDeploy
 
-## 1. 说明  
+## 1. 说明
 PaddleOCR支持通过FastDeploy在NVIDIA GPU、X86 CPU、飞腾CPU、ARM CPU、Intel GPU(独立显卡/集成显卡)硬件上快速部署PaddleOCR系列模型
 
 ## 2. 支持的PaddleOCR推理模型
@@ -19,7 +19,7 @@ PaddleOCR支持通过FastDeploy在NVIDIA GPU、X86 CPU、飞腾CPU、ARM CPU、I
 | ch_PP-OCRv2_server |[ch_ppocr_server_v2.0_det](https://paddleocr.bj.bcebos.com/dygraph_v2.0/ch/ch_ppocr_server_v2.0_det_infer.tar) | [ch_ppocr_mobile_v2.0_cls](https://paddleocr.bj.bcebos.com/dygraph_v2.0/ch/ch_ppocr_mobile_v2.0_cls_infer.tar) | [ch_ppocr_server_v2.0_rec](https://paddleocr.bj.bcebos.com/dygraph_v2.0/ch/ch_ppocr_server_v2.0_rec_infer.tar) |[ppocr_keys_v1.txt](https://bj.bcebos.com/paddlehub/fastdeploy/ppocr_keys_v1.txt) | OCRv2服务器系列模型, 支持中英文、多语种文本检测,比超轻量模型更大,但效果更好|
 
 
-## 3. 详细部署的部署示例  
+## 3. 详细部署的部署示例
 - [Python部署](python)
 - [C++部署](cpp)
 - [C部署](c)
diff --git a/deploy/fastdeploy/cpu-gpu/c/README.md b/deploy/fastdeploy/cpu-gpu/c/README.md
index 7c5863773..b2245c8c5 100755
--- a/deploy/fastdeploy/cpu-gpu/c/README.md
+++ b/deploy/fastdeploy/cpu-gpu/c/README.md
@@ -3,10 +3,10 @@
 
 本目录下提供`infer.c`来调用C API快速完成PP-OCRv3模型在CPU/GPU上部署的示例。
 
-## 1. 说明  
+## 1. 说明
 PaddleOCR支持利用FastDeploy在NVIDIA GPU、X86 CPU、飞腾CPU、ARM CPU、Intel GPU(独立显卡/集成显卡)硬件上快速部署OCR模型.
 
-## 2. 部署环境准备  
+## 2. 部署环境准备
 在部署前,需确认软硬件环境,同时下载预编译部署库,参考[FastDeploy安装文档](https://github.com/PaddlePaddle/FastDeploy/blob/develop/docs/cn/build_and_install#FastDeploy预编译库安装)安装FastDeploy预编译库.
 以Linux上推理为例,在本目录执行如下命令即可完成编译测试,支持此模型需保证FastDeploy版本1.0.4以上(x.x.x>=1.0.4)
 
@@ -55,7 +55,7 @@ wget https://gitee.com/paddlepaddle/PaddleOCR/raw/release/2.6/ppocr/utils/ppocr_
 # 在GPU上使用Paddle Inference推理
 ./infer_demo ./ch_PP-OCRv3_det_infer ./ch_ppocr_mobile_v2.0_cls_infer ./ch_PP-OCRv3_rec_infer ./ppocr_keys_v1.txt ./12.jpg 1
 ```
-以上命令只适用于Linux或MacOS, Windows下SDK的使用方式请参考:  
+以上命令只适用于Linux或MacOS, Windows下SDK的使用方式请参考:
 - [如何在Windows中使用FastDeploy C++ SDK](../../../../../docs/cn/faq/use_sdk_on_windows.md)
 
 
diff --git a/deploy/fastdeploy/cpu-gpu/cpp/README.md b/deploy/fastdeploy/cpu-gpu/cpp/README.md
index 4481f49be..4c6eb353b 100644
--- a/deploy/fastdeploy/cpu-gpu/cpp/README.md
+++ b/deploy/fastdeploy/cpu-gpu/cpp/README.md
@@ -2,10 +2,10 @@
 # PaddleOCR CPU-GPU C++部署示例
 
 本目录下提供`infer.cc`快速完成PP-OCRv3在CPU/GPU,以及GPU上通过Paddle-TensorRT加速部署的示例.
-## 1. 说明  
+## 1. 说明
 PaddleOCR支持利用FastDeploy在NVIDIA GPU、X86 CPU、飞腾CPU、ARM CPU、Intel GPU(独立显卡/集成显卡)硬件上快速部署OCR模型.
 
-## 2. 部署环境准备  
+## 2. 部署环境准备
 在部署前,需确认软硬件环境,同时下载预编译部署库,参考[FastDeploy安装文档](https://github.com/PaddlePaddle/FastDeploy/blob/develop/docs/cn/build_and_install#FastDeploy预编译库安装)安装FastDeploy预编译库.
 
 ## 3. 部署模型准备
@@ -14,7 +14,7 @@ PaddleOCR支持利用FastDeploy在NVIDIA GPU、X86 CPU、飞腾CPU、ARM CPU、I
 ## 4. 运行部署示例
 以Linux上推理为例,在本目录执行如下命令即可完成编译测试,支持此模型需保证FastDeploy版本1.0.0以上(x.x.x>=1.0.0)
 
-```bash  
+```bash
 # 下载部署示例代码
 git clone https://github.com/PaddlePaddle/FastDeploy.git
 cd  FastDeploy/examples/vision/ocr/PP-OCR/cpu-gpu/cpp
@@ -80,14 +80,14 @@ wget https://gitee.com/paddlepaddle/PaddleOCR/raw/release/2.6/ppocr/utils/ppocr_
 ```
 
 运行完成可视化结果如下图所示
-<div  align="center">  
+<div  align="center">
 <img width="640" src="https://user-images.githubusercontent.com/109218879/185826024-f7593a0c-1bd2-4a60-b76c-15588484fa08.jpg">
 </div>
 
-- 注意,以上命令只适用于Linux或MacOS, Windows下SDK的使用方式请参考文档: [如何在Windows中使用FastDeploy C++ SDK](https://github.com/PaddlePaddle/FastDeploy/blob/develop/docs/cn/faq/use_sdk_on_windows.md)  
+- 注意,以上命令只适用于Linux或MacOS, Windows下SDK的使用方式请参考文档: [如何在Windows中使用FastDeploy C++ SDK](https://github.com/PaddlePaddle/FastDeploy/blob/develop/docs/cn/faq/use_sdk_on_windows.md)
 - 关于如何通过FastDeploy使用更多不同的推理后端,以及如何使用不同的硬件,请参考文档:[如何切换模型推理后端引擎](https://github.com/PaddlePaddle/FastDeploy/blob/develop/docs/cn/faq/how_to_change_backend.md)
 
-## 5. 部署示例选项说明  
+## 5. 部署示例选项说明
 在我们使用`infer_demo`时, 输入了6个参数, 分别为文字检测模型, 文字分类模型, 文字识别模型, 预测图片, 字典文件与最后一位的数字选项.
 现在下表将解释最后一位数字选项的含义.
 |数字选项|含义|
diff --git a/deploy/fastdeploy/cpu-gpu/cpp/infer_cls.cc b/deploy/fastdeploy/cpu-gpu/cpp/infer_cls.cc
index 789c2a9f3..30687fe49 100644
--- a/deploy/fastdeploy/cpu-gpu/cpp/infer_cls.cc
+++ b/deploy/fastdeploy/cpu-gpu/cpp/infer_cls.cc
@@ -76,4 +76,4 @@ int main(int argc, char *argv[]) {
   std::string test_image = argv[2];
   InitAndInfer(cls_model_dir, test_image, option);
   return 0;
-}
\ No newline at end of file
+}
diff --git a/deploy/fastdeploy/cpu-gpu/csharp/README.md b/deploy/fastdeploy/cpu-gpu/csharp/README.md
index 3a87730e1..c2c6d290f 100755
--- a/deploy/fastdeploy/cpu-gpu/csharp/README.md
+++ b/deploy/fastdeploy/cpu-gpu/csharp/README.md
@@ -3,10 +3,10 @@
 
 本目录下提供`infer.cs`来调用C# API快速完成PPOCRv3模型在CPU/GPU上部署的示例。
 
-## 1. 说明  
+## 1. 说明
 PaddleOCR支持利用FastDeploy在NVIDIA GPU、X86 CPU、飞腾CPU、ARM CPU、Intel GPU(独立显卡/集成显卡)硬件上快速部署OCR模型.
 
-## 2. 部署环境准备  
+## 2. 部署环境准备
 在部署前,需确认软硬件环境,同时下载预编译部署库,参考[FastDeploy安装文档](https://github.com/PaddlePaddle/FastDeploy/blob/develop/docs/cn/build_and_install#FastDeploy预编译库安装)安装FastDeploy预编译库. 在本目录执行如下命令即可在Windows完成编译测试,支持此模型需保证FastDeploy版本1.0.4以上(x.x.x>=1.0.4)
 
 ## 3. 部署模型准备
diff --git a/deploy/fastdeploy/cpu-gpu/csharp/infer.cs b/deploy/fastdeploy/cpu-gpu/csharp/infer.cs
index 962500e08..fec48d16e 100644
--- a/deploy/fastdeploy/cpu-gpu/csharp/infer.cs
+++ b/deploy/fastdeploy/cpu-gpu/csharp/infer.cs
@@ -76,4 +76,4 @@ namespace Test
         }
 
     }
-}
\ No newline at end of file
+}
diff --git a/deploy/fastdeploy/cpu-gpu/python/README.md b/deploy/fastdeploy/cpu-gpu/python/README.md
index d8143e028..fd7dbc579 100644
--- a/deploy/fastdeploy/cpu-gpu/python/README.md
+++ b/deploy/fastdeploy/cpu-gpu/python/README.md
@@ -1,11 +1,11 @@
-[English](README.md) | 简体中文  
+[English](README.md) | 简体中文
 # PaddleOCR CPU-GPU Python部署示例
 本目录下提供`infer.py`快速完成PP-OCRv3在CPU/GPU,以及GPU上通过Paddle-TensorRT加速部署的示例.
 
-## 1. 说明  
+## 1. 说明
 PaddleOCR支持利用FastDeploy在NVIDIA GPU、X86 CPU、飞腾CPU、ARM CPU、Intel GPU(独立显卡/集成显卡)硬件上快速部署OCR模型
 
-## 2. 部署环境准备  
+## 2. 部署环境准备
 在部署前,需确认软硬件环境,同时下载预编译部署库,参考[FastDeploy安装文档](https://github.com/PaddlePaddle/FastDeploy/blob/develop/docs/cn/build_and_install#FastDeploy预编译库安装)安装FastDeploy预编译库.
 
 ## 3. 部署模型准备
@@ -74,19 +74,19 @@ python infer_rec.py  --rec_model ch_PP-OCRv3_rec_infer --rec_label_file ppocr_ke
 ```
 
 运行完成可视化结果如下图所示
-<div  align="center">  
+<div  align="center">
 <img width="640" src="https://user-images.githubusercontent.com/109218879/185826024-f7593a0c-1bd2-4a60-b76c-15588484fa08.jpg">
 </div>
 
-## 5. 部署示例选项说明  
+## 5. 部署示例选项说明
 
 |参数|含义|默认值
-|---|---|---|  
+|---|---|---|
 |--det_model|指定检测模型文件夹所在的路径|None|
 |--cls_model|指定分类模型文件夹所在的路径|None|
 |--rec_model|指定识别模型文件夹所在的路径|None|
 |--rec_label_file|识别模型所需label所在的路径|None|
-|--image|指定测试图片所在的路径|None|  
+|--image|指定测试图片所在的路径|None|
 |--device|指定即将运行的硬件类型,支持的值为`[cpu, gpu]`,当设置为cpu时,可运行在x86 cpu/arm cpu等cpu上|cpu|
 |--device_id|使用gpu时, 指定设备号|0|
 |--backend|部署模型时使用的后端, 支持的值为`[paddle,pptrt,pplite,ort,openvino,trt]` |paddle|
diff --git a/deploy/fastdeploy/kunlunxin/README.md b/deploy/fastdeploy/kunlunxin/README.md
index 16487674c..db2ca542e 100644
--- a/deploy/fastdeploy/kunlunxin/README.md
+++ b/deploy/fastdeploy/kunlunxin/README.md
@@ -2,7 +2,7 @@
 
 # PaddleOCR 在昆仑芯上部署方案-FastDeploy
 
-## 1. 说明  
+## 1. 说明
 PaddleOCR支持利用FastDeploy在昆仑芯片上部署模型.
 
 支持如下芯片的部署
@@ -27,6 +27,6 @@ PaddleOCR支持利用FastDeploy在昆仑芯片上部署模型.
 | ch_PP-OCRv2_server |[ch_ppocr_server_v2.0_det](https://paddleocr.bj.bcebos.com/dygraph_v2.0/ch/ch_ppocr_server_v2.0_det_infer.tar) | [ch_ppocr_mobile_v2.0_cls](https://paddleocr.bj.bcebos.com/dygraph_v2.0/ch/ch_ppocr_mobile_v2.0_cls_infer.tar) | [ch_ppocr_server_v2.0_rec](https://paddleocr.bj.bcebos.com/dygraph_v2.0/ch/ch_ppocr_server_v2.0_rec_infer.tar) |[ppocr_keys_v1.txt](https://bj.bcebos.com/paddlehub/fastdeploy/ppocr_keys_v1.txt) | OCRv2服务器系列模型, 支持中英文、多语种文本检测,比超轻量模型更大,但效果更好|
 
 
-## 3. 详细部署的部署示例  
+## 3. 详细部署的部署示例
 - [Python部署](python)
 - [C++部署](cpp)
diff --git a/deploy/fastdeploy/kunlunxin/cpp/README.md b/deploy/fastdeploy/kunlunxin/cpp/README.md
index 3725a807e..ae35700c4 100644
--- a/deploy/fastdeploy/kunlunxin/cpp/README.md
+++ b/deploy/fastdeploy/kunlunxin/cpp/README.md
@@ -47,7 +47,7 @@ wget https://gitee.com/paddlepaddle/PaddleOCR/raw/release/2.6/ppocr/utils/ppocr_
 
 运行完成可视化结果如下图所示
 
-<div  align="center">  
+<div  align="center">
 <img width="640" src="https://user-images.githubusercontent.com/109218879/185826024-f7593a0c-1bd2-4a60-b76c-15588484fa08.jpg">
 </div>
 
diff --git a/deploy/fastdeploy/kunlunxin/python/README.md b/deploy/fastdeploy/kunlunxin/python/README.md
index 724fad271..2c8929de4 100644
--- a/deploy/fastdeploy/kunlunxin/python/README.md
+++ b/deploy/fastdeploy/kunlunxin/python/README.md
@@ -40,7 +40,7 @@ python infer.py --det_model ch_PP-OCRv3_det_infer --cls_model ch_ppocr_mobile_v2
 
 运行完成可视化结果如下图所示
 
-<div  align="center">  
+<div  align="center">
 <img width="640" src="https://user-images.githubusercontent.com/109218879/185826024-f7593a0c-1bd2-4a60-b76c-15588484fa08.jpg">
 </div>
 
diff --git a/deploy/fastdeploy/rockchip/README.md b/deploy/fastdeploy/rockchip/README.md
index b38f7f896..aafcd1140 100644
--- a/deploy/fastdeploy/rockchip/README.md
+++ b/deploy/fastdeploy/rockchip/README.md
@@ -2,7 +2,7 @@
 
 # PaddleOCR 模型在RKNPU2上部署方案-FastDeploy
 
-## 1. 说明  
+## 1. 说明
 PaddleOCR支持通过FastDeploy在RKNPU2上部署相关模型.
 
 ## 2. 支持模型列表
@@ -18,6 +18,6 @@ PaddleOCR支持通过FastDeploy在RKNPU2上部署相关模型.
 | ch_PP-OCRv2_server |[ch_ppocr_server_v2.0_det](https://paddleocr.bj.bcebos.com/dygraph_v2.0/ch/ch_ppocr_server_v2.0_det_infer.tar) | [ch_ppocr_mobile_v2.0_cls](https://paddleocr.bj.bcebos.com/dygraph_v2.0/ch/ch_ppocr_mobile_v2.0_cls_infer.tar) | [ch_ppocr_server_v2.0_rec](https://paddleocr.bj.bcebos.com/dygraph_v2.0/ch/ch_ppocr_server_v2.0_rec_infer.tar) |[ppocr_keys_v1.txt](https://bj.bcebos.com/paddlehub/fastdeploy/ppocr_keys_v1.txt) | OCRv2服务器系列模型, 支持中英文、多语种文本检测,比超轻量模型更大,但效果更好|
 
 
-## 3. 详细部署的部署示例  
+## 3. 详细部署的部署示例
 - [Python部署](python)
 - [C++部署](cpp)
diff --git a/deploy/fastdeploy/serving/fastdeploy_serving/models/det_runtime/config.pbtxt b/deploy/fastdeploy/serving/fastdeploy_serving/models/det_runtime/config.pbtxt
index 96d85e3e1..8c4c30bab 100755
--- a/deploy/fastdeploy/serving/fastdeploy_serving/models/det_runtime/config.pbtxt
+++ b/deploy/fastdeploy/serving/fastdeploy_serving/models/det_runtime/config.pbtxt
@@ -49,4 +49,4 @@ optimization {
       }
     ]
   }
-}
\ No newline at end of file
+}
diff --git a/deploy/fastdeploy/serving/fastdeploy_serving/models/rec_runtime/config.pbtxt b/deploy/fastdeploy/serving/fastdeploy_serving/models/rec_runtime/config.pbtxt
index 037d7a9f2..4c630c465 100755
--- a/deploy/fastdeploy/serving/fastdeploy_serving/models/rec_runtime/config.pbtxt
+++ b/deploy/fastdeploy/serving/fastdeploy_serving/models/rec_runtime/config.pbtxt
@@ -49,4 +49,4 @@ optimization {
       }
     ]
   }
-}
\ No newline at end of file
+}
diff --git a/deploy/fastdeploy/sophgo/README.md b/deploy/fastdeploy/sophgo/README.md
index 9fd2e9563..30a89f418 100644
--- a/deploy/fastdeploy/sophgo/README.md
+++ b/deploy/fastdeploy/sophgo/README.md
@@ -2,7 +2,7 @@
 
 # PaddleOCR 模型在SOPHGO上部署方案-FastDeploy
 
-## 1. 说明  
+## 1. 说明
 PaddleOCR支持通过FastDeploy在SOPHGO上部署相关模型.
 
 ## 2.支持模型列表
@@ -19,7 +19,7 @@ PaddleOCR支持通过FastDeploy在SOPHGO上部署相关模型.
 
 ## 3. 准备PP-OCR推理模型以及转换模型
 
-PP-OCRv3包括文本检测模型(ch_PP-OCRv3_det)、方向分类模型(ch_ppocr_mobile_v2.0_cls)、文字识别模型(ch_PP-OCRv3_rec)  
+PP-OCRv3包括文本检测模型(ch_PP-OCRv3_det)、方向分类模型(ch_ppocr_mobile_v2.0_cls)、文字识别模型(ch_PP-OCRv3_rec)
 SOPHGO-TPU部署模型前需要将以上Paddle模型转换成bmodel模型,我们以ch_PP-OCRv3_det模型为例,具体步骤如下:
 - 下载Paddle模型[ch_PP-OCRv3_det](https://paddleocr.bj.bcebos.com/PP-OCRv3/chinese/ch_PP-OCRv3_det_infer.tar)
 - Pddle模型转换为ONNX模型,请参考[Paddle2ONNX](https://github.com/PaddlePaddle/Paddle2ONNX)
@@ -97,6 +97,6 @@ model_deploy.py \
 最终获得可以在BM1684x上能够运行的bmodel模型ch_PP-OCRv3_det_1684x_f32.bmodel。按照上面同样的方法,可以将ch_ppocr_mobile_v2.0_cls,ch_PP-OCRv3_rec转换为bmodel的格式。如果需要进一步对模型进行加速,可以将ONNX模型转换为INT8 bmodel,具体步骤参见[TPU-MLIR文档](https://github.com/sophgo/tpu-mlir/blob/master/README.md)。
 
 
-## 4. 详细部署的部署示例  
+## 4. 详细部署的部署示例
 - [Python部署](python)
 - [C++部署](cpp)
diff --git a/deploy/fastdeploy/sophgo/python/README.md b/deploy/fastdeploy/sophgo/python/README.md
index 27dbe2694..1f4db9f9e 100644
--- a/deploy/fastdeploy/sophgo/python/README.md
+++ b/deploy/fastdeploy/sophgo/python/README.md
@@ -10,7 +10,7 @@
 ## 2.运行部署示例
 
 ### 2.1 模型准备
-将Paddle模型转换为SOPHGO bmodel模型, 转换步骤参考[文档](../README.md)  
+将Paddle模型转换为SOPHGO bmodel模型, 转换步骤参考[文档](../README.md)
 
 ### 2.2 开始部署
 ```bash
@@ -34,7 +34,7 @@ wget https://gitee.com/paddlepaddle/PaddleOCR/raw/release/2.6/ppocr/utils/ppocr_
 python3 infer.py --det_model ocr_bmodel/ch_PP-OCRv3_det_1684x_f32.bmodel \
                  --cls_model ocr_bmodel/ch_ppocr_mobile_v2.0_cls_1684x_f32.bmodel \
                  --rec_model ocr_bmodel/ch_PP-OCRv3_rec_1684x_f32.bmodel \
-                 --rec_label_file ../ppocr_keys_v1.txt \  
+                 --rec_label_file ../ppocr_keys_v1.txt \
                  --image ../12.jpg
 
 # 运行完成后返回结果如下所示
diff --git a/deploy/fastdeploy/sophgo/python/infer.py b/deploy/fastdeploy/sophgo/python/infer.py
index c92ee7ad1..df8a93c3c 100644
--- a/deploy/fastdeploy/sophgo/python/infer.py
+++ b/deploy/fastdeploy/sophgo/python/infer.py
@@ -1,119 +1,119 @@
-import fastdeploy as fd
-import cv2
-import os
-
-
-def parse_arguments():
-    import argparse
-    import ast
-
-    parser = argparse.ArgumentParser()
-    parser.add_argument(
-        "--det_model", required=True, help="Path of Detection model of PPOCR."
-    )
-    parser.add_argument(
-        "--cls_model", required=True, help="Path of Classification model of PPOCR."
-    )
-    parser.add_argument(
-        "--rec_model", required=True, help="Path of Recognization model of PPOCR."
-    )
-    parser.add_argument(
-        "--rec_label_file", required=True, help="Path of Recognization label of PPOCR."
-    )
-    parser.add_argument(
-        "--image", type=str, required=True, help="Path of test image file."
-    )
-
-    return parser.parse_args()
-
-
-args = parse_arguments()
-
-# 配置runtime,加载模型
-runtime_option = fd.RuntimeOption()
-runtime_option.use_sophgo()
-
-# Detection模型, 检测文字框
-det_model_file = args.det_model
-det_params_file = ""
-# Classification模型,方向分类,可选
-cls_model_file = args.cls_model
-cls_params_file = ""
-# Recognition模型,文字识别模型
-rec_model_file = args.rec_model
-rec_params_file = ""
-rec_label_file = args.rec_label_file
-
-# PPOCR的cls和rec模型现在已经支持推理一个Batch的数据
-# 定义下面两个变量后, 可用于设置trt输入shape, 并在PPOCR模型初始化后, 完成Batch推理设置
-cls_batch_size = 1
-rec_batch_size = 1
-
-# 当使用TRT时,分别给三个模型的runtime设置动态shape,并完成模型的创建.
-# 注意: 需要在检测模型创建完成后,再设置分类模型的动态输入并创建分类模型, 识别模型同理.
-# 如果用户想要自己改动检测模型的输入shape, 我们建议用户把检测模型的长和高设置为32的倍数.
-det_option = runtime_option
-det_option.set_trt_input_shape("x", [1, 3, 64, 64], [1, 3, 640, 640], [1, 3, 960, 960])
-# 用户可以把TRT引擎文件保存至本地
-# det_option.set_trt_cache_file(args.det_model  + "/det_trt_cache.trt")
-det_model = fd.vision.ocr.DBDetector(
-    det_model_file,
-    det_params_file,
-    runtime_option=det_option,
-    model_format=fd.ModelFormat.SOPHGO,
-)
-
-cls_option = runtime_option
-cls_option.set_trt_input_shape(
-    "x", [1, 3, 48, 10], [cls_batch_size, 3, 48, 320], [cls_batch_size, 3, 48, 1024]
-)
-# 用户可以把TRT引擎文件保存至本地
-# cls_option.set_trt_cache_file(args.cls_model  + "/cls_trt_cache.trt")
-cls_model = fd.vision.ocr.Classifier(
-    cls_model_file,
-    cls_params_file,
-    runtime_option=cls_option,
-    model_format=fd.ModelFormat.SOPHGO,
-)
-
-rec_option = runtime_option
-rec_option.set_trt_input_shape(
-    "x", [1, 3, 48, 10], [rec_batch_size, 3, 48, 320], [rec_batch_size, 3, 48, 2304]
-)
-# 用户可以把TRT引擎文件保存至本地
-# rec_option.set_trt_cache_file(args.rec_model  + "/rec_trt_cache.trt")
-rec_model = fd.vision.ocr.Recognizer(
-    rec_model_file,
-    rec_params_file,
-    rec_label_file,
-    runtime_option=rec_option,
-    model_format=fd.ModelFormat.SOPHGO,
-)
-
-# 创建PP-OCR,串联3个模型,其中cls_model可选,如无需求,可设置为None
-ppocr_v3 = fd.vision.ocr.PPOCRv3(
-    det_model=det_model, cls_model=cls_model, rec_model=rec_model
-)
-
-# 需要使用下行代码, 来启用rec模型的静态shape推理,这里rec模型的静态输入为[3, 48, 584]
-rec_model.preprocessor.static_shape_infer = True
-rec_model.preprocessor.rec_image_shape = [3, 48, 584]
-
-# 给cls和rec模型设置推理时的batch size
-# 此值能为-1, 和1到正无穷
-# 当此值为-1时, cls和rec模型的batch size将默认和det模型检测出的框的数量相同
-ppocr_v3.cls_batch_size = cls_batch_size
-ppocr_v3.rec_batch_size = rec_batch_size
-
-# 预测图片准备
-im = cv2.imread(args.image)
-
-# 预测并打印结果
-result = ppocr_v3.predict(im)
-
-print(result)
-
-# 可视化结果
-vis_im = fd.vision.vis_ppocr(im, result)
-cv2.imwrite("sophgo_result.jpg", vis_im)
-print("Visualized result save in ./sophgo_result.jpg")
+import fastdeploy as fd
+import cv2
+import os
+
+
+def parse_arguments():
+    import argparse
+    import ast
+
+    parser = argparse.ArgumentParser()
+    parser.add_argument(
+        "--det_model", required=True, help="Path of Detection model of PPOCR."
+    )
+    parser.add_argument(
+        "--cls_model", required=True, help="Path of Classification model of PPOCR."
+    )
+    parser.add_argument(
+        "--rec_model", required=True, help="Path of Recognization model of PPOCR."
+    )
+    parser.add_argument(
+        "--rec_label_file", required=True, help="Path of Recognization label of PPOCR."
+    )
+    parser.add_argument(
+        "--image", type=str, required=True, help="Path of test image file."
+    )
+
+    return parser.parse_args()
+
+
+args = parse_arguments()
+
+# 配置runtime,加载模型
+runtime_option = fd.RuntimeOption()
+runtime_option.use_sophgo()
+
+# Detection模型, 检测文字框
+det_model_file = args.det_model
+det_params_file = ""
+# Classification模型,方向分类,可选
+cls_model_file = args.cls_model
+cls_params_file = ""
+# Recognition模型,文字识别模型
+rec_model_file = args.rec_model
+rec_params_file = ""
+rec_label_file = args.rec_label_file
+
+# PPOCR的cls和rec模型现在已经支持推理一个Batch的数据
+# 定义下面两个变量后, 可用于设置trt输入shape, 并在PPOCR模型初始化后, 完成Batch推理设置
+cls_batch_size = 1
+rec_batch_size = 1
+
+# 当使用TRT时,分别给三个模型的runtime设置动态shape,并完成模型的创建.
+# 注意: 需要在检测模型创建完成后,再设置分类模型的动态输入并创建分类模型, 识别模型同理.
+# 如果用户想要自己改动检测模型的输入shape, 我们建议用户把检测模型的长和高设置为32的倍数.
+det_option = runtime_option
+det_option.set_trt_input_shape("x", [1, 3, 64, 64], [1, 3, 640, 640], [1, 3, 960, 960])
+# 用户可以把TRT引擎文件保存至本地
+# det_option.set_trt_cache_file(args.det_model  + "/det_trt_cache.trt")
+det_model = fd.vision.ocr.DBDetector(
+    det_model_file,
+    det_params_file,
+    runtime_option=det_option,
+    model_format=fd.ModelFormat.SOPHGO,
+)
+
+cls_option = runtime_option
+cls_option.set_trt_input_shape(
+    "x", [1, 3, 48, 10], [cls_batch_size, 3, 48, 320], [cls_batch_size, 3, 48, 1024]
+)
+# 用户可以把TRT引擎文件保存至本地
+# cls_option.set_trt_cache_file(args.cls_model  + "/cls_trt_cache.trt")
+cls_model = fd.vision.ocr.Classifier(
+    cls_model_file,
+    cls_params_file,
+    runtime_option=cls_option,
+    model_format=fd.ModelFormat.SOPHGO,
+)
+
+rec_option = runtime_option
+rec_option.set_trt_input_shape(
+    "x", [1, 3, 48, 10], [rec_batch_size, 3, 48, 320], [rec_batch_size, 3, 48, 2304]
+)
+# 用户可以把TRT引擎文件保存至本地
+# rec_option.set_trt_cache_file(args.rec_model  + "/rec_trt_cache.trt")
+rec_model = fd.vision.ocr.Recognizer(
+    rec_model_file,
+    rec_params_file,
+    rec_label_file,
+    runtime_option=rec_option,
+    model_format=fd.ModelFormat.SOPHGO,
+)
+
+# 创建PP-OCR,串联3个模型,其中cls_model可选,如无需求,可设置为None
+ppocr_v3 = fd.vision.ocr.PPOCRv3(
+    det_model=det_model, cls_model=cls_model, rec_model=rec_model
+)
+
+# 需要使用下行代码, 来启用rec模型的静态shape推理,这里rec模型的静态输入为[3, 48, 584]
+rec_model.preprocessor.static_shape_infer = True
+rec_model.preprocessor.rec_image_shape = [3, 48, 584]
+
+# 给cls和rec模型设置推理时的batch size
+# 此值能为-1, 和1到正无穷
+# 当此值为-1时, cls和rec模型的batch size将默认和det模型检测出的框的数量相同
+ppocr_v3.cls_batch_size = cls_batch_size
+ppocr_v3.rec_batch_size = rec_batch_size
+
+# 预测图片准备
+im = cv2.imread(args.image)
+
+# 预测并打印结果
+result = ppocr_v3.predict(im)
+
+print(result)
+
+# 可视化结果
+vis_im = fd.vision.vis_ppocr(im, result)
+cv2.imwrite("sophgo_result.jpg", vis_im)
+print("Visualized result save in ./sophgo_result.jpg")
diff --git a/deploy/hubserving/kie_ser/config.json b/deploy/hubserving/kie_ser/config.json
index b93a5f716..17b00d9f6 100644
--- a/deploy/hubserving/kie_ser/config.json
+++ b/deploy/hubserving/kie_ser/config.json
@@ -13,4 +13,3 @@
     "use_multiprocess": false,
     "workers": 2
 }
-
diff --git a/deploy/hubserving/kie_ser_re/config.json b/deploy/hubserving/kie_ser_re/config.json
index 4d796a860..abc78975b 100644
--- a/deploy/hubserving/kie_ser_re/config.json
+++ b/deploy/hubserving/kie_ser_re/config.json
@@ -13,4 +13,3 @@
     "use_multiprocess": false,
     "workers": 2
 }
-
diff --git a/deploy/hubserving/ocr_system/config.json b/deploy/hubserving/ocr_system/config.json
index 48e7e1542..fbde31d06 100644
--- a/deploy/hubserving/ocr_system/config.json
+++ b/deploy/hubserving/ocr_system/config.json
@@ -13,4 +13,3 @@
     "use_multiprocess": false,
     "workers": 2
 }
-
diff --git a/deploy/hubserving/structure_layout/config.json b/deploy/hubserving/structure_layout/config.json
index bc52c1ab6..43a598304 100644
--- a/deploy/hubserving/structure_layout/config.json
+++ b/deploy/hubserving/structure_layout/config.json
@@ -13,4 +13,3 @@
     "use_multiprocess": false,
     "workers": 2
 }
-
diff --git a/deploy/hubserving/structure_system/config.json b/deploy/hubserving/structure_system/config.json
index 642aa94a2..7b6c16687 100644
--- a/deploy/hubserving/structure_system/config.json
+++ b/deploy/hubserving/structure_system/config.json
@@ -13,4 +13,3 @@
     "use_multiprocess": false,
     "workers": 2
 }
-
diff --git a/deploy/hubserving/structure_table/config.json b/deploy/hubserving/structure_table/config.json
index d0e3cb152..9ce15fc48 100644
--- a/deploy/hubserving/structure_table/config.json
+++ b/deploy/hubserving/structure_table/config.json
@@ -13,4 +13,3 @@
     "use_multiprocess": false,
     "workers": 2
 }
-
diff --git a/deploy/lite/cls_process.cc b/deploy/lite/cls_process.cc
index 9f5c3e940..591347f1e 100644
--- a/deploy/lite/cls_process.cc
+++ b/deploy/lite/cls_process.cc
@@ -40,4 +40,4 @@ cv::Mat ClsResizeImg(cv::Mat img) {
                        cv::BORDER_CONSTANT, cv::Scalar(0, 0, 0));
   }
   return resize_img;
-}
\ No newline at end of file
+}
diff --git a/deploy/lite/cls_process.h b/deploy/lite/cls_process.h
index eedeeb9ba..7fb8ab3c2 100644
--- a/deploy/lite/cls_process.h
+++ b/deploy/lite/cls_process.h
@@ -26,4 +26,4 @@
 #include "opencv2/imgcodecs.hpp"
 #include "opencv2/imgproc.hpp"
 
-cv::Mat ClsResizeImg(cv::Mat img);
\ No newline at end of file
+cv::Mat ClsResizeImg(cv::Mat img);
diff --git a/deploy/lite/config.txt b/deploy/lite/config.txt
index 404249323..ecbf7ad5b 100644
--- a/deploy/lite/config.txt
+++ b/deploy/lite/config.txt
@@ -5,4 +5,4 @@ det_db_unclip_ratio  1.6
 det_db_use_dilate 0
 det_use_polygon_score 1
 use_direction_classify  1
-rec_image_height  48
\ No newline at end of file
+rec_image_height  48
diff --git a/deploy/lite/readme.md b/deploy/lite/readme.md
index fc91cbfa7..810a5854b 100644
--- a/deploy/lite/readme.md
+++ b/deploy/lite/readme.md
@@ -204,7 +204,7 @@ The structure of the OCR demo is as follows after the above command is executed:
 
 ```
 demo/cxx/ocr/
-|-- debug/  
+|-- debug/
 |   |--ch_PP-OCRv3_det_slim_opt.nb           Detection model
 |   |--ch_PP-OCRv3_rec_slim_opt.nb           Recognition model
 |   |--ch_ppocr_mobile_v2.0_cls_slim_opt.nb           Text direction classification model
@@ -219,7 +219,7 @@ demo/cxx/ocr/
 |-- crnn_process.h
 |-- db_post_process.cc          Pre-processing and post-processing files for the DB model
 |-- db_post_process.h
-|-- Makefile  
+|-- Makefile
 |-- ocr_db_crnn.cc              C++ main code
 ```
 
diff --git a/deploy/lite/readme_ch.md b/deploy/lite/readme_ch.md
index 78e251091..0de559942 100644
--- a/deploy/lite/readme_ch.md
+++ b/deploy/lite/readme_ch.md
@@ -202,7 +202,7 @@ paddle_lite_opt --model_file=./ch_ppocr_mobile_v2.0_cls_slim_infer/inference.pdm
 
 ```
 demo/cxx/ocr/
-|-- debug/  
+|-- debug/
 |   |--ch_PP-OCRv3_det_slim_opt.nb           优化后的检测模型文件
 |   |--ch_PP-OCRv3_rec_slim_opt.nb           优化后的识别模型文件
 |   |--ch_ppocr_mobile_v2.0_cls_slim_opt.nb           优化后的文字方向分类器模型文件
diff --git a/deploy/paddlecloud/README.md b/deploy/paddlecloud/README.md
index 1ff49c7a0..7ff58ce73 100644
--- a/deploy/paddlecloud/README.md
+++ b/deploy/paddlecloud/README.md
@@ -163,10 +163,10 @@ python3 -m paddle.distributed.launch --log_dir=./debug/ --gpus '0,1,2,3' tools/t
 训练过程中保存的模型在output目录下,包含以下文件:
 
 ```
-best_accuracy.states  
+best_accuracy.states
 best_accuracy.pdparams  # 默认保存最优精度的模型参数
 best_accuracy.pdopt     # 默认保存最优精度的优化器相关参数
-latest.states  
+latest.states
 latest.pdparams  # 默认保存的最新模型参数
 latest.pdopt     # 默认保存的最新模型的优化器相关参数
 ```
diff --git a/deploy/pdserving/README.md b/deploy/pdserving/README.md
index 83329a11c..7618ef708 100644
--- a/deploy/pdserving/README.md
+++ b/deploy/pdserving/README.md
@@ -6,7 +6,7 @@ PaddleOCR provides two service deployment methods:
 - Based on **PaddleHub Serving**: Code path is "`./deploy/hubserving`". Please refer to the [tutorial](../../deploy/hubserving/readme_en.md)
 - Based on **PaddleServing**: Code path is "`./deploy/pdserving`". Please follow this tutorial.
 
-# Service deployment based on PaddleServing  
+# Service deployment based on PaddleServing
 
 This document will introduce how to use the [PaddleServing](https://github.com/PaddlePaddle/Serving/blob/develop/README.md) to deploy the PPOCR dynamic graph model as a pipeline online service.
 
@@ -101,13 +101,13 @@ python3 -m paddle_serving_client.convert --dirname ./ch_PP-OCRv3_rec_infer/ \
 After the detection model is converted, there will be additional folders of `ppocr_det_v3_serving` and `ppocr_det_v3_client` in the current folder, with the following format:
 ```
 |- ppocr_det_v3_serving/
-  |- __model__  
+  |- __model__
   |- __params__
-  |- serving_server_conf.prototxt  
+  |- serving_server_conf.prototxt
   |- serving_server_conf.stream.prototxt
 
 |- ppocr_det_v3_client
-  |- serving_client_conf.prototxt  
+  |- serving_client_conf.prototxt
   |- serving_client_conf.stream.prototxt
 
 ```
@@ -120,7 +120,7 @@ The recognition model is the same.
     ```
     git clone https://github.com/PaddlePaddle/PaddleOCR
 
-    # Enter the working directory  
+    # Enter the working directory
     cd PaddleOCR/deploy/pdserving/
     ```
 
@@ -146,7 +146,7 @@ The recognition model is the same.
     python3 pipeline_http_client.py
     ```
     After successfully running, the predicted result of the model will be printed in the cmd window. An example of the result is:
-    ![](./imgs/results.png)  
+    ![](./imgs/results.png)
 
     Adjust the number of concurrency in config.yml to get the largest QPS. Generally, the number of concurrent detection and recognition is 2:1
 
@@ -243,7 +243,7 @@ The C++ service deployment is the same as python in the environment setup and da
     python3 ocr_cpp_client.py ppocr_det_v3_client ppocr_rec_v3_client
     ```
     After successfully running, the predicted result of the model will be printed in the cmd window. An example of the result is:
-    ![](./imgs/results.png)  
+    ![](./imgs/results.png)
 
 ## WINDOWS Users
 
@@ -282,4 +282,4 @@ python3 ocr_web_client.py
 ```
 unset https_proxy
 unset http_proxy
-```  
+```
diff --git a/deploy/slim/auto_compression/README.md b/deploy/slim/auto_compression/README.md
index 408256e10..23bfba43b 100644
--- a/deploy/slim/auto_compression/README.md
+++ b/deploy/slim/auto_compression/README.md
@@ -273,7 +273,7 @@ Eval:
     name: SimpleDataSet
     data_dir: datasets/v4_4_test_dataset
     label_file_list:
-      - datasets/v4_4_test_dataset/label.txt  
+      - datasets/v4_4_test_dataset/label.txt
 ```
 
 ### 5.2 软件环境一致,硬件不同导致精度差异很大?
@@ -291,7 +291,7 @@ if args.precision == 'int8' and "ppocrv4_det_server_qat_dist.yaml" in args.confi
         use_static=True,
         use_calib_mode=False, )
     pred_cfg.exp_disable_tensorrt_ops(["elementwise_add"])
-else:  
+else:
     pred_cfg.enable_tensorrt_engine(
     workspace_size=1 << 30,
     max_batch_size=1,
diff --git a/deploy/slim/auto_compression/configs/ppocrv4/ppocrv4_det_qat_dist.yaml b/deploy/slim/auto_compression/configs/ppocrv4/ppocrv4_det_qat_dist.yaml
index 614234797..466742209 100644
--- a/deploy/slim/auto_compression/configs/ppocrv4/ppocrv4_det_qat_dist.yaml
+++ b/deploy/slim/auto_compression/configs/ppocrv4/ppocrv4_det_qat_dist.yaml
@@ -160,4 +160,4 @@ Eval:
     shuffle: false
     drop_last: false
     batch_size_per_card: 1
-    num_workers: 10
\ No newline at end of file
+    num_workers: 10
diff --git a/deploy/slim/auto_compression/configs/ppocrv4/ppocrv4_det_server_qat_dist.yaml b/deploy/slim/auto_compression/configs/ppocrv4/ppocrv4_det_server_qat_dist.yaml
index 3cb32713d..febcd0d24 100644
--- a/deploy/slim/auto_compression/configs/ppocrv4/ppocrv4_det_server_qat_dist.yaml
+++ b/deploy/slim/auto_compression/configs/ppocrv4/ppocrv4_det_server_qat_dist.yaml
@@ -158,4 +158,4 @@ Eval:
     shuffle: false
     drop_last: false
     batch_size_per_card: 1
-    num_workers: 2
\ No newline at end of file
+    num_workers: 2
diff --git a/deploy/slim/auto_compression/configs/ppocrv4/ppocrv4_rec_server_qat_dist.yaml b/deploy/slim/auto_compression/configs/ppocrv4/ppocrv4_rec_server_qat_dist.yaml
index 44c1ade97..09c1bc6e0 100644
--- a/deploy/slim/auto_compression/configs/ppocrv4/ppocrv4_rec_server_qat_dist.yaml
+++ b/deploy/slim/auto_compression/configs/ppocrv4/ppocrv4_rec_server_qat_dist.yaml
@@ -110,4 +110,4 @@ Eval:
     shuffle: false
     drop_last: false
     batch_size_per_card: 1
-    num_workers: 4
\ No newline at end of file
+    num_workers: 4
diff --git a/deploy/slim/auto_compression/ppocr_keys_v1.txt b/deploy/slim/auto_compression/ppocr_keys_v1.txt
index 84b885d83..b75af2130 100644
--- a/deploy/slim/auto_compression/ppocr_keys_v1.txt
+++ b/deploy/slim/auto_compression/ppocr_keys_v1.txt
@@ -6620,4 +6620,4 @@ j
 緖
 續
 紹
-懮
\ No newline at end of file
+懮
diff --git a/deploy/slim/auto_compression/ppocrv4_det_server_dataset_process.py b/deploy/slim/auto_compression/ppocrv4_det_server_dataset_process.py
index 43a70f35a..f99e901d8 100644
--- a/deploy/slim/auto_compression/ppocrv4_det_server_dataset_process.py
+++ b/deploy/slim/auto_compression/ppocrv4_det_server_dataset_process.py
@@ -13,7 +13,7 @@ with open(annotation_file, "r") as f:
     lines = f.readlines()
 
 for i, line in enumerate(lines):
-    image_name = line.split("	")[0]
+    image_name = line.split("\t")[0]
 
     image_path = os.path.join(dataset_path, image_name)
 
diff --git a/deploy/slim/prune/README_en.md b/deploy/slim/prune/README_en.md
index 9a0ed5291..7fa1e5b20 100644
--- a/deploy/slim/prune/README_en.md
+++ b/deploy/slim/prune/README_en.md
@@ -38,7 +38,7 @@ PaddleOCR also provides a series of [models](../../../doc/doc_en/models_list_en.
   After the pre-trained model is loaded, sensitivity analysis is performed on each network layer of the model to understand the redundancy of each network layer, and save a sensitivity file which named: sen.pickle.  After that, user could load the sensitivity file via the [methods provided by PaddleSlim](https://github.com/PaddlePaddle/PaddleSlim/blob/develop/paddleslim/prune/sensitive.py#L221) and determining the pruning ratio of each network layer automatically. For specific details of sensitivity analysis, see:[Sensitivity analysis](https://github.com/PaddlePaddle/PaddleSlim/blob/develop/docs/en/tutorials/image_classification_sensitivity_analysis_tutorial_en.md)
   The data format of sensitivity file:
 
-```  
+```
 sen.pickle(Dict){
               'layer_weight_name_0': sens_of_each_ratio(Dict){'pruning_ratio_0': acc_loss, 'pruning_ratio_1': acc_loss}
               'layer_weight_name_1': sens_of_each_ratio(Dict){'pruning_ratio_0': acc_loss, 'pruning_ratio_1': acc_loss}
diff --git a/deploy/slim/prune/sensitivity_anal.py b/deploy/slim/prune/sensitivity_anal.py
index 4fb2f6a2e..d5663195f 100644
--- a/deploy/slim/prune/sensitivity_anal.py
+++ b/deploy/slim/prune/sensitivity_anal.py
@@ -135,14 +135,14 @@ def main(config, device, logger, vdl_writer):
 
     run_sensitive_analysis = False
     """
-    run_sensitive_analysis=True: 
-        Automatically compute the sensitivities of convolutions in a model. 
-        The sensitivity of a convolution is the losses of accuracy on test dataset in 
-        different pruned ratios. The sensitivities can be used to get a group of best 
+    run_sensitive_analysis=True:
+        Automatically compute the sensitivities of convolutions in a model.
+        The sensitivity of a convolution is the losses of accuracy on test dataset in
+        different pruned ratios. The sensitivities can be used to get a group of best
         ratios with some condition.
-    
-    run_sensitive_analysis=False: 
-        Set prune trim ratio to a fixed value, such as 10%. The larger the value, 
+
+    run_sensitive_analysis=False:
+        Set prune trim ratio to a fixed value, such as 10%. The larger the value,
         the more convolution weights will be cropped.
 
     """
diff --git a/doc/doc_ch/FAQ.md b/doc/doc_ch/FAQ.md
index 12fac9990..07dc0ae7e 100644
--- a/doc/doc_ch/FAQ.md
+++ b/doc/doc_ch/FAQ.md
@@ -188,7 +188,7 @@ A:可以看下训练的尺度和预测的尺度是否相同,如果训练的
 
 #### Q: 图像正常识别出来的文字是OK的,旋转90度后识别出来的结果就比较差,有什么方法可以优化?
 
-**A**: 	整图旋转90之后效果变差是有可能的,因为目前PPOCR默认输入的图片是正向的; 可以自己训练一个整图的方向分类器,放在预测的最前端(可以参照现有方向分类器的方式),或者可以基于规则做一些预处理,比如判断长宽等等。
+**A**:  整图旋转90之后效果变差是有可能的,因为目前PPOCR默认输入的图片是正向的; 可以自己训练一个整图的方向分类器,放在预测的最前端(可以参照现有方向分类器的方式),或者可以基于规则做一些预处理,比如判断长宽等等。
 
 #### Q: 如何识别竹简上的古文?
 
@@ -355,7 +355,7 @@ A:当训练数据量少时,可以尝试以下三种方式获取更多的数
 
 ### 2.4 数据标注与生成
 
-> [!NOTE]  
+> [!NOTE]
 > StyleText 已经移动到 [PFCCLab/StyleText](https://github.com/PFCCLab/StyleText)
 
 #### Q: Style-Text 如何不文字风格迁移,就像普通文本生成程序一样默认字体直接输出到分割的背景图?
diff --git a/doc/doc_ch/PPOCRv3_det_train.md b/doc/doc_ch/PPOCRv3_det_train.md
index 45f459ba6..616d77f69 100644
--- a/doc/doc_ch/PPOCRv3_det_train.md
+++ b/doc/doc_ch/PPOCRv3_det_train.md
@@ -55,10 +55,10 @@ python3 -m paddle.distributed.launch --gpus '0,1,2,3' tools/train.py -c configs/
 
 训练过程中保存的模型在output目录下,包含以下文件:
 ```
-best_accuracy.states  
+best_accuracy.states
 best_accuracy.pdparams  # 默认保存最优精度的模型参数
 best_accuracy.pdopt     # 默认保存最优精度的优化器相关参数
-latest.states  
+latest.states
 latest.pdparams  # 默认保存的最新模型参数
 latest.pdopt     # 默认保存的最新模型的优化器相关参数
 ```
diff --git a/doc/doc_ch/algorithm_det_sast.md b/doc/doc_ch/algorithm_det_sast.md
index f18eaf1a4..2ac8eeaa8 100644
--- a/doc/doc_ch/algorithm_det_sast.md
+++ b/doc/doc_ch/algorithm_det_sast.md
@@ -51,7 +51,7 @@
 
 <a name="4-1"></a>
 ### 4.1 Python推理
-#### (1). 四边形文本检测模型(ICDAR2015)  
+#### (1). 四边形文本检测模型(ICDAR2015)
 首先将SAST文本检测训练过程中保存的模型,转换成inference model。以基于Resnet50_vd骨干网络,在ICDAR2015英文数据集训练的模型为例([模型下载地址](https://paddleocr.bj.bcebos.com/dygraph_v2.0/en/det_r50_vd_sast_icdar15_v2.0_train.tar)),可以使用如下命令进行转换:
 ```
 python3 tools/export_model.py -c configs/det/det_r50_vd_sast_icdar15.yml -o Global.pretrained_model=./det_r50_vd_sast_icdar15_v2.0_train/best_accuracy  Global.save_inference_dir=./inference/det_sast_ic15
@@ -65,7 +65,7 @@ python3 tools/infer/predict_det.py --det_algorithm="SAST" --image_dir="./doc/img
 
 ![](../imgs_results/det_res_img_10_sast.jpg)
 
-#### (2). 弯曲文本检测模型(Total-Text)  
+#### (2). 弯曲文本检测模型(Total-Text)
 首先将SAST文本检测训练过程中保存的模型,转换成inference model。以基于Resnet50_vd骨干网络,在Total-Text英文数据集训练的模型为例([模型下载地址](https://paddleocr.bj.bcebos.com/dygraph_v2.0/en/det_r50_vd_sast_totaltext_v2.0_train.tar)),可以使用如下命令进行转换:
 
 ```
diff --git a/doc/doc_ch/algorithm_e2e_pgnet.md b/doc/doc_ch/algorithm_e2e_pgnet.md
index 934328106..78d710fdb 100644
--- a/doc/doc_ch/algorithm_e2e_pgnet.md
+++ b/doc/doc_ch/algorithm_e2e_pgnet.md
@@ -88,7 +88,7 @@ python3 tools/infer/predict_e2e.py --e2e_algorithm="PGNet" --image_dir="./doc/im
 /PaddleOCR/train_data/total_text/train/
   |- rgb/            # total_text数据集的训练数据
       |- img11.jpg
-      | ...  
+      | ...
   |- train.txt       # total_text数据集的训练标注
 ```
 
@@ -161,7 +161,7 @@ python3 tools/infer_e2e.py -c configs/e2e/e2e_r50_vd_pg.yml -o Global.infer_img=
 ```
 
 ### 预测推理
-#### (1). 四边形文本检测模型(ICDAR2015)  
+#### (1). 四边形文本检测模型(ICDAR2015)
 首先将PGNet端到端训练过程中保存的模型,转换成inference model。以基于Resnet50_vd骨干网络,以英文数据集训练的模型为例[模型下载地址](https://paddleocr.bj.bcebos.com/dygraph_v2.0/pgnet/en_server_pgnetA.tar) ,可以使用如下命令进行转换:
 ```
 wget https://paddleocr.bj.bcebos.com/dygraph_v2.0/pgnet/en_server_pgnetA.tar && tar xf en_server_pgnetA.tar
diff --git a/doc/doc_ch/algorithm_inference.md b/doc/doc_ch/algorithm_inference.md
index d8457ea36..f4858a9a0 100755
--- a/doc/doc_ch/algorithm_inference.md
+++ b/doc/doc_ch/algorithm_inference.md
@@ -11,7 +11,7 @@ inference 模型(`paddle.jit.save`保存的模型)
 
 - [一、训练模型转inference模型](#训练模型转inference模型)
     - [检测模型转inference模型](#检测模型转inference模型)
-    - [识别模型转inference模型](#识别模型转inference模型)  
+    - [识别模型转inference模型](#识别模型转inference模型)
     - [方向分类模型转inference模型](#方向分类模型转inference模型)
 
 
@@ -19,7 +19,7 @@ inference 模型(`paddle.jit.save`保存的模型)
     - [1. 超轻量中文检测模型推理](#超轻量中文检测模型推理)
     - [2. DB文本检测模型推理](#DB文本检测模型推理)
     - [3. EAST文本检测模型推理](#EAST文本检测模型推理)
-    - [4. SAST文本检测模型推理](#SAST文本检测模型推理)  
+    - [4. SAST文本检测模型推理](#SAST文本检测模型推理)
 
 
 - [三、文本识别模型推理](#文本识别模型推理)
@@ -209,7 +209,7 @@ python3 tools/infer/predict_det.py --det_algorithm="EAST" --image_dir="./doc/img
 
 <a name="SAST文本检测模型推理"></a>
 ### 4. SAST文本检测模型推理
-#### (1). 四边形文本检测模型(ICDAR2015)  
+#### (1). 四边形文本检测模型(ICDAR2015)
 首先将SAST文本检测训练过程中保存的模型,转换成inference model。以基于Resnet50_vd骨干网络,在ICDAR2015英文数据集训练的模型为例([模型下载地址](https://paddleocr.bj.bcebos.com/dygraph_v2.0/en/det_r50_vd_sast_icdar15_v2.0_train.tar)),可以使用如下命令进行转换:
 ```
 python3 tools/export_model.py -c configs/det/det_r50_vd_sast_icdar15.yml -o Global.pretrained_model=./det_r50_vd_sast_icdar15_v2.0_train/best_accuracy  Global.save_inference_dir=./inference/det_sast_ic15
@@ -223,7 +223,7 @@ python3 tools/infer/predict_det.py --det_algorithm="SAST" --image_dir="./doc/img
 
 ![](../imgs_results/det_res_img_10_sast.jpg)
 
-#### (2). 弯曲文本检测模型(Total-Text)  
+#### (2). 弯曲文本检测模型(Total-Text)
 首先将SAST文本检测训练过程中保存的模型,转换成inference model。以基于Resnet50_vd骨干网络,在Total-Text英文数据集训练的模型为例([模型下载地址](https://paddleocr.bj.bcebos.com/dygraph_v2.0/en/det_r50_vd_sast_totaltext_v2.0_train.tar)),可以使用如下命令进行转换:
 
 ```
diff --git a/doc/doc_ch/algorithm_overview.md b/doc/doc_ch/algorithm_overview.md
index b8d4aedc1..6fb277a1f 100755
--- a/doc/doc_ch/algorithm_overview.md
+++ b/doc/doc_ch/algorithm_overview.md
@@ -57,7 +57,7 @@ PaddleOCR将**持续新增**支持OCR领域前沿算法与模型,**欢迎广
 在CTW1500文本检测公开数据集上,算法效果如下:
 
 |模型|骨干网络|precision|recall|Hmean|下载链接|
-| --- | --- | --- | --- | --- | --- |  
+| --- | --- | --- | --- | --- | --- |
 |FCE|ResNet50_dcn|88.39%|82.18%|85.27%|[训练模型](https://paddleocr.bj.bcebos.com/contribution/det_r50_dcn_fce_ctw_v2.0_train.tar)|
 |DRRG|ResNet50_vd|89.92%|80.91%|85.18%|[训练模型](https://paddleocr.bj.bcebos.com/contribution/det_r50_drrg_ctw_train.tar)|
 
diff --git a/doc/doc_ch/benchmark.md b/doc/doc_ch/benchmark.md
index bd11838a5..da82d30ec 100644
--- a/doc/doc_ch/benchmark.md
+++ b/doc/doc_ch/benchmark.md
@@ -2,21 +2,21 @@
 
 本文给出了中英文OCR系列模型精度指标和在各平台预测耗时的benchmark。
 
-## 测试数据  
+## 测试数据
 针对OCR实际应用场景,包括合同,车牌,铭牌,火车票,化验单,表格,证书,街景文字,名片,数码显示屏等,收集的300张图像,每张图平均有17个文本框,下图给出了一些图像示例。
 
 <div align="center">
 <img src="../datasets/doc.jpg"  width = "1000" height = "500" />
 </div>
 
-## 评估指标  
+## 评估指标
 
 说明:
 
 - 检测输入图像的长边尺寸是960。
-- 评估耗时阶段为图像预测耗时,不包括图像的预处理和后处理。  
+- 评估耗时阶段为图像预测耗时,不包括图像的预处理和后处理。
 - `Intel至强6148`为服务器端CPU型号,测试中使用Intel MKL-DNN 加速。
-- `骁龙855`为移动端处理平台型号。  
+- `骁龙855`为移动端处理平台型号。
 
 预测模型大小和整体识别精度对比
 
diff --git a/doc/doc_ch/config.md b/doc/doc_ch/config.md
index 3430105fb..5ca1f68e6 100644
--- a/doc/doc_ch/config.md
+++ b/doc/doc_ch/config.md
@@ -13,7 +13,7 @@
 |         FLAG             |     支持脚本    |        用途        |      默认值       |         备注         |
 | :----------------------: | :------------: | :---------------: | :--------------: | :-----------------: |
 |          -c              |      ALL       |  指定配置文件  |  None  |  **配置模块说明请参考 参数介绍** |
-|          -o              |      ALL       |  设置配置文件里的参数内容  |  None  |  使用-o配置相较于-c选择的配置文件具有更高的优先级。例如:`-o Global.use_gpu=false`  |  
+|          -o              |      ALL       |  设置配置文件里的参数内容  |  None  |  使用-o配置相较于-c选择的配置文件具有更高的优先级。例如:`-o Global.use_gpu=false`  |
 
 <a name="2"></a>
 
diff --git a/doc/doc_ch/data_annotation.md b/doc/doc_ch/data_annotation.md
index da14b10d3..bd578f728 100644
--- a/doc/doc_ch/data_annotation.md
+++ b/doc/doc_ch/data_annotation.md
@@ -5,23 +5,23 @@
 ### 1. labelImg
 - 工具描述:矩形标注
 - 工具地址:https://github.com/tzutalin/labelImg
-- 示意图:  
-    ![](../datasets/labelimg.jpg)  
+- 示意图:
+    ![](../datasets/labelimg.jpg)
 
 ### 2. roLabelImg
 - 工具描述:基于labelImg重写的标注工具,支持旋转矩形标注
 - 工具地址:https://github.com/cgvict/roLabelImg
-- 示意图:  
-    ![](../datasets/roLabelImg.png)  
+- 示意图:
+    ![](../datasets/roLabelImg.png)
 
 ### 3. labelme
 - 工具描述:支持四点、多边形、圆形等多种标注
 - 工具地址:https://github.com/wkentaro/labelme
-- 示意图:  
-    ![](../datasets/labelme.jpg)  
+- 示意图:
+    ![](../datasets/labelme.jpg)
 
 ### 4. Vott
 - 工具描述:支持矩形,多边形等图片标注.支持视频标注.方便使用的快捷键以及比较好看的界面.同时支持导出多种标签格式.
 - 工具地址:https://github.com/microsoft/VoTT
-- 示意图:  
-    ![](../datasets/VoTT.jpg)  
+- 示意图:
+    ![](../datasets/VoTT.jpg)
diff --git a/doc/doc_ch/dataset/datasets.md b/doc/doc_ch/dataset/datasets.md
index 4166e8842..84fd9650b 100644
--- a/doc/doc_ch/dataset/datasets.md
+++ b/doc/doc_ch/dataset/datasets.md
@@ -9,16 +9,16 @@
 
 除了开源数据,用户还可使用合成工具自行合成,可参考[数据合成工具](../data_synthesis.md);
 
-如果需要标注自己的数据,可参考[数据标注工具](../data_annotation.md)。  
+如果需要标注自己的数据,可参考[数据标注工具](../data_annotation.md)。
 
 <a name="ICDAR2019-LSVT"></a>
 #### 1、ICDAR2019-LSVT
 - **数据来源**:https://ai.baidu.com/broad/introduction?dataset=lsvt
-- **数据简介**: 共45w中文街景图像,包含5w(2w测试+3w训练)全标注数据(文本坐标+文本内容),40w弱标注数据(仅文本内容),如下图所示:  
-    ![](../../datasets/LSVT_1.jpg)  
-    (a) 全标注数据  
-    ![](../../datasets/LSVT_2.jpg)  
-    (b) 弱标注数据  
+- **数据简介**: 共45w中文街景图像,包含5w(2w测试+3w训练)全标注数据(文本坐标+文本内容),40w弱标注数据(仅文本内容),如下图所示:
+    ![](../../datasets/LSVT_1.jpg)
+    (a) 全标注数据
+    ![](../../datasets/LSVT_2.jpg)
+    (b) 弱标注数据
 - **下载地址**:https://ai.baidu.com/broad/download?dataset=lsvt
 - **说明**:其中,test数据集的label目前没有开源,如要评估结果,可以去官网提交:https://rrc.cvc.uab.es/?ch=16
 
@@ -32,25 +32,25 @@
 <a name="中文街景文字识别"></a>
 #### 3、中文街景文字识别
 - **数据来源**:https://aistudio.baidu.com/aistudio/competition/detail/8
-- **数据简介**:ICDAR2019-LSVT行识别任务,共包括29万张图片,其中21万张图片作为训练集(带标注),8万张作为测试集(无标注)。数据集采自中国街景,并由街景图片中的文字行区域(例如店铺标牌、地标等等)截取出来而形成。所有图像都经过一些预处理,将文字区域利用仿射变化,等比映射为一张高为48像素的图片,如图所示:  
-    ![](../../datasets/ch_street_rec_1.png)  
-    (a) 标注:魅派集成吊顶  
-    ![](../../datasets/ch_street_rec_2.png)  
-    (b) 标注:母婴用品连锁  
+- **数据简介**:ICDAR2019-LSVT行识别任务,共包括29万张图片,其中21万张图片作为训练集(带标注),8万张作为测试集(无标注)。数据集采自中国街景,并由街景图片中的文字行区域(例如店铺标牌、地标等等)截取出来而形成。所有图像都经过一些预处理,将文字区域利用仿射变化,等比映射为一张高为48像素的图片,如图所示:
+    ![](../../datasets/ch_street_rec_1.png)
+    (a) 标注:魅派集成吊顶
+    ![](../../datasets/ch_street_rec_2.png)
+    (b) 标注:母婴用品连锁
 - **下载地址**
 https://aistudio.baidu.com/aistudio/datasetdetail/8429
 
 <a name="中文文档文字识别"></a>
 #### 4、中文文档文字识别
-- **数据来源**:https://github.com/YCG09/chinese_ocr  
-- **数据简介**:  
+- **数据来源**:https://github.com/YCG09/chinese_ocr
+- **数据简介**:
     - 共约364万张图片,按照99:1划分成训练集和验证集。
     - 数据利用中文语料库(新闻 + 文言文),通过字体、大小、灰度、模糊、透视、拉伸等变化随机生成
     - 包含汉字、英文字母、数字和标点共5990个字符(字符集合:https://github.com/YCG09/chinese_ocr/blob/master/train/char_std_5990.txt )
     - 每个样本固定10个字符,字符随机截取自语料库中的句子
-    - 图片分辨率统一为280x32  
-    ![](../../datasets/ch_doc1.jpg)  
-    ![](../../datasets/ch_doc3.jpg)  
+    - 图片分辨率统一为280x32
+    ![](../../datasets/ch_doc1.jpg)
+    ![](../../datasets/ch_doc3.jpg)
 - **下载地址**:https://pan.baidu.com/s/1QkI7kjah8SPHwOQ40rS1Pw (密码:lu7m)
 
 <a name="ICDAR2019-ArT"></a>
diff --git a/doc/doc_ch/enhanced_ctc_loss.md b/doc/doc_ch/enhanced_ctc_loss.md
index c85883747..4be16f322 100644
--- a/doc/doc_ch/enhanced_ctc_loss.md
+++ b/doc/doc_ch/enhanced_ctc_loss.md
@@ -9,7 +9,7 @@ Focal Loss 出自论文《Focal Loss for Dense Object Detection》, 该loss最
 <img src="./focal_loss_formula.png" width = "600" />
 </div>
 
-其中,  y' 是经过激活函数的输出,取值在0-1之间。其在原始的交叉熵损失的基础上加了一个调制系数(1 – y’)^ &gamma;和平衡因子&alpha;。 当&alpha; = 1,y=1时,其损失函数与交叉熵损失的对比如下图所示:  
+其中,  y' 是经过激活函数的输出,取值在0-1之间。其在原始的交叉熵损失的基础上加了一个调制系数(1 – y’)^ &gamma;和平衡因子&alpha;。 当&alpha; = 1,y=1时,其损失函数与交叉熵损失的对比如下图所示:
 <div align="center">
 <img src="./focal_loss_image.png" width = "600" />
 </div>
@@ -39,7 +39,7 @@ A-CTC Loss是CTC Loss + ACE Loss的简称。 其中ACE Loss出自论文< Aggrega
 </div>
 
 虽然ACELoss确实如上图所说,可以处理2D预测,在内存占用及推理速度方面具备优势,但在实践过程中,我们发现单独使用ACE Loss,  识别效果并不如CTCLoss.  因此,我们尝试将CTCLoss和ACELoss进行结合,同时以CTCLoss为主,将ACELoss 定位为一个辅助监督loss。 这一尝试收到了效果,在我们内部的实验数据集上,相比单独使用CTCLoss,识别准确率可以提升1%左右。
-A_CTC Loss定义如下:  
+A_CTC Loss定义如下:
 <div align="center">
 <img src="./equation_a_ctc.png" width = "300" />
 </div>
diff --git a/doc/doc_ch/kie.md b/doc/doc_ch/kie.md
index 6a47e86a1..0ebb05d0e 100644
--- a/doc/doc_ch/kie.md
+++ b/doc/doc_ch/kie.md
@@ -173,7 +173,7 @@ python3 tools/train.py -c configs/kie/vi_layoutxlm/re_vi_layoutxlm_xfund_zh.yml
 
 log 中自动打印如下信息:
 
-|  字段   |   含义   |  
+|  字段   |   含义   |
 | :----: | :------: |
 |  epoch | 当前迭代轮次 |
 |  iter  | 当前迭代次数 |
diff --git a/doc/doc_ch/knowledge_distillation.md b/doc/doc_ch/knowledge_distillation.md
index f9cbcbfa3..8c6cca5d1 100644
--- a/doc/doc_ch/knowledge_distillation.md
+++ b/doc/doc_ch/knowledge_distillation.md
@@ -405,7 +405,7 @@ Architecture:
         kernel_list: [7,2,2]
         k: 50
     Teacher:                      # 另外一个子网络,这里给的是DML蒸馏示例,
-      freeze_params: true  
+      freeze_params: true
       return_all_feats: false
       model_type: det
       algorithm: DB
@@ -430,7 +430,7 @@ Architecture:
 
 ```
 Architecture:
-  name: DistillationModel  
+  name: DistillationModel
   algorithm: Distillation
   model_type: det
   Models:
@@ -453,7 +453,7 @@ Architecture:
         kernel_list: [7,2,2]
         k: 50
     Student:                         # CML蒸馏的Student模型配置
-      pretrained: ./pretrain_models/MobileNetV3_large_x0_5_pretrained  
+      pretrained: ./pretrain_models/MobileNetV3_large_x0_5_pretrained
       freeze_params: false
       return_all_feats: false
       model_type: det
@@ -471,7 +471,7 @@ Architecture:
         name: DBHead
         k: 50
     Student2:                          # CML蒸馏的Student2模型配置
-      pretrained: ./pretrain_models/MobileNetV3_large_x0_5_pretrained  
+      pretrained: ./pretrain_models/MobileNetV3_large_x0_5_pretrained
       freeze_params: false
       return_all_feats: false
       model_type: det
diff --git a/doc/doc_ch/multi_languages.md b/doc/doc_ch/multi_languages.md
index 1f337bdf4..2eeb4b628 100644
--- a/doc/doc_ch/multi_languages.md
+++ b/doc/doc_ch/multi_languages.md
@@ -29,7 +29,7 @@ PaddleOCR 旨在打造一套丰富、领先、且实用的OCR工具库,不仅
 
 - [1 安装](#安装)
     - [1.1 paddle 安装](#paddle安装)
-    - [1.2 paddleocr package 安装](#paddleocr_package_安装)  
+    - [1.2 paddleocr package 安装](#paddleocr_package_安装)
 
 - [2 快速使用](#快速使用)
     - [2.1 命令行运行](#命令行运行)
diff --git a/doc/doc_ch/recognition.md b/doc/doc_ch/recognition.md
index 815d14995..642c80b32 100644
--- a/doc/doc_ch/recognition.md
+++ b/doc/doc_ch/recognition.md
@@ -229,7 +229,7 @@ python3 -m paddle.distributed.launch --gpus '0,1,2,3'  tools/train.py -c configs
 
 log 中自动打印如下信息:
 
-|  字段   |   含义   |  
+|  字段   |   含义   |
 | :----: | :------: |
 |  epoch | 当前迭代轮次 |
 |  iter  | 当前迭代次数 |
@@ -404,7 +404,7 @@ PaddleOCR目前已支持80种(除中文外)语种识别,`configs/rec/multi
 | :--------: |  :-------:   | :-------:  |   :-------:   |   :-----:   |  :-----:   | :-----:  |
 | rec_chinese_cht_lite_train.yml |  CRNN |   Mobilenet_v3 small 0.5 |  None   |  BiLSTM |  ctc  | 中文繁体  |
 | rec_en_lite_train.yml |  CRNN |   Mobilenet_v3 small 0.5 |  None   |  BiLSTM |  ctc  | 英语(区分大小写)   |
-| rec_french_lite_train.yml |  CRNN |   Mobilenet_v3 small 0.5 |  None   |  BiLSTM |  ctc  | 法语 |  
+| rec_french_lite_train.yml |  CRNN |   Mobilenet_v3 small 0.5 |  None   |  BiLSTM |  ctc  | 法语 |
 | rec_ger_lite_train.yml |  CRNN |   Mobilenet_v3 small 0.5 |  None   |  BiLSTM |  ctc  | 德语   |
 | rec_japan_lite_train.yml |  CRNN |   Mobilenet_v3 small 0.5 |  None   |  BiLSTM |  ctc  | 日语  |
 | rec_korean_lite_train.yml |  CRNN |   Mobilenet_v3 small 0.5 |  None   |  BiLSTM |  ctc  | 韩语  |
@@ -490,16 +490,16 @@ python3 -m paddle.distributed.launch --gpus '0' tools/eval.py -c configs/rec/PP-
 
 ```
 output/rec/
-├── best_accuracy.pdopt  
-├── best_accuracy.pdparams  
-├── best_accuracy.states  
-├── config.yml  
-├── iter_epoch_3.pdopt  
-├── iter_epoch_3.pdparams  
-├── iter_epoch_3.states  
-├── latest.pdopt  
-├── latest.pdparams  
-├── latest.states  
+├── best_accuracy.pdopt
+├── best_accuracy.pdparams
+├── best_accuracy.states
+├── config.yml
+├── iter_epoch_3.pdopt
+├── iter_epoch_3.pdparams
+├── iter_epoch_3.states
+├── latest.pdopt
+├── latest.pdparams
+├── latest.states
 └── train.log
 ```
 其中 best_accuracy.* 是评估集上的最优模型;iter_epoch_x.* 是以 `save_epoch_step` 为间隔保存下来的模型;latest.* 是最后一个epoch的模型。
diff --git a/doc/doc_ch/table_recognition.md b/doc/doc_ch/table_recognition.md
index 9460790f4..88260466d 100644
--- a/doc/doc_ch/table_recognition.md
+++ b/doc/doc_ch/table_recognition.md
@@ -112,7 +112,7 @@ python3 -m paddle.distributed.launch --gpus '0,1,2,3'  tools/train.py -c configs
 
 log 中自动打印如下信息:
 
-|  字段   |   含义   |  
+|  字段   |   含义   |
 | :----: | :------: |
 |  epoch | 当前迭代轮次 |
 |  global_step  | 当前迭代次数 |
@@ -290,13 +290,13 @@ python3 -m paddle.distributed.launch --gpus '0' tools/eval.py -c configs/table/S
 
 ```
 output/SLANet/
-├── best_accuracy.pdopt  
-├── best_accuracy.pdparams  
-├── best_accuracy.states  
-├── config.yml  
-├── latest.pdopt  
-├── latest.pdparams  
-├── latest.states  
+├── best_accuracy.pdopt
+├── best_accuracy.pdparams
+├── best_accuracy.states
+├── config.yml
+├── latest.pdopt
+├── latest.pdparams
+├── latest.states
 └── train.log
 ```
 其中 best_accuracy.* 是评估集上的最优模型;latest.* 是最后一个epoch的模型。
diff --git a/doc/doc_ch/training.md b/doc/doc_ch/training.md
index aadb30f60..37c2ec26e 100644
--- a/doc/doc_ch/training.md
+++ b/doc/doc_ch/training.md
@@ -78,12 +78,12 @@ Optimizer:
 ### 3.1 训练数据
 目前开源的模型,数据集和量级如下:
 
-    - 检测:  
-        - 英文数据集,ICDAR2015  
+    - 检测:
+        - 英文数据集,ICDAR2015
         - 中文数据集,LSVT街景数据集训练数据3w张图片
 
-    - 识别:  
-        - 英文数据集,MJSynth和SynthText合成数据,数据量上千万。  
+    - 识别:
+        - 英文数据集,MJSynth和SynthText合成数据,数据量上千万。
         - 中文数据集,LSVT街景数据集根据真值将图crop出来,并进行位置校准,总共30w张图像。此外基于LSVT的语料,合成数据500w。
         - 小语种数据集,使用不同语料和字体,分别生成了100w合成数据集,并使用ICDAR-MLT作为验证集。
 
@@ -145,7 +145,7 @@ PaddleOCR主要聚焦通用OCR,如果有垂类需求,您可以用PaddleOCR+
 
 - [文本检测模型训练](./detection.md)
 
-- [文本识别模型训练](./recognition.md)  
+- [文本识别模型训练](./recognition.md)
 
-- [文本方向分类器训练](./angle_class.md)  
+- [文本方向分类器训练](./angle_class.md)
 - [知识蒸馏](./knowledge_distillation.md)
diff --git a/doc/doc_ch/tree.md b/doc/doc_ch/tree.md
index c222bcb44..cc3530b3d 100644
--- a/doc/doc_ch/tree.md
+++ b/doc/doc_ch/tree.md
@@ -9,10 +9,10 @@ PaddleOCR
 │   │   ├── cls_mv3.yml                     // 训练配置相关,包括骨干网络、head、loss、优化器和数据
 │   ├── det                                 // 检测相关配置文件
 │   │   ├── det_mv3_db.yml                  // 训练配置
-│   │   ...  
+│   │   ...
 │   └── rec                                 // 识别相关配置文件
 │       ├── rec_mv3_none_bilstm_ctc.yml     // crnn 训练配置
-│       ...  
+│       ...
 ├── deploy                                  // 部署相关
 │   ├── android_demo                        // android_demo
 │   │   ...
@@ -32,7 +32,7 @@ PaddleOCR
 │   │   ├── readme.md                       // 说明文档
 │   │   ├── ...
 │   │   ├── src                             // 源文件
-│   │   │   ├── clipper.cpp  
+│   │   │   ├── clipper.cpp
 │   │   │   ├── config.cpp
 │   │   │   ├── main.cpp
 │   │   │   ├── ocr_cls.cpp
@@ -57,12 +57,12 @@ PaddleOCR
 │   ├── hubserving                          // hubserving
 │   │   ├── ocr_cls                         // 方向分类器
 │   │   │   ├── config.json                 // serving 配置
-│   │   │   ├── __init__.py  
+│   │   │   ├── __init__.py
 │   │   │   ├── module.py                   // 预测模型
 │   │   │   └── params.py                   // 预测参数
 │   │   ├── ocr_det                         // 文字检测
 │   │   │   ├── config.json                 // serving 配置
-│   │   │   ├── __init__.py  
+│   │   │   ├── __init__.py
 │   │   │   ├── module.py                   // 预测模型
 │   │   │   └── params.py                   // 预测参数
 │   │   ├── ocr_rec                         // 文字识别
@@ -102,7 +102,7 @@ PaddleOCR
 │   │   ├── readme.md                       // 说明文档
 │   │   ├── rec_local_server.py             // 识别 快速版
 │   │   └── rec_web_server.py               // 识别 完整版
-│   └── slim  
+│   └── slim
 │       └── quantization                    // 量化相关
 │           ├── export_model.py             // 导出模型
 │           ├── quant.py                    // 量化
@@ -185,7 +185,7 @@ PaddleOCR
 │   │   └── sast_postprocess.py             // SAST 后处理
 │   └── utils                               // 工具
 │       ├── dict                            // 小语种字典
-│            ....  
+│            ....
 │       ├── ic15_dict.txt                   // 英文数字字典,区分大小写
 │       ├── ppocr_keys_v1.txt               // 中文字典,用于训练中文模型
 │       ├── logging.py                      // logger
diff --git a/doc/doc_ch/whl.md b/doc/doc_ch/whl.md
index ba955c832..c8a53bf67 100644
--- a/doc/doc_ch/whl.md
+++ b/doc/doc_ch/whl.md
@@ -456,7 +456,7 @@ for idx in range(len(result)):
 |-------------------------|----------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------|-------------------------|
 | use_gpu                 | 是否使用GPU                                                                                                                                                                                                          | TRUE                    |
 | gpu_mem                 | 初始化占用的GPU内存大小                                                                                                                                                                                              | 8000M                   |
-| image_dir               | 通过命令行调用时执行预测的图片或文件夹路径                                                                                                                                                                           |  
+| image_dir               | 通过命令行调用时执行预测的图片或文件夹路径                                                                                                                                                                           |
 | page_num               | 当输入类型为pdf文件时有效,指定预测前面page_num页,默认预测所有页                     |        0                 |
 | det_algorithm           | 使用的检测算法类型                                                                                                                                                                                                   | DB                      |
 | det_model_dir          |  检测模型所在文件夹。传参方式有两种,1. None: 自动下载内置模型到 `~/.paddleocr/det`;2.自己转换好的inference模型路径,模型路径下必须包含model和params文件 |   None        |
diff --git a/doc/doc_en/FAQ_en.md b/doc/doc_en/FAQ_en.md
index b28e34206..1074c7371 100644
--- a/doc/doc_en/FAQ_en.md
+++ b/doc/doc_en/FAQ_en.md
@@ -1,28 +1,28 @@
 ## FAQ
 
-1. **Prediction error: got an unexpected keyword argument 'gradient_clip'**  
+1. **Prediction error: got an unexpected keyword argument 'gradient_clip'**
 The installed version of paddle is incorrect. Currently, this project only supports Paddle 1.7, which will be adapted to 1.8 in the near future.
 
-2. **Error when converting attention recognition model: KeyError: 'predict'**  
+2. **Error when converting attention recognition model: KeyError: 'predict'**
 Solved. Please update to the latest version of the code.
 
-3. **About inference speed**  
+3. **About inference speed**
 When there are many words in the picture, the prediction time will increase. You can use `--rec_batch_num` to set a smaller prediction batch num. The default value is 30, which can be changed to 10 or other values.
 
-4. **Service deployment and mobile deployment**  
+4. **Service deployment and mobile deployment**
 It is expected that the service deployment based on Serving and the mobile deployment based on Paddle Lite will be released successively in mid-to-late June. Stay tuned for more updates.
 
-5. **Release time of self-developed algorithm**  
+5. **Release time of self-developed algorithm**
 Baidu Self-developed algorithms such as SAST, SRN and end2end PSL will be released in June or July. Please be patient.
 
-6. **How to run on Windows or Mac?**  
+6. **How to run on Windows or Mac?**
 PaddleOCR has completed the adaptation to Windows and MAC systems. Two points should be noted during operation:
     1. In [Quick installation](./installation_en.md), if you do not want to install docker, you can skip the first step and start with the second step.
     2. When downloading the inference model, if wget is not installed, you can directly click the model link or copy the link address to the browser to download, then extract and place it in the corresponding directory.
 
-7. **The difference between ultra-lightweight model and General OCR model**  
+7. **The difference between ultra-lightweight model and General OCR model**
 At present, PaddleOCR has opensourced two Chinese models, namely 8.6M ultra-lightweight Chinese model and general Chinese OCR model. The comparison information between the two is as follows:
-    - Similarities: Both use the same **algorithm** and **training data**;  
+    - Similarities: Both use the same **algorithm** and **training data**;
     - Differences: The difference lies in **backbone network** and **channel parameters**, the ultra-lightweight model uses MobileNetV3 as the backbone network, the general model uses Resnet50_vd as the detection model backbone, and Resnet34_vd as the recognition model backbone. You can compare the two model training configuration files to see the differences in parameters.
 
 |Model|Backbone|Detection configuration file|Recognition configuration file|
@@ -30,29 +30,29 @@ At present, PaddleOCR has opensourced two Chinese models, namely 8.6M ultra-ligh
 |8.6M ultra-lightweight Chinese OCR model|MobileNetV3+MobileNetV3|det_mv3_db.yml|rec_chinese_lite_train.yml|
 |General Chinese OCR model|Resnet50_vd+Resnet34_vd|det_r50_vd_db.yml|rec_chinese_common_train.yml|
 
-8. **Is there a plan to opensource a model that only recognizes numbers or only English + numbers?**  
+8. **Is there a plan to opensource a model that only recognizes numbers or only English + numbers?**
 It is not planned to opensource numbers only, numbers + English only, or other vertical text models. PaddleOCR has opensourced a variety of detection and recognition algorithms for customized training. The two Chinese models are also based on the training output of the open-source algorithm library. You can prepare the data according to the tutorial, choose the appropriate configuration file, train yourselves, and we believe that you can get good result. If you have any questions during the training, you are welcome to open issues or ask in the communication group. We will answer them in time.
 
-9. **What is the training data used by the open-source model? Can it be opensourced?**  
+9. **What is the training data used by the open-source model? Can it be opensourced?**
 At present, the open source model, dataset and magnitude are as follows:
-    - Detection:  
-    English dataset: ICDAR2015  
+    - Detection:
+    English dataset: ICDAR2015
     Chinese dataset: LSVT street view dataset with 3w pictures
-    - Recognition:  
-    English dataset: MJSynth and SynthText synthetic dataset, the amount of data is tens of millions.  
+    - Recognition:
+    English dataset: MJSynth and SynthText synthetic dataset, the amount of data is tens of millions.
     Chinese dataset: LSVT street view dataset with cropped text area, a total of 30w images. In addition, the synthesized data based on LSVT corpus is 500w.
 
     Among them, the public datasets are opensourced, users can search and download by themselves, or refer to [Chinese data set](dataset/datasets_en.md), synthetic data is not opensourced, users can use open-source synthesis tools to synthesize data themselves. Current available synthesis tools include [text_renderer](https://github.com/Sanster/text_renderer), [SynthText](https://github.com/ankush-me/SynthText), [TextRecognitionDataGenerator](https://github.com/Belval/TextRecognitionDataGenerator), etc.
 
-10. **Error in using the model with TPS module for prediction**  
-Error message: Input(X) dims[3] and Input(Grid) dims[2] should be equal, but received X dimension[3]\(108) != Grid dimension[2]\(100)  
+10. **Error in using the model with TPS module for prediction**
+Error message: Input(X) dims[3] and Input(Grid) dims[2] should be equal, but received X dimension[3]\(108) != Grid dimension[2]\(100)
 Solution: TPS does not support variable shape. Please set --rec_image_shape='3,32,100' and --rec_char_type='en'
 
-11. **Custom dictionary used during training, the recognition results show that words do not appear in the dictionary**  
+11. **Custom dictionary used during training, the recognition results show that words do not appear in the dictionary**
 The used custom dictionary path is not set when making prediction. The solution is setting parameter `rec_char_dict_path` to the corresponding dictionary file.
 
-12. **Results of cpp_infer and python_inference are very different**  
+12. **Results of cpp_infer and python_inference are very different**
 Versions of exported inference model and inference library should be same. For example, on Windows platform, version of the inference library that PaddlePaddle provides is 1.8, but version of the inference model that PaddleOCR provides is 1.7, you should export model yourself(`tools/export_model.py`) on PaddlePaddle 1.8 and then use the exported model for inference.
 
-13. **How to identify artistic fonts in signs or advertising images**  
+13. **How to identify artistic fonts in signs or advertising images**
 Recognizing artistic fonts in signs or advertising images is a very challenging task because the variation in individual characters is much greater compared to standard fonts. If the artistic font to be identified is within a dictionary list, each word in the dictionary can be treated as a template for recognition using a general image retrieval system. You can try using PaddleClas image recognition system.
diff --git a/doc/doc_en/algorithm_det_sast_en.md b/doc/doc_en/algorithm_det_sast_en.md
index dde8eb32d..b39990d5b 100644
--- a/doc/doc_en/algorithm_det_sast_en.md
+++ b/doc/doc_en/algorithm_det_sast_en.md
@@ -50,7 +50,7 @@ Please refer to [text detection training tutorial](./detection_en.md). PaddleOCR
 
 <a name="4-1"></a>
 ### 4.1 Python Inference
-#### (1). Quadrangle text detection model (ICDAR2015)  
+#### (1). Quadrangle text detection model (ICDAR2015)
 First, convert the model saved in the SAST text detection training process into an inference model. Taking the model based on the Resnet50_vd backbone network and trained on the ICDAR2015 English dataset as an example ([model download link](https://paddleocr.bj.bcebos.com/dygraph_v2.0/en/det_r50_vd_sast_icdar15_v2.0_train.tar)), you can use the following command to convert:
 
 ```
@@ -67,7 +67,7 @@ The visualized text detection results are saved to the `./inference_results` fol
 
 ![](../imgs_results/det_res_img_10_sast.jpg)
 
-#### (2). Curved text detection model (Total-Text)  
+#### (2). Curved text detection model (Total-Text)
 First, convert the model saved in the SAST text detection training process into an inference model. Taking the model based on the Resnet50_vd backbone network and trained on the Total-Text English dataset as an example ([model download link](https://paddleocr.bj.bcebos.com/dygraph_v2.0/en/det_r50_vd_sast_totaltext_v2.0_train.tar)), you can use the following command to convert:
 
 ```
diff --git a/doc/doc_en/algorithm_e2e_pgnet_en.md b/doc/doc_en/algorithm_e2e_pgnet_en.md
index ccb5e6c07..d03731e39 100644
--- a/doc/doc_en/algorithm_e2e_pgnet_en.md
+++ b/doc/doc_en/algorithm_e2e_pgnet_en.md
@@ -81,7 +81,7 @@ Download and unzip [totaltext](https://paddleocr.bj.bcebos.com/dataset/total_tex
 /PaddleOCR/train_data/total_text/train/
   |- rgb/            # total_text training data of dataset
       |- img11.png
-      | ...  
+      | ...
   |- train.txt       # total_text training annotation of dataset
 ```
 
diff --git a/doc/doc_en/algorithm_overview_en.md b/doc/doc_en/algorithm_overview_en.md
index 92b1e4f71..4cffcfd41 100755
--- a/doc/doc_en/algorithm_overview_en.md
+++ b/doc/doc_en/algorithm_overview_en.md
@@ -55,7 +55,7 @@ On Total-Text dataset, the text detection result is as follows:
 On CTW1500 dataset, the text detection result is as follows:
 
 |Model|Backbone|Precision|Recall|Hmean| Download link|
-| --- | --- | --- | --- | --- |---|  
+| --- | --- | --- | --- | --- |---|
 |FCE|ResNet50_dcn|88.39%|82.18%|85.27%| [trained model](https://paddleocr.bj.bcebos.com/contribution/det_r50_dcn_fce_ctw_v2.0_train.tar) |
 |DRRG|ResNet50_vd|89.92%|80.91%|85.18%|[trained model](https://paddleocr.bj.bcebos.com/contribution/det_r50_drrg_ctw_train.tar)|
 
diff --git a/doc/doc_en/data_annotation_en.md b/doc/doc_en/data_annotation_en.md
index 176aa6d53..71c8f4f61 100644
--- a/doc/doc_en/data_annotation_en.md
+++ b/doc/doc_en/data_annotation_en.md
@@ -4,17 +4,17 @@ There are the commonly used data annotation tools, which will be continuously up
 ### 1. labelImg
 - Tool description: Rectangular label
 - Tool address:  https://github.com/tzutalin/labelImg
-- Sketch diagram:  
+- Sketch diagram:
 ![labelimg](../datasets/labelimg.jpg)
 
 ### 2. roLabelImg
 - Tool description: Label tool rewritten based on labelImg, supporting rotating rectangular label
 - Tool address:   https://github.com/cgvict/roLabelImg
-- Sketch diagram:  
+- Sketch diagram:
 ![roLabelImg](../datasets/roLabelImg.png)
 
 ### 3. labelme
 - Tool description: Support four points, polygons, circles and other labels
 - Tool address:   https://github.com/wkentaro/labelme
-- Sketch diagram:  
+- Sketch diagram:
 ![labelme](../datasets/labelme.jpg)
diff --git a/doc/doc_en/dataset/datasets_en.md b/doc/doc_en/dataset/datasets_en.md
index d81c058ca..2a7c9c5df 100644
--- a/doc/doc_en/dataset/datasets_en.md
+++ b/doc/doc_en/dataset/datasets_en.md
@@ -33,24 +33,24 @@ In addition to opensource data, users can also use synthesis tools to synthesize
 - **Data sources**:https://aistudio.baidu.com/aistudio/competition/detail/8
 - **Introduction**:A total of 290000 pictures are included, of which 210000 are used as training sets (with labels) and 80000 are used as test sets (without labels). The dataset is collected from the Chinese street view, and is formed by by cutting out the text line area (such as shop signs, landmarks, etc.) in the street view picture. All the images are preprocessed: by using affine transform, the text area is proportionally mapped to a picture with a height of 48 pixels, as shown in the figure:
 
-    ![](../../datasets/ch_street_rec_1.png)  
-    (a) Label: 魅派集成吊顶  
-    ![](../../datasets/ch_street_rec_2.png)  
-    (b) Label: 母婴用品连锁  
+    ![](../../datasets/ch_street_rec_1.png)
+    (a) Label: 魅派集成吊顶
+    ![](../../datasets/ch_street_rec_2.png)
+    (b) Label: 母婴用品连锁
 - **Download link**
 https://aistudio.baidu.com/aistudio/datasetdetail/8429
 
 <a name="中文文档文字识别"></a>
 #### 4. Chinese Document Text Recognition
-- **Data sources**:https://github.com/YCG09/chinese_ocr  
-- **Introduction**:  
+- **Data sources**:https://github.com/YCG09/chinese_ocr
+- **Introduction**:
     - A total of 3.64 million pictures are divided into training set and validation set according to 99:1.
     - Using Chinese corpus (news + classical Chinese), the data is randomly generated through changes in font, size, grayscale, blur, perspective, stretching, etc.
     - 5990 characters including Chinese characters, English letters, numbers and punctuation(Characters set: https://github.com/YCG09/chinese_ocr/blob/master/train/char_std_5990.txt )
     - Each sample is fixed with 10 characters, and the characters are randomly intercepted from the sentences in the corpus
-    - Image resolution is 280x32  
-    ![](../../datasets/ch_doc1.jpg)  
-    ![](../../datasets/ch_doc3.jpg)  
+    - Image resolution is 280x32
+    ![](../../datasets/ch_doc1.jpg)
+    ![](../../datasets/ch_doc3.jpg)
 - **Download link**:https://pan.baidu.com/s/1QkI7kjah8SPHwOQ40rS1Pw (Password: lu7m)
 
 <a name="ICDAR2019-ArT"></a>
diff --git a/doc/doc_en/inference_en.md b/doc/doc_en/inference_en.md
index 95ac2c96b..ec52ed9d4 100755
--- a/doc/doc_en/inference_en.md
+++ b/doc/doc_en/inference_en.md
@@ -203,7 +203,7 @@ The visualized text detection results are saved to the `./inference_results` fol
 
 <a name="SAST_DETECTION"></a>
 ### 2.4 Sast Text Detection Model Inference
-#### (1). Quadrangle text detection model (ICDAR2015)  
+#### (1). Quadrangle text detection model (ICDAR2015)
 First, convert the model saved in the SAST text detection training process into an inference model. Taking the model based on the Resnet50_vd backbone network and trained on the ICDAR2015 English dataset as an example ([model download link](https://paddleocr.bj.bcebos.com/dygraph_v2.0/en/det_r50_vd_sast_icdar15_v2.0_train.tar)), you can use the following command to convert:
 
 ```
@@ -220,7 +220,7 @@ The visualized text detection results are saved to the `./inference_results` fol
 
 ![](../imgs_results/det_res_img_10_sast.jpg)
 
-#### (2). Curved text detection model (Total-Text)  
+#### (2). Curved text detection model (Total-Text)
 First, convert the model saved in the SAST text detection training process into an inference model. Taking the model based on the Resnet50_vd backbone network and trained on the Total-Text English dataset as an example ([model download link](https://paddleocr.bj.bcebos.com/dygraph_v2.0/en/det_r50_vd_sast_totaltext_v2.0_train.tar)), you can use the following command to convert:
 
 ```
diff --git a/doc/doc_en/knowledge_distillation_en.md b/doc/doc_en/knowledge_distillation_en.md
index 52725e5c0..334a8c4ba 100755
--- a/doc/doc_en/knowledge_distillation_en.md
+++ b/doc/doc_en/knowledge_distillation_en.md
@@ -267,14 +267,14 @@ Loss:
       model_name_list: ["Student", "Teacher"]  # For the prediction results of the distillation model, extract the output of these two sub-networks and calculate the CTC loss with gt
       key: head_out                            # In the sub-network output dict, take the corresponding tensor
   - DistillationDMLLoss:                       # DML loss function, inherited from the standard DMLLoss
-      weight: 1.0  
+      weight: 1.0
       act: "softmax"                           # Activation function, use it to process the input, can be softmax, sigmoid or None, the default is None
       model_name_pairs:                        # The subnet name pair used to calculate DML loss. If you want to calculate the DML loss of other subnets, you can continue to add it below the list
       - ["Student", "Teacher"]
       key: head_out
       multi_head: True                         # whether to use mult_head
       dis_head: ctc                            # assign the head name to calculate loss
-      name: dml_ctc                            # prefix name of the loss  
+      name: dml_ctc                            # prefix name of the loss
   - DistillationDMLLoss:                       # DML loss function, inherited from the standard DMLLoss
       weight: 0.5
       act: "softmax"                           # Activation function, use it to process the input, can be softmax, sigmoid or None, the default is None
@@ -285,11 +285,11 @@ Loss:
       dis_head: sar                            # assign the head name to calculate loss
       name: dml_sar                            # prefix name of the loss
   - DistillationDistanceLoss:                  # Distilled distance loss function
-      weight: 1.0  
+      weight: 1.0
       mode: "l2"                               # Support l1, l2 or smooth_l1
       model_name_pairs:                        # Calculate the distance loss of the subnet name pair
       - ["Student", "Teacher"]
-      key: backbone_out  
+      key: backbone_out
   - DistillationSARLoss:                       # SAR loss function based on distillation, inherited from standard SAR loss
       weight: 1.0                              # The weight of the loss function. In loss_config_list, each loss function must include this field
       model_name_list: ["Student", "Teacher"]  # For the prediction results of the distillation model, extract the output of these two sub-networks and calculate the SAR loss with gt
@@ -445,7 +445,7 @@ The following describes the configuration file parameters [ch_PP-OCRv3_det_cml.y
 
 ```
 Architecture:
-  name: DistillationModel  
+  name: DistillationModel
   algorithm: Distillation
   model_type: det
   Models:
@@ -468,7 +468,7 @@ Architecture:
         kernel_list: [7,2,2]
         k: 50
     Student:                         # Student model configuration for CML distillation
-      pretrained: ./pretrain_models/MobileNetV3_large_x0_5_pretrained  
+      pretrained: ./pretrain_models/MobileNetV3_large_x0_5_pretrained
       freeze_params: false
       return_all_feats: false
       model_type: det
@@ -486,7 +486,7 @@ Architecture:
         name: DBHead
         k: 50
     Student2:                          # Student2 model configuration for CML distillation
-      pretrained: ./pretrain_models/MobileNetV3_large_x0_5_pretrained  
+      pretrained: ./pretrain_models/MobileNetV3_large_x0_5_pretrained
       freeze_params: false
       return_all_feats: false
       model_type: det
diff --git a/doc/doc_en/recognition_en.md b/doc/doc_en/recognition_en.md
index 742f2ee8a..ad82e34de 100644
--- a/doc/doc_en/recognition_en.md
+++ b/doc/doc_en/recognition_en.md
@@ -3,7 +3,7 @@
 - [1. Data Preparation](#DATA_PREPARATION)
   * [1.1 Custom Dataset](#Custom_Dataset)
   * [1.2 Dataset Download](#Dataset_download)
-  * [1.3 Dictionary](#Dictionary)  
+  * [1.3 Dictionary](#Dictionary)
   * [1.4 Add Space Category](#Add_space_category)
   * [1.5 Data Augmentation](#Data_Augmentation)
 - [2. Training](#TRAINING)
@@ -323,7 +323,7 @@ Currently, the multi-language algorithms supported by PaddleOCR are:
 | :--------: |  :-------:   | :-------:  |   :-------:   |   :-----:   |  :-----:   | :-----:  |
 | rec_chinese_cht_lite_train.yml |  CRNN |   Mobilenet_v3 small 0.5 |  None   |  BiLSTM |  ctc  | chinese traditional  |
 | rec_en_lite_train.yml |  CRNN |   Mobilenet_v3 small 0.5 |  None   |  BiLSTM |  ctc  | English(Case sensitive)   |
-| rec_french_lite_train.yml |  CRNN |   Mobilenet_v3 small 0.5 |  None   |  BiLSTM |  ctc  | French |  
+| rec_french_lite_train.yml |  CRNN |   Mobilenet_v3 small 0.5 |  None   |  BiLSTM |  ctc  | French |
 | rec_ger_lite_train.yml |  CRNN |   Mobilenet_v3 small 0.5 |  None   |  BiLSTM |  ctc  | German   |
 | rec_japan_lite_train.yml |  CRNN |   Mobilenet_v3 small 0.5 |  None   |  BiLSTM |  ctc  | Japanese |
 | rec_korean_lite_train.yml |  CRNN |   Mobilenet_v3 small 0.5 |  None   |  BiLSTM |  ctc  | Korean  |
@@ -417,16 +417,16 @@ According to the `save_model_dir` and `save_epoch_step` fields set in the config
 
 ```
 output/rec/
-├── best_accuracy.pdopt  
-├── best_accuracy.pdparams  
-├── best_accuracy.states  
-├── config.yml  
-├── iter_epoch_3.pdopt  
-├── iter_epoch_3.pdparams  
-├── iter_epoch_3.states  
-├── latest.pdopt  
-├── latest.pdparams  
-├── latest.states  
+├── best_accuracy.pdopt
+├── best_accuracy.pdparams
+├── best_accuracy.states
+├── config.yml
+├── iter_epoch_3.pdopt
+├── iter_epoch_3.pdparams
+├── iter_epoch_3.states
+├── latest.pdopt
+├── latest.pdparams
+├── latest.states
 └── train.log
 ```
 
diff --git a/doc/doc_en/table_recognition_en.md b/doc/doc_en/table_recognition_en.md
index df8da92da..c53ec1c0d 100644
--- a/doc/doc_en/table_recognition_en.md
+++ b/doc/doc_en/table_recognition_en.md
@@ -114,7 +114,7 @@ After starting training normally, you will see the following log output:
 
 The following information is automatically printed in the log:
 
-|  Field   |   Meaning   |  
+|  Field   |   Meaning   |
 | :----: | :------: |
 |  epoch | current iteration round |
 |  global_step  | current iteration count |
@@ -299,13 +299,13 @@ According to the `save_model_dir` and `save_epoch_step` fields set in the config
 
 ```
 output/SLANet/
-├── best_accuracy.pdopt  
-├── best_accuracy.pdparams  
-├── best_accuracy.states  
-├── config.yml  
-├── latest.pdopt  
-├── latest.pdparams  
-├── latest.states  
+├── best_accuracy.pdopt
+├── best_accuracy.pdparams
+├── best_accuracy.states
+├── config.yml
+├── latest.pdopt
+├── latest.pdparams
+├── latest.states
 └── train.log
 ```
 Among them, best_accuracy.* is the best model on the evaluation set; latest.* is the model of the last epoch.
diff --git a/doc/doc_en/tree_en.md b/doc/doc_en/tree_en.md
index cf9ccb38d..cb188bff5 100644
--- a/doc/doc_en/tree_en.md
+++ b/doc/doc_en/tree_en.md
@@ -10,10 +10,10 @@ PaddleOCR
 │   │   ├── cls_mv3.yml                     // Training config, including backbone network, head, loss, optimizer and data
 │   ├── det                                 // Text detection config files
 │   │   ├── det_mv3_db.yml                  // Training config
-│   │   ...  
+│   │   ...
 │   └── rec                                 // Text recognition config files
 │       ├── rec_mv3_none_bilstm_ctc.yml     // CRNN config
-│       ...  
+│       ...
 ├── deploy                                  // Depoly
 │   ├── android_demo                        // Android demo
 │   │   ...
@@ -33,7 +33,7 @@ PaddleOCR
 │   │   ├── readme.md                       // Documentation
 │   │   ├── ...
 │   │   ├── src                             // Source code files
-│   │   │   ├── clipper.cpp  
+│   │   │   ├── clipper.cpp
 │   │   │   ├── config.cpp
 │   │   │   ├── main.cpp
 │   │   │   ├── ocr_cls.cpp
@@ -58,12 +58,12 @@ PaddleOCR
 │   ├── hubserving                          // hubserving
 │   │   ├── ocr_cls                         // Angle class
 │   │   │   ├── config.json                 // Serving config
-│   │   │   ├── __init__.py  
+│   │   │   ├── __init__.py
 │   │   │   ├── module.py                   // Model
 │   │   │   └── params.py                   // Parameters
 │   │   ├── ocr_det                         // Text detection
 │   │   │   ├── config.json                 // serving config
-│   │   │   ├── __init__.py  
+│   │   │   ├── __init__.py
 │   │   │   ├── module.py                   // Model
 │   │   │   └── params.py                   // Parameters
 │   │   ├── ocr_rec                         // Text recognition
@@ -103,7 +103,7 @@ PaddleOCR
 │   │   ├── readme.md                       // Documentation
 │   │   ├── rec_local_server.py             // Text recognition fast version
 │   │   └── rec_web_server.py               // Text recognition full version
-│   └── slim  
+│   └── slim
 │       └── quantization                    // Quantization
 │           ├── export_model.py             // Export model
 │           ├── quant.py                    // Quantization script
@@ -188,7 +188,7 @@ PaddleOCR
 │   │   └── sast_postprocess.py             // SAST post-processing
 │   └── utils                               // utils
 │       ├── dict                            // Minor language dictionary
-│            ....  
+│            ....
 │       ├── ic15_dict.txt                   // English number dictionary, case sensitive
 │       ├── ppocr_keys_v1.txt               // Chinese dictionary for training Chinese models
 │       ├── logging.py                      // logger
diff --git a/doc/doc_en/tricks_en.md b/doc/doc_en/tricks_en.md
index 4d59857a0..92edce266 100644
--- a/doc/doc_en/tricks_en.md
+++ b/doc/doc_en/tricks_en.md
@@ -50,7 +50,7 @@ Here we have sorted out some Chinese OCR training and prediction tricks, which a
         resized_image /= 0.5
         padding_im = np.zeros((imgC, imgH, imgW), dtype=np.float32)
         padding_im[:, :, 0:resized_w] = resized_image
-        return padding_im  
+        return padding_im
   ```
 
 <a name="SpaceRecognition"></a>
diff --git a/doc/doc_i18n/README_Ру́сский_язы́к.md b/doc/doc_i18n/README_Ру́сский_язы́к.md
index 11fe5d451..79389845c 100644
--- a/doc/doc_i18n/README_Ру́сский_язы́к.md
+++ b/doc/doc_i18n/README_Ру́сский_язы́к.md
@@ -43,13 +43,13 @@ PaddleOCR стремится создавать многоязычные, пот
 - **🔥2022.7 Выпуск [Коллекция приложений сцены OCR](../../applications/README_en.md)**
 - Выпуск **9 вертикальных моделей**, таких как цифровая трубка, ЖК-экран, номерной знак, модель распознавания рукописного ввода, высокоточная модель SVTR и т. д., охватывающих основные вертикальные приложения OCR в целом, производственной, финансовой и транспортной отраслях.
 - **🔥2022.5.9 Выпуск PaddleOCR [Выпуск /2.5](https://github.com/PaddlePaddle/PaddleOCR/tree/release/2.5)**
-- Выпускать [PP-OCRv3](../doc_en/ppocr_introduction_en.md#pp-ocrv3): При сопоставимой скорости эффект китайской сцены улучшен на 5% по сравнению с ПП-OCRRv2, эффект английской сцены улучшен на 11%, а средняя точность распознавания 80 языковых многоязычных моделей улучшена более чем на 5%.  
+- Выпускать [PP-OCRv3](../doc_en/ppocr_introduction_en.md#pp-ocrv3): При сопоставимой скорости эффект китайской сцены улучшен на 5% по сравнению с ПП-OCRRv2, эффект английской сцены улучшен на 11%, а средняя точность распознавания 80 языковых многоязычных моделей улучшена более чем на 5%.
  - Выпускать [PPOCRLabelv2](https://github.com/PFCCLab/PPOCRLabel/blob/main/README.md): Добавьте функцию аннотации для задачи распознавания таблиц, задачи извлечения ключевой информации и изображения неправильного текста.
      - Выпустить интерактивную электронную книгу [*"Погружение в OCR"*](../doc_en/ocr_book_en.md), охватывает передовую теорию и практику кодирования технологии полного стека OCR.
 - [подробнее](../doc_en/update_en.md)
 
 
-## 🌟 Функции  
+## 🌟 Функции
 
 PaddleOCR поддерживает множество передовых алгоритмов, связанных с распознаванием текста, и разработала промышленные модели/решения. [PP-OCR](../doc_en/ppocr_introduction_en.md) и [PP-Structure](./ppstructure/README.md) на этой основе и пройти весь процесс производства данных, обучения модели, сжатия, логического вывода и развертывания.
 
@@ -119,11 +119,11 @@ paddleocr --image_dir /your/test/image.jpg --lang=ru
         - [Мобильный](./deploy/lite/readme.md)
         - [Paddle2ONNX](./deploy/paddle2onnx/readme.md)
         -[ВеслоОблако](./deploy/paddlecloud/README.md)
-        - [Benchmark](../doc_en/benchmark_en.md)  
+        - [Benchmark](../doc_en/benchmark_en.md)
 - [PP-Structure 🔥](../../ppstructure/README.md)
   - [Быстрый старт](../../ppstructure/docs/quickstart_en.md)
     - [Модель Zoo](../../ppstructure/docs/models_list_en.md)
-    - [Модель тренировки](../doc_en/training_en.md)  
+    - [Модель тренировки](../doc_en/training_en.md)
    - [Анализ макета](../../ppstructure/layout/README.md)
         - [Распознавание таблиц](../../ppstructure/table/README.md)
         - [Извлечение ключевой информации](../../ppstructure/kie/README.md)
@@ -215,7 +215,7 @@ paddleocr --image_dir /your/test/image.jpg --lang=ru
 3. RE (Извлечение отношений)
 <div align="center">
     <img src="https://user-images.githubusercontent.com/25809855/186094813-3a8e16cc-42e5-4982-b9f4-0134dfb5688d.png" width="600">
-</div>  
+</div>
 <div align="center">
     <img src="https://user-images.githubusercontent.com/14270174/185393805-c67ff571-cf7e-4217-a4b0-8b396c4f22bb.jpg" width="600">
 </div>
diff --git a/doc/doc_i18n/README_हिन्द.md b/doc/doc_i18n/README_हिन्द.md
index c493327dc..0598647c8 100644
--- a/doc/doc_i18n/README_हिन्द.md
+++ b/doc/doc_i18n/README_हिन्द.md
@@ -120,7 +120,7 @@ paddleocr --image_dir /your/test/image.jpg --lang=hi
 - [PP-Structure 🔥](../../ppstructure/README.md)
     - [क्विक स्टार्ट](../../ppstructure/docs/quickstart_en.md)
     - [मॉडल जू](../../ppstructure/docs/models_list_en.md)
-    - [मॉडल ट्रेनिंग](../doc_en/training_en.md)  
+    - [मॉडल ट्रेनिंग](../doc_en/training_en.md)
         - [लेआउट एनालाइस](../../ppstructure/layout/README.md)
         - [टेबल रिकोगनाइजेशन](../../ppstructure/table/README.md)
         - [की इंफॉर्मेशन एक्स्ट्रेक्शन](../../ppstructure/kie/README.md)
@@ -133,7 +133,7 @@ paddleocr --image_dir /your/test/image.jpg --lang=hi
     - [टेक्स्ट रिकोगनाइजेशन](../doc_en/algorithm_overview_en.md)
     - [एंड-टू-एंड ओसीआर](../doc_en/algorithm_overview_en.md)
     - [टेबल रिकोगनाइजेशन](../doc_en/algorithm_overview_en.md)
-    - [की इंफॉर्मेशन एक्स्ट्रेक्शन](../doc_en/algorithm_overview_en.md)  
+    - [की इंफॉर्मेशन एक्स्ट्रेक्शन](../doc_en/algorithm_overview_en.md)
     - [पैडलओसीआर में नए एल्गोरिदम जोड़ें](../doc_en/add_new_algorithm_en.md)
 - डेटा एनोटेशन और सिंथेसिस
     - [सेमी-ऑटोमैटिक एनोटेशन टूल: PPओसीआरलेबल](https://github.com/PFCCLab/PPOCRLabel/blob/main/README.md)
@@ -213,7 +213,7 @@ paddleocr --image_dir /your/test/image.jpg --lang=hi
 3. RE (रिलेशन एक्सट्रैक्शन)
 <div align="center">
     <img src="https://user-images.githubusercontent.com/25809855/186094813-3a8e16cc-42e5-4982-b9f4-0134dfb5688d.png" width="600">
-</div>  
+</div>
 <div align="center">
     <img src="https://user-images.githubusercontent.com/14270174/185393805-c67ff571-cf7e-4217-a4b0-8b396c4f22bb.jpg" width="600">
 </div>
diff --git a/doc/doc_i18n/README_日本語.md b/doc/doc_i18n/README_日本語.md
index 215c825c7..7d3121f3e 100644
--- a/doc/doc_i18n/README_日本語.md
+++ b/doc/doc_i18n/README_日本語.md
@@ -116,11 +116,11 @@ paddleocr --image_dir /your/test/image.jpg --lang=japan # change for i18n abbr
         - [モバイル](./deploy/lite/readme.md)
         - [Paddle2ONNX](./deploy/paddle2onnx/readme.md)
         - [PaddleCloud](./deploy/paddlecloud/README.md)
-        - [Benchmark](../doc_en/benchmark_en.md)  
+        - [Benchmark](../doc_en/benchmark_en.md)
 - [PP-Structure 🔥](../../ppstructure/README.md)
     - [クイックスタート](../../ppstructure/docs/quickstart_en.md)
     - [Model Zoo](../../ppstructure/docs/models_list_en.md)
-    - [トレーニング モデル](../doc_en/training_en.md)  
+    - [トレーニング モデル](../doc_en/training_en.md)
         - [レイアウト分析](../../ppstructure/layout/README.md)
         - [表認識](../../ppstructure/table/README.md)
         - [キー情報抽出](../../ppstructure/kie/README.md)
@@ -133,7 +133,7 @@ paddleocr --image_dir /your/test/image.jpg --lang=japan # change for i18n abbr
     - [テキスト認識](../doc_en/algorithm_overview_en.md)
     - [エンド・ツー・エンド OCR](../doc_en/algorithm_overview_en.md)
     - [表認識](../doc_en/algorithm_overview_en.md)
-    - [キー情報抽出](../doc_en/algorithm_overview_en.md)  
+    - [キー情報抽出](../doc_en/algorithm_overview_en.md)
     - [PaddleOCR に新しいアルゴリズムを追加する](../doc_en/add_new_algorithm_en.md)
 - データの注釈と合成
     - [半自動注釈ツール: PPOCRLabel](https://github.com/PFCCLab/PPOCRLabel/blob/main/README.md)
@@ -212,7 +212,7 @@ paddleocr --image_dir /your/test/image.jpg --lang=japan # change for i18n abbr
 3. RE (関係抽出)
 <div align="center">
     <img src="https://user-images.githubusercontent.com/25809855/186094813-3a8e16cc-42e5-4982-b9f4-0134dfb5688d.png" width="600">
-</div>  
+</div>
 <div align="center">
     <img src="https://user-images.githubusercontent.com/14270174/185393805-c67ff571-cf7e-4217-a4b0-8b396c4f22bb.jpg" width="600">
 </div>
diff --git a/doc/doc_i18n/README_한국어.md b/doc/doc_i18n/README_한국어.md
index 30ff079ab..e9af69f5d 100644
--- a/doc/doc_i18n/README_한국어.md
+++ b/doc/doc_i18n/README_한국어.md
@@ -114,11 +114,11 @@ paddleocr --image_dir /your/test/image.jpg --lang=korean
         - [모바일](./deploy/lite/readme.md)
         - [Paddle2ONNX](./deploy/paddle2onnx/readme.md)
         - [패들 클라우드](./deploy/paddlecloud/README.md)
-        - [Benchmark](../doc_en/benchmark_en.md)  
+        - [Benchmark](../doc_en/benchmark_en.md)
 - [PP-Structure 🔥](../../ppstructure/README.md)
     - [신속한 시작](../../ppstructure/docs/quickstart_en.md)
     - [동물원 모델](../../ppstructure/docs/models_list_en.md)
-    - [모델 훈련](../doc_en/training_en.md)  
+    - [모델 훈련](../doc_en/training_en.md)
         - [레이아웃 분석](../../ppstructure/layout/README.md)
         - [표 인식](../../ppstructure/table/README.md)
         - [핵심 정보 추출](../../ppstructure/kie/README.md)
@@ -131,7 +131,7 @@ paddleocr --image_dir /your/test/image.jpg --lang=korean
     - [텍스트 인식](../doc_en/algorithm_overview_en.md)
     - [종단종OCR](../doc_en/algorithm_overview_en.md)
     - [표 인식](../doc_en/algorithm_overview_en.md)
-    - [핵심 정보 추출](../doc_en/algorithm_overview_en.md)  
+    - [핵심 정보 추출](../doc_en/algorithm_overview_en.md)
     - [PaddleOCR에 신규 알고리즘 추가](../doc_en/add_new_algorithm_en.md)
 -  데이터 주석 및 합성
     - [반-자동 주석 툴: PPOCRLabel](https://github.com/PFCCLab/PPOCRLabel/blob/main/README.md)
@@ -210,7 +210,7 @@ paddleocr --image_dir /your/test/image.jpg --lang=korean
 3. RE (관계 추출)
 <div align="center">
     <img src="https://user-images.githubusercontent.com/25809855/186094813-3a8e16cc-42e5-4982-b9f4-0134dfb5688d.png" width="600">
-</div>  
+</div>
 <div align="center">
     <img src="https://user-images.githubusercontent.com/14270174/185393805-c67ff571-cf7e-4217-a4b0-8b396c4f22bb.jpg" width="600">
 </div>
diff --git a/ppocr/data/imaug/drrg_targets.py b/ppocr/data/imaug/drrg_targets.py
index 5a89754aa..8eeef2640 100644
--- a/ppocr/data/imaug/drrg_targets.py
+++ b/ppocr/data/imaug/drrg_targets.py
@@ -439,9 +439,9 @@ class DRRGTargets(object):
             )
 
         inner_center_sample_mask = np.zeros_like(center_sample_mask)
-        inner_center_sample_mask[
-            margin : h - margin, margin : w - margin
-        ] = center_sample_mask[margin : h - margin, margin : w - margin]
+        inner_center_sample_mask[margin : h - margin, margin : w - margin] = (
+            center_sample_mask[margin : h - margin, margin : w - margin]
+        )
         kernel_size = int(np.clip(max_rand_half_height, 7, 21))
         inner_center_sample_mask = cv2.erode(
             inner_center_sample_mask, np.ones((kernel_size, kernel_size), np.uint8)
diff --git a/ppocr/data/imaug/east_process.py b/ppocr/data/imaug/east_process.py
index 76515d5a4..d306be732 100644
--- a/ppocr/data/imaug/east_process.py
+++ b/ppocr/data/imaug/east_process.py
@@ -12,7 +12,7 @@
 # See the License for the specific language governing permissions and
 # limitations under the License.
 """
-This code is refered from: 
+This code is refered from:
 https://github.com/songdejia/EAST/blob/master/data_utils.py
 """
 import math
diff --git a/ppocr/data/imaug/sast_process.py b/ppocr/data/imaug/sast_process.py
index 91e1f9b22..d3e203f9f 100644
--- a/ppocr/data/imaug/sast_process.py
+++ b/ppocr/data/imaug/sast_process.py
@@ -12,7 +12,7 @@
 # See the License for the specific language governing permissions and
 # limitations under the License.
 """
-This part code is refered from: 
+This part code is refered from:
 https://github.com/songdejia/EAST/blob/master/data_utils.py
 """
 import math
diff --git a/ppocr/data/imaug/table_ops.py b/ppocr/data/imaug/table_ops.py
index ac21c3077..83b119cd5 100644
--- a/ppocr/data/imaug/table_ops.py
+++ b/ppocr/data/imaug/table_ops.py
@@ -48,10 +48,14 @@ class GenTableMask(object):
         in_text = False  # 是否遍历到了字符区内
         box_list = []
         for i in range(len(project_val_array)):
-            if in_text == False and project_val_array[i] > spilt_threshold:  # 进入字符区了
+            if (
+                in_text == False and project_val_array[i] > spilt_threshold
+            ):  # 进入字符区了
                 in_text = True
                 start_idx = i
-            elif project_val_array[i] <= spilt_threshold and in_text == True:  # 进入空白区了
+            elif (
+                project_val_array[i] <= spilt_threshold and in_text == True
+            ):  # 进入空白区了
                 end_idx = i
                 in_text = False
                 if end_idx - start_idx <= 2:
@@ -95,10 +99,14 @@ class GenTableMask(object):
         box_list = []
         spilt_threshold = 0
         for i in range(len(project_val_array)):
-            if in_text == False and project_val_array[i] > spilt_threshold:  # 进入字符区了
+            if (
+                in_text == False and project_val_array[i] > spilt_threshold
+            ):  # 进入字符区了
                 in_text = True
                 start_idx = i
-            elif project_val_array[i] <= spilt_threshold and in_text == True:  # 进入空白区了
+            elif (
+                project_val_array[i] <= spilt_threshold and in_text == True
+            ):  # 进入空白区了
                 end_idx = i
                 in_text = False
                 if end_idx - start_idx <= 2:
diff --git a/ppocr/ext_op/__init__.py b/ppocr/ext_op/__init__.py
index 8307f3810..b0be1ada0 100644
--- a/ppocr/ext_op/__init__.py
+++ b/ppocr/ext_op/__init__.py
@@ -1 +1 @@
-from .roi_align_rotated.roi_align_rotated import RoIAlignRotated
+from .roi_align_rotated.roi_align_rotated import RoIAlignRotated
diff --git a/ppocr/ext_op/roi_align_rotated/roi_align_rotated.cu b/ppocr/ext_op/roi_align_rotated/roi_align_rotated.cu
index b04766807..d9baefb7d 100644
--- a/ppocr/ext_op/roi_align_rotated/roi_align_rotated.cu
+++ b/ppocr/ext_op/roi_align_rotated/roi_align_rotated.cu
@@ -378,4 +378,4 @@ std::vector<paddle::Tensor> RoIAlignRotatedCUDABackward(
                 grad_input.data<data_t>());
       }));
   return {grad_input};
-}
\ No newline at end of file
+}
diff --git a/ppocr/losses/basic_loss.py b/ppocr/losses/basic_loss.py
index 8ecfd20af..e85911a14 100644
--- a/ppocr/losses/basic_loss.py
+++ b/ppocr/losses/basic_loss.py
@@ -231,9 +231,7 @@ class DKDLoss(nn.Layer):
         pred_student = self._cat_mask(pred_student, gt_mask, other_mask)
         pred_teacher = self._cat_mask(pred_teacher, gt_mask, other_mask)
         log_pred_student = paddle.log(pred_student)
-        tckd_loss = self._kl_div(log_pred_student, pred_teacher) * (
-            self.temperature**2
-        )
+        tckd_loss = self._kl_div(log_pred_student, pred_teacher) * (self.temperature**2)
         pred_teacher_part2 = F.softmax(
             logits_teacher / self.temperature - 1000.0 * gt_mask, axis=1
         )
diff --git a/ppocr/losses/distillation_loss.py b/ppocr/losses/distillation_loss.py
index 98c9c546a..6d6d4923e 100644
--- a/ppocr/losses/distillation_loss.py
+++ b/ppocr/losses/distillation_loss.py
@@ -113,9 +113,9 @@ class DistillationDMLLoss(DMLLoss):
                     loss = super().forward(out1, out2)
                 if isinstance(loss, dict):
                     for key in loss:
-                        loss_dict[
-                            "{}_{}_{}_{}".format(key, pair[0], pair[1], idx)
-                        ] = loss[key]
+                        loss_dict["{}_{}_{}_{}".format(key, pair[0], pair[1], idx)] = (
+                            loss[key]
+                        )
                 else:
                     loss_dict["{}_{}".format(self.name, idx)] = loss
             else:
@@ -218,9 +218,9 @@ class DistillationKLDivLoss(KLDivLoss):
                     loss = super().forward(out1, out2)
                 if isinstance(loss, dict):
                     for key in loss:
-                        loss_dict[
-                            "{}_{}_{}_{}".format(key, pair[0], pair[1], idx)
-                        ] = loss[key]
+                        loss_dict["{}_{}_{}_{}".format(key, pair[0], pair[1], idx)] = (
+                            loss[key]
+                        )
                 else:
                     loss_dict["{}_{}".format(self.name, idx)] = loss
             else:
@@ -329,9 +329,9 @@ class DistillationDKDLoss(DKDLoss):
                     loss = super().forward(out1, out2)
                 if isinstance(loss, dict):
                     for key in loss:
-                        loss_dict[
-                            "{}_{}_{}_{}".format(key, pair[0], pair[1], idx)
-                        ] = loss[key]
+                        loss_dict["{}_{}_{}_{}".format(key, pair[0], pair[1], idx)] = (
+                            loss[key]
+                        )
                 else:
                     loss_dict["{}_{}".format(self.name, idx)] = loss
             else:
@@ -472,9 +472,9 @@ class DistillationKLDivLoss(KLDivLoss):
                     loss = super().forward(out1, out2)
                 if isinstance(loss, dict):
                     for key in loss:
-                        loss_dict[
-                            "{}_{}_{}_{}".format(key, pair[0], pair[1], idx)
-                        ] = loss[key]
+                        loss_dict["{}_{}_{}_{}".format(key, pair[0], pair[1], idx)] = (
+                            loss[key]
+                        )
                 else:
                     loss_dict["{}_{}".format(self.name, idx)] = loss
             else:
@@ -583,9 +583,9 @@ class DistillationDKDLoss(DKDLoss):
                     loss = super().forward(out1, out2)
                 if isinstance(loss, dict):
                     for key in loss:
-                        loss_dict[
-                            "{}_{}_{}_{}".format(key, pair[0], pair[1], idx)
-                        ] = loss[key]
+                        loss_dict["{}_{}_{}_{}".format(key, pair[0], pair[1], idx)] = (
+                            loss[key]
+                        )
                 else:
                     loss_dict["{}_{}".format(self.name, idx)] = loss
             else:
diff --git a/ppocr/losses/rec_can_loss.py b/ppocr/losses/rec_can_loss.py
index 6ec0b794b..1bc58f585 100644
--- a/ppocr/losses/rec_can_loss.py
+++ b/ppocr/losses/rec_can_loss.py
@@ -1,88 +1,88 @@
-# copyright (c) 2021 PaddlePaddle Authors. All Rights Reserve.
-#
-# Licensed under the Apache License, Version 2.0 (the "License");
-# you may not use this file except in compliance with the License.
-# You may obtain a copy of the License at
-#
-#    http://www.apache.org/licenses/LICENSE-2.0
-#
-# Unless required by applicable law or agreed to in writing, software
-# distributed under the License is distributed on an "AS IS" BASIS,
-# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-# See the License for the specific language governing permissions and
-# limitations under the License.
-"""
-This code is refer from:
-https://github.com/LBH1024/CAN/models/can.py
-"""
-
-import paddle
-import paddle.nn as nn
-import numpy as np
-
-
-class CANLoss(nn.Layer):
-    """
-    CANLoss is consist of two part:
-        word_average_loss: average accuracy of the symbol
-        counting_loss: counting loss of every symbol
-    """
-
-    def __init__(self):
-        super(CANLoss, self).__init__()
-
-        self.use_label_mask = False
-        self.out_channel = 111
-        self.cross = (
-            nn.CrossEntropyLoss(reduction="none")
-            if self.use_label_mask
-            else nn.CrossEntropyLoss()
-        )
-        self.counting_loss = nn.SmoothL1Loss(reduction="mean")
-        self.ratio = 16
-
-    def forward(self, preds, batch):
-        word_probs = preds[0]
-        counting_preds = preds[1]
-        counting_preds1 = preds[2]
-        counting_preds2 = preds[3]
-        labels = batch[2]
-        labels_mask = batch[3]
-        counting_labels = gen_counting_label(labels, self.out_channel, True)
-        counting_loss = (
-            self.counting_loss(counting_preds1, counting_labels)
-            + self.counting_loss(counting_preds2, counting_labels)
-            + self.counting_loss(counting_preds, counting_labels)
-        )
-
-        word_loss = self.cross(
-            paddle.reshape(word_probs, [-1, word_probs.shape[-1]]),
-            paddle.reshape(labels, [-1]),
-        )
-        word_average_loss = (
-            paddle.sum(paddle.reshape(word_loss * labels_mask, [-1]))
-            / (paddle.sum(labels_mask) + 1e-10)
-            if self.use_label_mask
-            else word_loss
-        )
-        loss = word_average_loss + counting_loss
-        return {"loss": loss}
-
-
-def gen_counting_label(labels, channel, tag):
-    b, t = labels.shape
-    counting_labels = np.zeros([b, channel])
-
-    if tag:
-        ignore = [0, 1, 107, 108, 109, 110]
-    else:
-        ignore = []
-    for i in range(b):
-        for j in range(t):
-            k = labels[i][j]
-            if k in ignore:
-                continue
-            else:
-                counting_labels[i][k] += 1
-    counting_labels = paddle.to_tensor(counting_labels, dtype="float32")
-    return counting_labels
+# copyright (c) 2021 PaddlePaddle Authors. All Rights Reserve.
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+#    http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+"""
+This code is refer from:
+https://github.com/LBH1024/CAN/models/can.py
+"""
+
+import paddle
+import paddle.nn as nn
+import numpy as np
+
+
+class CANLoss(nn.Layer):
+    """
+    CANLoss is consist of two part:
+        word_average_loss: average accuracy of the symbol
+        counting_loss: counting loss of every symbol
+    """
+
+    def __init__(self):
+        super(CANLoss, self).__init__()
+
+        self.use_label_mask = False
+        self.out_channel = 111
+        self.cross = (
+            nn.CrossEntropyLoss(reduction="none")
+            if self.use_label_mask
+            else nn.CrossEntropyLoss()
+        )
+        self.counting_loss = nn.SmoothL1Loss(reduction="mean")
+        self.ratio = 16
+
+    def forward(self, preds, batch):
+        word_probs = preds[0]
+        counting_preds = preds[1]
+        counting_preds1 = preds[2]
+        counting_preds2 = preds[3]
+        labels = batch[2]
+        labels_mask = batch[3]
+        counting_labels = gen_counting_label(labels, self.out_channel, True)
+        counting_loss = (
+            self.counting_loss(counting_preds1, counting_labels)
+            + self.counting_loss(counting_preds2, counting_labels)
+            + self.counting_loss(counting_preds, counting_labels)
+        )
+
+        word_loss = self.cross(
+            paddle.reshape(word_probs, [-1, word_probs.shape[-1]]),
+            paddle.reshape(labels, [-1]),
+        )
+        word_average_loss = (
+            paddle.sum(paddle.reshape(word_loss * labels_mask, [-1]))
+            / (paddle.sum(labels_mask) + 1e-10)
+            if self.use_label_mask
+            else word_loss
+        )
+        loss = word_average_loss + counting_loss
+        return {"loss": loss}
+
+
+def gen_counting_label(labels, channel, tag):
+    b, t = labels.shape
+    counting_labels = np.zeros([b, channel])
+
+    if tag:
+        ignore = [0, 1, 107, 108, 109, 110]
+    else:
+        ignore = []
+    for i in range(b):
+        for j in range(t):
+            k = labels[i][j]
+            if k in ignore:
+                continue
+            else:
+                counting_labels[i][k] += 1
+    counting_labels = paddle.to_tensor(counting_labels, dtype="float32")
+    return counting_labels
diff --git a/ppocr/losses/rec_rfl_loss.py b/ppocr/losses/rec_rfl_loss.py
index 3af677f35..b2b185693 100644
--- a/ppocr/losses/rec_rfl_loss.py
+++ b/ppocr/losses/rec_rfl_loss.py
@@ -12,7 +12,7 @@
 # See the License for the specific language governing permissions and
 # limitations under the License.
 """
-This code is refer from: 
+This code is refer from:
 https://github.com/hikopensource/DAVAR-Lab-OCR/blob/main/davarocr/davar_common/models/loss/cross_entropy_loss.py
 """
 from __future__ import absolute_import
diff --git a/ppocr/losses/rec_satrn_loss.py b/ppocr/losses/rec_satrn_loss.py
index b198693a9..16f1b6acf 100644
--- a/ppocr/losses/rec_satrn_loss.py
+++ b/ppocr/losses/rec_satrn_loss.py
@@ -12,7 +12,7 @@
 # See the License for the specific language governing permissions and
 # limitations under the License.
 """
-This code is refer from: 
+This code is refer from:
 https://github.com/open-mmlab/mmocr/blob/1.x/mmocr/models/textrecog/module_losses/ce_module_loss.py
 """
 from __future__ import absolute_import
diff --git a/ppocr/losses/rec_vl_loss.py b/ppocr/losses/rec_vl_loss.py
index 34c470e37..6c41177e1 100644
--- a/ppocr/losses/rec_vl_loss.py
+++ b/ppocr/losses/rec_vl_loss.py
@@ -12,7 +12,7 @@
 # See the License for the specific language governing permissions and
 # limitations under the License.
 """
-This code is refer from: 
+This code is refer from:
 https://github.com/wangyuxin87/VisionLAN
 """
 
diff --git a/ppocr/modeling/backbones/det_resnet.py b/ppocr/modeling/backbones/det_resnet.py
index ff059610c..384282932 100644
--- a/ppocr/modeling/backbones/det_resnet.py
+++ b/ppocr/modeling/backbones/det_resnet.py
@@ -185,9 +185,11 @@ class ResNet(nn.Layer):
                     bottleneck_block = self.add_sublayer(
                         conv_name,
                         BottleneckBlock(
-                            num_channels=num_channels[block]
-                            if i == 0
-                            else num_filters[block] * 4,
+                            num_channels=(
+                                num_channels[block]
+                                if i == 0
+                                else num_filters[block] * 4
+                            ),
                             num_filters=num_filters[block],
                             stride=2 if i == 0 and block != 0 else 1,
                             shortcut=shortcut,
@@ -208,9 +210,9 @@ class ResNet(nn.Layer):
                     basic_block = self.add_sublayer(
                         conv_name,
                         BasicBlock(
-                            num_channels=num_channels[block]
-                            if i == 0
-                            else num_filters[block],
+                            num_channels=(
+                                num_channels[block] if i == 0 else num_filters[block]
+                            ),
                             num_filters=num_filters[block],
                             stride=2 if i == 0 and block != 0 else 1,
                             shortcut=shortcut,
diff --git a/ppocr/modeling/backbones/det_resnet_vd.py b/ppocr/modeling/backbones/det_resnet_vd.py
index 1d26d5789..070ba3c97 100644
--- a/ppocr/modeling/backbones/det_resnet_vd.py
+++ b/ppocr/modeling/backbones/det_resnet_vd.py
@@ -316,9 +316,11 @@ class ResNet_vd(nn.Layer):
                     bottleneck_block = self.add_sublayer(
                         "bb_%d_%d" % (block, i),
                         BottleneckBlock(
-                            in_channels=num_channels[block]
-                            if i == 0
-                            else num_filters[block] * 4,
+                            in_channels=(
+                                num_channels[block]
+                                if i == 0
+                                else num_filters[block] * 4
+                            ),
                             out_channels=num_filters[block],
                             stride=2 if i == 0 and block != 0 else 1,
                             shortcut=shortcut,
@@ -339,9 +341,9 @@ class ResNet_vd(nn.Layer):
                     basic_block = self.add_sublayer(
                         "bb_%d_%d" % (block, i),
                         BasicBlock(
-                            in_channels=num_channels[block]
-                            if i == 0
-                            else num_filters[block],
+                            in_channels=(
+                                num_channels[block] if i == 0 else num_filters[block]
+                            ),
                             out_channels=num_filters[block],
                             stride=2 if i == 0 and block != 0 else 1,
                             shortcut=shortcut,
diff --git a/ppocr/modeling/backbones/det_resnet_vd_sast.py b/ppocr/modeling/backbones/det_resnet_vd_sast.py
index 7cb349afc..6769dd9f2 100644
--- a/ppocr/modeling/backbones/det_resnet_vd_sast.py
+++ b/ppocr/modeling/backbones/det_resnet_vd_sast.py
@@ -261,9 +261,11 @@ class ResNet_SAST(nn.Layer):
                     bottleneck_block = self.add_sublayer(
                         "bb_%d_%d" % (block, i),
                         BottleneckBlock(
-                            in_channels=num_channels[block]
-                            if i == 0
-                            else num_filters[block] * 4,
+                            in_channels=(
+                                num_channels[block]
+                                if i == 0
+                                else num_filters[block] * 4
+                            ),
                             out_channels=num_filters[block],
                             stride=2 if i == 0 and block != 0 else 1,
                             shortcut=shortcut,
@@ -284,9 +286,9 @@ class ResNet_SAST(nn.Layer):
                     basic_block = self.add_sublayer(
                         "bb_%d_%d" % (block, i),
                         BasicBlock(
-                            in_channels=num_channels[block]
-                            if i == 0
-                            else num_filters[block],
+                            in_channels=(
+                                num_channels[block] if i == 0 else num_filters[block]
+                            ),
                             out_channels=num_filters[block],
                             stride=2 if i == 0 and block != 0 else 1,
                             shortcut=shortcut,
diff --git a/ppocr/modeling/backbones/e2e_resnet_vd_pg.py b/ppocr/modeling/backbones/e2e_resnet_vd_pg.py
index 16defc771..25738cd3e 100644
--- a/ppocr/modeling/backbones/e2e_resnet_vd_pg.py
+++ b/ppocr/modeling/backbones/e2e_resnet_vd_pg.py
@@ -241,9 +241,11 @@ class ResNet(nn.Layer):
                     bottleneck_block = self.add_sublayer(
                         "bb_%d_%d" % (block, i),
                         BottleneckBlock(
-                            in_channels=num_channels[block]
-                            if i == 0
-                            else num_filters[block] * 4,
+                            in_channels=(
+                                num_channels[block]
+                                if i == 0
+                                else num_filters[block] * 4
+                            ),
                             out_channels=num_filters[block],
                             stride=2 if i == 0 and block != 0 else 1,
                             shortcut=shortcut,
@@ -264,9 +266,9 @@ class ResNet(nn.Layer):
                     basic_block = self.add_sublayer(
                         "bb_%d_%d" % (block, i),
                         BasicBlock(
-                            in_channels=num_channels[block]
-                            if i == 0
-                            else num_filters[block],
+                            in_channels=(
+                                num_channels[block] if i == 0 else num_filters[block]
+                            ),
                             out_channels=num_filters[block],
                             stride=2 if i == 0 and block != 0 else 1,
                             shortcut=shortcut,
diff --git a/ppocr/modeling/backbones/rec_densenet.py b/ppocr/modeling/backbones/rec_densenet.py
index ad6e9e5c6..3e6458490 100644
--- a/ppocr/modeling/backbones/rec_densenet.py
+++ b/ppocr/modeling/backbones/rec_densenet.py
@@ -1,150 +1,150 @@
-# copyright (c) 2020 PaddlePaddle Authors. All Rights Reserve.
-#
-# Licensed under the Apache License, Version 2.0 (the "License");
-# you may not use this file except in compliance with the License.
-# You may obtain a copy of the License at
-#
-#    http://www.apache.org/licenses/LICENSE-2.0
-#
-# Unless required by applicable law or agreed to in writing, software
-# distributed under the License is distributed on an "AS IS" BASIS,
-# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-# See the License for the specific language governing permissions and
-# limitations under the License.
-"""
-This code is refer from:
-https://github.com/LBH1024/CAN/models/densenet.py
-
-"""
-
-from __future__ import absolute_import
-from __future__ import division
-from __future__ import print_function
-
-import math
-import paddle
-import paddle.nn as nn
-import paddle.nn.functional as F
-
-
-class Bottleneck(nn.Layer):
-    def __init__(self, nChannels, growthRate, use_dropout):
-        super(Bottleneck, self).__init__()
-        interChannels = 4 * growthRate
-        self.bn1 = nn.BatchNorm2D(interChannels)
-        self.conv1 = nn.Conv2D(
-            nChannels, interChannels, kernel_size=1, bias_attr=None
-        )  # Xavier initialization
-        self.bn2 = nn.BatchNorm2D(growthRate)
-        self.conv2 = nn.Conv2D(
-            interChannels, growthRate, kernel_size=3, padding=1, bias_attr=None
-        )  # Xavier initialization
-        self.use_dropout = use_dropout
-        self.dropout = nn.Dropout(p=0.2)
-
-    def forward(self, x):
-        out = F.relu(self.bn1(self.conv1(x)))
-        if self.use_dropout:
-            out = self.dropout(out)
-        out = F.relu(self.bn2(self.conv2(out)))
-        if self.use_dropout:
-            out = self.dropout(out)
-        out = paddle.concat([x, out], 1)
-        return out
-
-
-class SingleLayer(nn.Layer):
-    def __init__(self, nChannels, growthRate, use_dropout):
-        super(SingleLayer, self).__init__()
-        self.bn1 = nn.BatchNorm2D(nChannels)
-        self.conv1 = nn.Conv2D(
-            nChannels, growthRate, kernel_size=3, padding=1, bias_attr=False
-        )
-
-        self.use_dropout = use_dropout
-        self.dropout = nn.Dropout(p=0.2)
-
-    def forward(self, x):
-        out = self.conv1(F.relu(x))
-        if self.use_dropout:
-            out = self.dropout(out)
-
-        out = paddle.concat([x, out], 1)
-        return out
-
-
-class Transition(nn.Layer):
-    def __init__(self, nChannels, out_channels, use_dropout):
-        super(Transition, self).__init__()
-        self.bn1 = nn.BatchNorm2D(out_channels)
-        self.conv1 = nn.Conv2D(nChannels, out_channels, kernel_size=1, bias_attr=False)
-        self.use_dropout = use_dropout
-        self.dropout = nn.Dropout(p=0.2)
-
-    def forward(self, x):
-        out = F.relu(self.bn1(self.conv1(x)))
-        if self.use_dropout:
-            out = self.dropout(out)
-        out = F.avg_pool2d(out, 2, ceil_mode=True, exclusive=False)
-        return out
-
-
-class DenseNet(nn.Layer):
-    def __init__(
-        self, growthRate, reduction, bottleneck, use_dropout, input_channel, **kwargs
-    ):
-        super(DenseNet, self).__init__()
-
-        nDenseBlocks = 16
-        nChannels = 2 * growthRate
-
-        self.conv1 = nn.Conv2D(
-            input_channel,
-            nChannels,
-            kernel_size=7,
-            padding=3,
-            stride=2,
-            bias_attr=False,
-        )
-        self.dense1 = self._make_dense(
-            nChannels, growthRate, nDenseBlocks, bottleneck, use_dropout
-        )
-        nChannels += nDenseBlocks * growthRate
-        out_channels = int(math.floor(nChannels * reduction))
-        self.trans1 = Transition(nChannels, out_channels, use_dropout)
-
-        nChannels = out_channels
-        self.dense2 = self._make_dense(
-            nChannels, growthRate, nDenseBlocks, bottleneck, use_dropout
-        )
-        nChannels += nDenseBlocks * growthRate
-        out_channels = int(math.floor(nChannels * reduction))
-        self.trans2 = Transition(nChannels, out_channels, use_dropout)
-
-        nChannels = out_channels
-        self.dense3 = self._make_dense(
-            nChannels, growthRate, nDenseBlocks, bottleneck, use_dropout
-        )
-        self.out_channels = out_channels
-
-    def _make_dense(self, nChannels, growthRate, nDenseBlocks, bottleneck, use_dropout):
-        layers = []
-        for i in range(int(nDenseBlocks)):
-            if bottleneck:
-                layers.append(Bottleneck(nChannels, growthRate, use_dropout))
-            else:
-                layers.append(SingleLayer(nChannels, growthRate, use_dropout))
-            nChannels += growthRate
-        return nn.Sequential(*layers)
-
-    def forward(self, inputs):
-        x, x_m, y = inputs
-        out = self.conv1(x)
-        out = F.relu(out)
-        out = F.max_pool2d(out, 2, ceil_mode=True)
-        out = self.dense1(out)
-        out = self.trans1(out)
-        out = self.dense2(out)
-        out = self.trans2(out)
-        out = self.dense3(out)
-        return out, x_m, y
+# copyright (c) 2020 PaddlePaddle Authors. All Rights Reserve.
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+#    http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+"""
+This code is refer from:
+https://github.com/LBH1024/CAN/models/densenet.py
+
+"""
+
+from __future__ import absolute_import
+from __future__ import division
+from __future__ import print_function
+
+import math
+import paddle
+import paddle.nn as nn
+import paddle.nn.functional as F
+
+
+class Bottleneck(nn.Layer):
+    def __init__(self, nChannels, growthRate, use_dropout):
+        super(Bottleneck, self).__init__()
+        interChannels = 4 * growthRate
+        self.bn1 = nn.BatchNorm2D(interChannels)
+        self.conv1 = nn.Conv2D(
+            nChannels, interChannels, kernel_size=1, bias_attr=None
+        )  # Xavier initialization
+        self.bn2 = nn.BatchNorm2D(growthRate)
+        self.conv2 = nn.Conv2D(
+            interChannels, growthRate, kernel_size=3, padding=1, bias_attr=None
+        )  # Xavier initialization
+        self.use_dropout = use_dropout
+        self.dropout = nn.Dropout(p=0.2)
+
+    def forward(self, x):
+        out = F.relu(self.bn1(self.conv1(x)))
+        if self.use_dropout:
+            out = self.dropout(out)
+        out = F.relu(self.bn2(self.conv2(out)))
+        if self.use_dropout:
+            out = self.dropout(out)
+        out = paddle.concat([x, out], 1)
+        return out
+
+
+class SingleLayer(nn.Layer):
+    def __init__(self, nChannels, growthRate, use_dropout):
+        super(SingleLayer, self).__init__()
+        self.bn1 = nn.BatchNorm2D(nChannels)
+        self.conv1 = nn.Conv2D(
+            nChannels, growthRate, kernel_size=3, padding=1, bias_attr=False
+        )
+
+        self.use_dropout = use_dropout
+        self.dropout = nn.Dropout(p=0.2)
+
+    def forward(self, x):
+        out = self.conv1(F.relu(x))
+        if self.use_dropout:
+            out = self.dropout(out)
+
+        out = paddle.concat([x, out], 1)
+        return out
+
+
+class Transition(nn.Layer):
+    def __init__(self, nChannels, out_channels, use_dropout):
+        super(Transition, self).__init__()
+        self.bn1 = nn.BatchNorm2D(out_channels)
+        self.conv1 = nn.Conv2D(nChannels, out_channels, kernel_size=1, bias_attr=False)
+        self.use_dropout = use_dropout
+        self.dropout = nn.Dropout(p=0.2)
+
+    def forward(self, x):
+        out = F.relu(self.bn1(self.conv1(x)))
+        if self.use_dropout:
+            out = self.dropout(out)
+        out = F.avg_pool2d(out, 2, ceil_mode=True, exclusive=False)
+        return out
+
+
+class DenseNet(nn.Layer):
+    def __init__(
+        self, growthRate, reduction, bottleneck, use_dropout, input_channel, **kwargs
+    ):
+        super(DenseNet, self).__init__()
+
+        nDenseBlocks = 16
+        nChannels = 2 * growthRate
+
+        self.conv1 = nn.Conv2D(
+            input_channel,
+            nChannels,
+            kernel_size=7,
+            padding=3,
+            stride=2,
+            bias_attr=False,
+        )
+        self.dense1 = self._make_dense(
+            nChannels, growthRate, nDenseBlocks, bottleneck, use_dropout
+        )
+        nChannels += nDenseBlocks * growthRate
+        out_channels = int(math.floor(nChannels * reduction))
+        self.trans1 = Transition(nChannels, out_channels, use_dropout)
+
+        nChannels = out_channels
+        self.dense2 = self._make_dense(
+            nChannels, growthRate, nDenseBlocks, bottleneck, use_dropout
+        )
+        nChannels += nDenseBlocks * growthRate
+        out_channels = int(math.floor(nChannels * reduction))
+        self.trans2 = Transition(nChannels, out_channels, use_dropout)
+
+        nChannels = out_channels
+        self.dense3 = self._make_dense(
+            nChannels, growthRate, nDenseBlocks, bottleneck, use_dropout
+        )
+        self.out_channels = out_channels
+
+    def _make_dense(self, nChannels, growthRate, nDenseBlocks, bottleneck, use_dropout):
+        layers = []
+        for i in range(int(nDenseBlocks)):
+            if bottleneck:
+                layers.append(Bottleneck(nChannels, growthRate, use_dropout))
+            else:
+                layers.append(SingleLayer(nChannels, growthRate, use_dropout))
+            nChannels += growthRate
+        return nn.Sequential(*layers)
+
+    def forward(self, inputs):
+        x, x_m, y = inputs
+        out = self.conv1(x)
+        out = F.relu(out)
+        out = F.max_pool2d(out, 2, ceil_mode=True)
+        out = self.dense1(out)
+        out = self.trans1(out)
+        out = self.dense2(out)
+        out = self.trans2(out)
+        out = self.dense3(out)
+        return out, x_m, y
diff --git a/ppocr/modeling/backbones/rec_efficientb3_pren.py b/ppocr/modeling/backbones/rec_efficientb3_pren.py
index 916a090e2..dab4a2a44 100644
--- a/ppocr/modeling/backbones/rec_efficientb3_pren.py
+++ b/ppocr/modeling/backbones/rec_efficientb3_pren.py
@@ -241,8 +241,8 @@ class EfficientNetb3_PREN(nn.Layer):
         super(EfficientNetb3_PREN, self).__init__()
         """
         the fllowing are efficientnetb3's superparams,
-        they means efficientnetb3 network's width, depth, resolution and 
-        dropout respectively, to fit for text recognition task, the resolution 
+        they means efficientnetb3 network's width, depth, resolution and
+        dropout respectively, to fit for text recognition task, the resolution
         here is changed from 300 to 64.
         """
         w, d, s, p = 1.2, 1.4, 64, 0.3
diff --git a/ppocr/modeling/backbones/rec_micronet.py b/ppocr/modeling/backbones/rec_micronet.py
index 6550c9201..bf7c8d505 100644
--- a/ppocr/modeling/backbones/rec_micronet.py
+++ b/ppocr/modeling/backbones/rec_micronet.py
@@ -12,7 +12,7 @@
 # See the License for the specific language governing permissions and
 # limitations under the License.
 """
-This code is refer from: 
+This code is refer from:
 https://github.com/liyunsheng13/micronet/blob/main/backbone/micronet.py
 https://github.com/liyunsheng13/micronet/blob/main/backbone/activation.py
 """
@@ -364,114 +364,136 @@ class DYMicroBlock(nn.Layer):
         if gs1[0] == 0:
             self.layers = nn.Sequential(
                 DepthSpatialSepConv(inp, t1, kernel_size, stride),
-                DYShiftMax(
-                    hidden_dim2,
-                    hidden_dim2,
-                    act_max=2.0,
-                    act_relu=True if y2 == 2 else False,
-                    init_a=init_a,
-                    reduction=act_reduction,
-                    init_b=init_b,
-                    g=gs1,
-                    expansion=False,
-                )
-                if y2 > 0
-                else nn.ReLU6(),
+                (
+                    DYShiftMax(
+                        hidden_dim2,
+                        hidden_dim2,
+                        act_max=2.0,
+                        act_relu=True if y2 == 2 else False,
+                        init_a=init_a,
+                        reduction=act_reduction,
+                        init_b=init_b,
+                        g=gs1,
+                        expansion=False,
+                    )
+                    if y2 > 0
+                    else nn.ReLU6()
+                ),
                 ChannelShuffle(gs1[1]) if shuffle else nn.Sequential(),
-                ChannelShuffle(hidden_dim2 // 2)
-                if shuffle and y2 != 0
-                else nn.Sequential(),
+                (
+                    ChannelShuffle(hidden_dim2 // 2)
+                    if shuffle and y2 != 0
+                    else nn.Sequential()
+                ),
                 GroupConv(hidden_dim2, oup, (g1, g2)),
-                DYShiftMax(
-                    oup,
-                    oup,
-                    act_max=2.0,
-                    act_relu=False,
-                    init_a=[1.0, 0.0],
-                    reduction=act_reduction // 2,
-                    init_b=[0.0, 0.0],
-                    g=(g1, g2),
-                    expansion=False,
-                )
-                if y3 > 0
-                else nn.Sequential(),
+                (
+                    DYShiftMax(
+                        oup,
+                        oup,
+                        act_max=2.0,
+                        act_relu=False,
+                        init_a=[1.0, 0.0],
+                        reduction=act_reduction // 2,
+                        init_b=[0.0, 0.0],
+                        g=(g1, g2),
+                        expansion=False,
+                    )
+                    if y3 > 0
+                    else nn.Sequential()
+                ),
                 ChannelShuffle(g2) if shuffle else nn.Sequential(),
-                ChannelShuffle(oup // 2)
-                if shuffle and oup % 2 == 0 and y3 != 0
-                else nn.Sequential(),
+                (
+                    ChannelShuffle(oup // 2)
+                    if shuffle and oup % 2 == 0 and y3 != 0
+                    else nn.Sequential()
+                ),
             )
         elif g2 == 0:
             self.layers = nn.Sequential(
                 GroupConv(inp, hidden_dim2, gs1),
-                DYShiftMax(
-                    hidden_dim2,
-                    hidden_dim2,
-                    act_max=2.0,
-                    act_relu=False,
-                    init_a=[1.0, 0.0],
-                    reduction=act_reduction,
-                    init_b=[0.0, 0.0],
-                    g=gs1,
-                    expansion=False,
-                )
-                if y3 > 0
-                else nn.Sequential(),
+                (
+                    DYShiftMax(
+                        hidden_dim2,
+                        hidden_dim2,
+                        act_max=2.0,
+                        act_relu=False,
+                        init_a=[1.0, 0.0],
+                        reduction=act_reduction,
+                        init_b=[0.0, 0.0],
+                        g=gs1,
+                        expansion=False,
+                    )
+                    if y3 > 0
+                    else nn.Sequential()
+                ),
             )
         else:
             self.layers = nn.Sequential(
                 GroupConv(inp, hidden_dim2, gs1),
-                DYShiftMax(
-                    hidden_dim2,
-                    hidden_dim2,
-                    act_max=2.0,
-                    act_relu=True if y1 == 2 else False,
-                    init_a=init_a,
-                    reduction=act_reduction,
-                    init_b=init_b,
-                    g=gs1,
-                    expansion=False,
-                )
-                if y1 > 0
-                else nn.ReLU6(),
+                (
+                    DYShiftMax(
+                        hidden_dim2,
+                        hidden_dim2,
+                        act_max=2.0,
+                        act_relu=True if y1 == 2 else False,
+                        init_a=init_a,
+                        reduction=act_reduction,
+                        init_b=init_b,
+                        g=gs1,
+                        expansion=False,
+                    )
+                    if y1 > 0
+                    else nn.ReLU6()
+                ),
                 ChannelShuffle(gs1[1]) if shuffle else nn.Sequential(),
-                DepthSpatialSepConv(hidden_dim2, (1, 1), kernel_size, stride)
-                if depthsep
-                else DepthConv(hidden_dim2, hidden_dim2, kernel_size, stride),
+                (
+                    DepthSpatialSepConv(hidden_dim2, (1, 1), kernel_size, stride)
+                    if depthsep
+                    else DepthConv(hidden_dim2, hidden_dim2, kernel_size, stride)
+                ),
                 nn.Sequential(),
-                DYShiftMax(
-                    hidden_dim2,
-                    hidden_dim2,
-                    act_max=2.0,
-                    act_relu=True if y2 == 2 else False,
-                    init_a=init_a,
-                    reduction=act_reduction,
-                    init_b=init_b,
-                    g=gs1,
-                    expansion=True,
-                )
-                if y2 > 0
-                else nn.ReLU6(),
-                ChannelShuffle(hidden_dim2 // 4)
-                if shuffle and y1 != 0 and y2 != 0
-                else nn.Sequential()
-                if y1 == 0 and y2 == 0
-                else ChannelShuffle(hidden_dim2 // 2),
+                (
+                    DYShiftMax(
+                        hidden_dim2,
+                        hidden_dim2,
+                        act_max=2.0,
+                        act_relu=True if y2 == 2 else False,
+                        init_a=init_a,
+                        reduction=act_reduction,
+                        init_b=init_b,
+                        g=gs1,
+                        expansion=True,
+                    )
+                    if y2 > 0
+                    else nn.ReLU6()
+                ),
+                (
+                    ChannelShuffle(hidden_dim2 // 4)
+                    if shuffle and y1 != 0 and y2 != 0
+                    else (
+                        nn.Sequential()
+                        if y1 == 0 and y2 == 0
+                        else ChannelShuffle(hidden_dim2 // 2)
+                    )
+                ),
                 GroupConv(hidden_dim2, oup, (g1, g2)),
-                DYShiftMax(
-                    oup,
-                    oup,
-                    act_max=2.0,
-                    act_relu=False,
-                    init_a=[1.0, 0.0],
-                    reduction=act_reduction // 2
-                    if oup < hidden_dim2
-                    else act_reduction,
-                    init_b=[0.0, 0.0],
-                    g=(g1, g2),
-                    expansion=False,
-                )
-                if y3 > 0
-                else nn.Sequential(),
+                (
+                    DYShiftMax(
+                        oup,
+                        oup,
+                        act_max=2.0,
+                        act_relu=False,
+                        init_a=[1.0, 0.0],
+                        reduction=(
+                            act_reduction // 2 if oup < hidden_dim2 else act_reduction
+                        ),
+                        init_b=[0.0, 0.0],
+                        g=(g1, g2),
+                        expansion=False,
+                    )
+                    if y3 > 0
+                    else nn.Sequential()
+                ),
                 ChannelShuffle(g2) if shuffle else nn.Sequential(),
                 ChannelShuffle(oup // 2) if shuffle and y3 != 0 else nn.Sequential(),
             )
diff --git a/ppocr/modeling/backbones/rec_nrtr_mtb.py b/ppocr/modeling/backbones/rec_nrtr_mtb.py
index c48683849..608a8a7f4 100644
--- a/ppocr/modeling/backbones/rec_nrtr_mtb.py
+++ b/ppocr/modeling/backbones/rec_nrtr_mtb.py
@@ -35,9 +35,7 @@ class MTB(nn.Layer):
                     ),
                 )
                 self.block.add_sublayer("relu_{}".format(i), nn.ReLU())
-                self.block.add_sublayer(
-                    "bn_{}".format(i), nn.BatchNorm2D(32 * (2**i))
-                )
+                self.block.add_sublayer("bn_{}".format(i), nn.BatchNorm2D(32 * (2**i)))
 
     def forward(self, images):
         x = self.block(images)
diff --git a/ppocr/modeling/backbones/rec_repvit.py b/ppocr/modeling/backbones/rec_repvit.py
index e983569c4..2b3d3333d 100644
--- a/ppocr/modeling/backbones/rec_repvit.py
+++ b/ppocr/modeling/backbones/rec_repvit.py
@@ -13,7 +13,7 @@
 # limitations under the License.
 
 """
-This code is refer from: 
+This code is refer from:
 https://github.com/THU-MIG/RepViT
 """
 
diff --git a/ppocr/modeling/backbones/rec_resnet_31.py b/ppocr/modeling/backbones/rec_resnet_31.py
index 2db2549ee..c4aa4fc32 100644
--- a/ppocr/modeling/backbones/rec_resnet_31.py
+++ b/ppocr/modeling/backbones/rec_resnet_31.py
@@ -12,7 +12,7 @@
 # See the License for the specific language governing permissions and
 # limitations under the License.
 """
-This code is refer from: 
+This code is refer from:
 https://github.com/open-mmlab/mmocr/blob/main/mmocr/models/textrecog/layers/conv_layer.py
 https://github.com/open-mmlab/mmocr/blob/main/mmocr/models/textrecog/backbones/resnet31_ocr.py
 """
diff --git a/ppocr/modeling/backbones/rec_resnet_32.py b/ppocr/modeling/backbones/rec_resnet_32.py
index 51059ef12..63d78d34a 100644
--- a/ppocr/modeling/backbones/rec_resnet_32.py
+++ b/ppocr/modeling/backbones/rec_resnet_32.py
@@ -12,7 +12,7 @@
 # See the License for the specific language governing permissions and
 # limitations under the License.
 """
-This code is refer from: 
+This code is refer from:
 https://github.com/hikopensource/DAVAR-Lab-OCR/davarocr/davar_rcg/models/backbones/ResNet32.py
 """
 
diff --git a/ppocr/modeling/backbones/rec_resnet_45.py b/ppocr/modeling/backbones/rec_resnet_45.py
index 634c9a248..914d972d1 100644
--- a/ppocr/modeling/backbones/rec_resnet_45.py
+++ b/ppocr/modeling/backbones/rec_resnet_45.py
@@ -12,7 +12,7 @@
 # See the License for the specific language governing permissions and
 # limitations under the License.
 """
-This code is refer from: 
+This code is refer from:
 https://github.com/FangShancheng/ABINet/tree/main/modules
 """
 
diff --git a/ppocr/modeling/backbones/rec_resnet_rfl.py b/ppocr/modeling/backbones/rec_resnet_rfl.py
index 2b4e5e0b3..4742ef812 100644
--- a/ppocr/modeling/backbones/rec_resnet_rfl.py
+++ b/ppocr/modeling/backbones/rec_resnet_rfl.py
@@ -12,7 +12,7 @@
 # See the License for the specific language governing permissions and
 # limitations under the License.
 """
-This code is refer from: 
+This code is refer from:
 https://github.com/hikopensource/DAVAR-Lab-OCR/blob/main/davarocr/davar_rcg/models/backbones/ResNetRFL.py
 """
 
diff --git a/ppocr/modeling/backbones/rec_resnet_vd.py b/ppocr/modeling/backbones/rec_resnet_vd.py
index 3dad51fba..343d5ed60 100644
--- a/ppocr/modeling/backbones/rec_resnet_vd.py
+++ b/ppocr/modeling/backbones/rec_resnet_vd.py
@@ -259,9 +259,11 @@ class ResNet(nn.Layer):
                     bottleneck_block = self.add_sublayer(
                         "bb_%d_%d" % (block, i),
                         BottleneckBlock(
-                            in_channels=num_channels[block]
-                            if i == 0
-                            else num_filters[block] * 4,
+                            in_channels=(
+                                num_channels[block]
+                                if i == 0
+                                else num_filters[block] * 4
+                            ),
                             out_channels=num_filters[block],
                             stride=stride,
                             shortcut=shortcut,
@@ -285,9 +287,9 @@ class ResNet(nn.Layer):
                     basic_block = self.add_sublayer(
                         "bb_%d_%d" % (block, i),
                         BasicBlock(
-                            in_channels=num_channels[block]
-                            if i == 0
-                            else num_filters[block],
+                            in_channels=(
+                                num_channels[block] if i == 0 else num_filters[block]
+                            ),
                             out_channels=num_filters[block],
                             stride=stride,
                             shortcut=shortcut,
diff --git a/ppocr/modeling/backbones/rec_shallow_cnn.py b/ppocr/modeling/backbones/rec_shallow_cnn.py
index 85c043d1f..e5a8b65e3 100644
--- a/ppocr/modeling/backbones/rec_shallow_cnn.py
+++ b/ppocr/modeling/backbones/rec_shallow_cnn.py
@@ -12,7 +12,7 @@
 # See the License for the specific language governing permissions and
 # limitations under the License.
 """
-This code is refer from: 
+This code is refer from:
 https://github.com/open-mmlab/mmocr/blob/1.x/mmocr/models/textrecog/backbones/shallow_cnn.py
 """
 
diff --git a/ppocr/modeling/backbones/rec_vit_parseq.py b/ppocr/modeling/backbones/rec_vit_parseq.py
index 1ede97b17..049733f3a 100644
--- a/ppocr/modeling/backbones/rec_vit_parseq.py
+++ b/ppocr/modeling/backbones/rec_vit_parseq.py
@@ -13,7 +13,7 @@
 # limitations under the License.
 
 """
-This code is refer from: 
+This code is refer from:
 https://github.com/PaddlePaddle/PaddleClas/blob/release%2F2.5/ppcls/arch/backbone/model_zoo/vision_transformer.py
 """
 
diff --git a/ppocr/modeling/backbones/rec_vitstr.py b/ppocr/modeling/backbones/rec_vitstr.py
index 87b5dac88..9a273da93 100644
--- a/ppocr/modeling/backbones/rec_vitstr.py
+++ b/ppocr/modeling/backbones/rec_vitstr.py
@@ -12,7 +12,7 @@
 # See the License for the specific language governing permissions and
 # limitations under the License.
 """
-This code is refer from: 
+This code is refer from:
 https://github.com/roatienza/deep-text-recognition-benchmark/blob/master/modules/vitstr.py
 """
 
diff --git a/ppocr/modeling/heads/kie_sdmgr_head.py b/ppocr/modeling/heads/kie_sdmgr_head.py
index bc019ec80..eca8efdc1 100644
--- a/ppocr/modeling/heads/kie_sdmgr_head.py
+++ b/ppocr/modeling/heads/kie_sdmgr_head.py
@@ -141,8 +141,7 @@ class GNNLayer(nn.Layer):
             )
             residuals.append(
                 (
-                    residual
-                    * cat_nodes[start : start + num**2].reshape([num, num, -1])
+                    residual * cat_nodes[start : start + num**2].reshape([num, num, -1])
                 ).sum(1)
             )
             start += num**2
diff --git a/ppocr/modeling/heads/rec_abinet_head.py b/ppocr/modeling/heads/rec_abinet_head.py
index 2cc847d61..6528be8e7 100644
--- a/ppocr/modeling/heads/rec_abinet_head.py
+++ b/ppocr/modeling/heads/rec_abinet_head.py
@@ -12,7 +12,7 @@
 # See the License for the specific language governing permissions and
 # limitations under the License.
 """
-This code is refer from: 
+This code is refer from:
 https://github.com/FangShancheng/ABINet/tree/main/modules
 """
 
diff --git a/ppocr/modeling/heads/rec_can_head.py b/ppocr/modeling/heads/rec_can_head.py
index 921b8e4a8..e80951c87 100644
--- a/ppocr/modeling/heads/rec_can_head.py
+++ b/ppocr/modeling/heads/rec_can_head.py
@@ -1,338 +1,338 @@
-# copyright (c) 2019 PaddlePaddle Authors. All Rights Reserve.
-#
-# Licensed under the Apache License, Version 2.0 (the "License");
-# you may not use this file except in compliance with the License.
-# You may obtain a copy of the License at
-#
-#    http://www.apache.org/licenses/LICENSE-2.0
-#
-# Unless required by applicable law or agreed to in writing, software
-# distributed under the License is distributed on an "AS IS" BASIS,
-# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-# See the License for the specific language governing permissions and
-# limitations under the License.
-"""
-This code is refer from:
-https://github.com/LBH1024/CAN/models/can.py
-https://github.com/LBH1024/CAN/models/counting.py
-https://github.com/LBH1024/CAN/models/decoder.py
-https://github.com/LBH1024/CAN/models/attention.py
-
-"""
-
-from __future__ import absolute_import
-from __future__ import division
-from __future__ import print_function
-
-import paddle.nn as nn
-import paddle
-import math
-
-"""
-Counting Module
-"""
-
-
-class ChannelAtt(nn.Layer):
-    def __init__(self, channel, reduction):
-        super(ChannelAtt, self).__init__()
-        self.avg_pool = nn.AdaptiveAvgPool2D(1)
-
-        self.fc = nn.Sequential(
-            nn.Linear(channel, channel // reduction),
-            nn.ReLU(),
-            nn.Linear(channel // reduction, channel),
-            nn.Sigmoid(),
-        )
-
-    def forward(self, x):
-        b, c, _, _ = x.shape
-        y = paddle.reshape(self.avg_pool(x), [b, c])
-        y = paddle.reshape(self.fc(y), [b, c, 1, 1])
-        return x * y
-
-
-class CountingDecoder(nn.Layer):
-    def __init__(self, in_channel, out_channel, kernel_size):
-        super(CountingDecoder, self).__init__()
-        self.in_channel = in_channel
-        self.out_channel = out_channel
-
-        self.trans_layer = nn.Sequential(
-            nn.Conv2D(
-                self.in_channel,
-                512,
-                kernel_size=kernel_size,
-                padding=kernel_size // 2,
-                bias_attr=False,
-            ),
-            nn.BatchNorm2D(512),
-        )
-
-        self.channel_att = ChannelAtt(512, 16)
-
-        self.pred_layer = nn.Sequential(
-            nn.Conv2D(512, self.out_channel, kernel_size=1, bias_attr=False),
-            nn.Sigmoid(),
-        )
-
-    def forward(self, x, mask):
-        b, _, h, w = x.shape
-        x = self.trans_layer(x)
-        x = self.channel_att(x)
-        x = self.pred_layer(x)
-
-        if mask is not None:
-            x = x * mask
-        x = paddle.reshape(x, [b, self.out_channel, -1])
-        x1 = paddle.sum(x, axis=-1)
-
-        return x1, paddle.reshape(x, [b, self.out_channel, h, w])
-
-
-"""
-Attention Decoder
-"""
-
-
-class PositionEmbeddingSine(nn.Layer):
-    def __init__(
-        self, num_pos_feats=64, temperature=10000, normalize=False, scale=None
-    ):
-        super().__init__()
-        self.num_pos_feats = num_pos_feats
-        self.temperature = temperature
-        self.normalize = normalize
-        if scale is not None and normalize is False:
-            raise ValueError("normalize should be True if scale is passed")
-        if scale is None:
-            scale = 2 * math.pi
-        self.scale = scale
-
-    def forward(self, x, mask):
-        y_embed = paddle.cumsum(mask, 1, dtype="float32")
-        x_embed = paddle.cumsum(mask, 2, dtype="float32")
-
-        if self.normalize:
-            eps = 1e-6
-            y_embed = y_embed / (y_embed[:, -1:, :] + eps) * self.scale
-            x_embed = x_embed / (x_embed[:, :, -1:] + eps) * self.scale
-        dim_t = paddle.arange(self.num_pos_feats, dtype="float32")
-        dim_d = paddle.expand(paddle.to_tensor(2), dim_t.shape)
-        dim_t = self.temperature ** (
-            2 * (dim_t / dim_d).astype("int64") / self.num_pos_feats
-        )
-
-        pos_x = paddle.unsqueeze(x_embed, [3]) / dim_t
-        pos_y = paddle.unsqueeze(y_embed, [3]) / dim_t
-
-        pos_x = paddle.flatten(
-            paddle.stack(
-                [paddle.sin(pos_x[:, :, :, 0::2]), paddle.cos(pos_x[:, :, :, 1::2])],
-                axis=4,
-            ),
-            3,
-        )
-        pos_y = paddle.flatten(
-            paddle.stack(
-                [paddle.sin(pos_y[:, :, :, 0::2]), paddle.cos(pos_y[:, :, :, 1::2])],
-                axis=4,
-            ),
-            3,
-        )
-
-        pos = paddle.transpose(paddle.concat([pos_y, pos_x], axis=3), [0, 3, 1, 2])
-
-        return pos
-
-
-class AttDecoder(nn.Layer):
-    def __init__(
-        self,
-        ratio,
-        is_train,
-        input_size,
-        hidden_size,
-        encoder_out_channel,
-        dropout,
-        dropout_ratio,
-        word_num,
-        counting_decoder_out_channel,
-        attention,
-    ):
-        super(AttDecoder, self).__init__()
-        self.input_size = input_size
-        self.hidden_size = hidden_size
-        self.out_channel = encoder_out_channel
-        self.attention_dim = attention["attention_dim"]
-        self.dropout_prob = dropout
-        self.ratio = ratio
-        self.word_num = word_num
-
-        self.counting_num = counting_decoder_out_channel
-        self.is_train = is_train
-
-        self.init_weight = nn.Linear(self.out_channel, self.hidden_size)
-        self.embedding = nn.Embedding(self.word_num, self.input_size)
-        self.word_input_gru = nn.GRUCell(self.input_size, self.hidden_size)
-        self.word_attention = Attention(hidden_size, attention["attention_dim"])
-
-        self.encoder_feature_conv = nn.Conv2D(
-            self.out_channel,
-            self.attention_dim,
-            kernel_size=attention["word_conv_kernel"],
-            padding=attention["word_conv_kernel"] // 2,
-        )
-
-        self.word_state_weight = nn.Linear(self.hidden_size, self.hidden_size)
-        self.word_embedding_weight = nn.Linear(self.input_size, self.hidden_size)
-        self.word_context_weight = nn.Linear(self.out_channel, self.hidden_size)
-        self.counting_context_weight = nn.Linear(self.counting_num, self.hidden_size)
-        self.word_convert = nn.Linear(self.hidden_size, self.word_num)
-
-        if dropout:
-            self.dropout = nn.Dropout(dropout_ratio)
-
-    def forward(self, cnn_features, labels, counting_preds, images_mask):
-        if self.is_train:
-            _, num_steps = labels.shape
-        else:
-            num_steps = 36
-
-        batch_size, _, height, width = cnn_features.shape
-        images_mask = images_mask[:, :, :: self.ratio, :: self.ratio]
-
-        word_probs = paddle.zeros((batch_size, num_steps, self.word_num))
-        word_alpha_sum = paddle.zeros((batch_size, 1, height, width))
-
-        hidden = self.init_hidden(cnn_features, images_mask)
-        counting_context_weighted = self.counting_context_weight(counting_preds)
-        cnn_features_trans = self.encoder_feature_conv(cnn_features)
-
-        position_embedding = PositionEmbeddingSine(256, normalize=True)
-        pos = position_embedding(cnn_features_trans, images_mask[:, 0, :, :])
-
-        cnn_features_trans = cnn_features_trans + pos
-
-        word = paddle.ones([batch_size, 1], dtype="int64")  # init word as sos
-        word = word.squeeze(axis=1)
-        for i in range(num_steps):
-            word_embedding = self.embedding(word)
-            _, hidden = self.word_input_gru(word_embedding, hidden)
-            word_context_vec, _, word_alpha_sum = self.word_attention(
-                cnn_features, cnn_features_trans, hidden, word_alpha_sum, images_mask
-            )
-
-            current_state = self.word_state_weight(hidden)
-            word_weighted_embedding = self.word_embedding_weight(word_embedding)
-            word_context_weighted = self.word_context_weight(word_context_vec)
-
-            if self.dropout_prob:
-                word_out_state = self.dropout(
-                    current_state
-                    + word_weighted_embedding
-                    + word_context_weighted
-                    + counting_context_weighted
-                )
-            else:
-                word_out_state = (
-                    current_state
-                    + word_weighted_embedding
-                    + word_context_weighted
-                    + counting_context_weighted
-                )
-
-            word_prob = self.word_convert(word_out_state)
-            word_probs[:, i] = word_prob
-
-            if self.is_train:
-                word = labels[:, i]
-            else:
-                word = word_prob.argmax(1)
-                word = paddle.multiply(
-                    word, labels[:, i]
-                )  # labels are oneslike tensor in infer/predict mode
-
-        return word_probs
-
-    def init_hidden(self, features, feature_mask):
-        average = paddle.sum(
-            paddle.sum(features * feature_mask, axis=-1), axis=-1
-        ) / paddle.sum((paddle.sum(feature_mask, axis=-1)), axis=-1)
-        average = self.init_weight(average)
-        return paddle.tanh(average)
-
-
-"""
-Attention Module
-"""
-
-
-class Attention(nn.Layer):
-    def __init__(self, hidden_size, attention_dim):
-        super(Attention, self).__init__()
-        self.hidden = hidden_size
-        self.attention_dim = attention_dim
-        self.hidden_weight = nn.Linear(self.hidden, self.attention_dim)
-        self.attention_conv = nn.Conv2D(
-            1, 512, kernel_size=11, padding=5, bias_attr=False
-        )
-        self.attention_weight = nn.Linear(512, self.attention_dim, bias_attr=False)
-        self.alpha_convert = nn.Linear(self.attention_dim, 1)
-
-    def forward(
-        self, cnn_features, cnn_features_trans, hidden, alpha_sum, image_mask=None
-    ):
-        query = self.hidden_weight(hidden)
-        alpha_sum_trans = self.attention_conv(alpha_sum)
-        coverage_alpha = self.attention_weight(
-            paddle.transpose(alpha_sum_trans, [0, 2, 3, 1])
-        )
-        alpha_score = paddle.tanh(
-            paddle.unsqueeze(query, [1, 2])
-            + coverage_alpha
-            + paddle.transpose(cnn_features_trans, [0, 2, 3, 1])
-        )
-        energy = self.alpha_convert(alpha_score)
-        energy = energy - energy.max()
-        energy_exp = paddle.exp(paddle.squeeze(energy, -1))
-
-        if image_mask is not None:
-            energy_exp = energy_exp * paddle.squeeze(image_mask, 1)
-        alpha = energy_exp / (
-            paddle.unsqueeze(paddle.sum(paddle.sum(energy_exp, -1), -1), [1, 2]) + 1e-10
-        )
-        alpha_sum = paddle.unsqueeze(alpha, 1) + alpha_sum
-        context_vector = paddle.sum(
-            paddle.sum((paddle.unsqueeze(alpha, 1) * cnn_features), -1), -1
-        )
-
-        return context_vector, alpha, alpha_sum
-
-
-class CANHead(nn.Layer):
-    def __init__(self, in_channel, out_channel, ratio, attdecoder, **kwargs):
-        super(CANHead, self).__init__()
-
-        self.in_channel = in_channel
-        self.out_channel = out_channel
-
-        self.counting_decoder1 = CountingDecoder(
-            self.in_channel, self.out_channel, 3
-        )  # mscm
-        self.counting_decoder2 = CountingDecoder(self.in_channel, self.out_channel, 5)
-
-        self.decoder = AttDecoder(ratio, **attdecoder)
-
-        self.ratio = ratio
-
-    def forward(self, inputs, targets=None):
-        cnn_features, images_mask, labels = inputs
-
-        counting_mask = images_mask[:, :, :: self.ratio, :: self.ratio]
-        counting_preds1, _ = self.counting_decoder1(cnn_features, counting_mask)
-        counting_preds2, _ = self.counting_decoder2(cnn_features, counting_mask)
-        counting_preds = (counting_preds1 + counting_preds2) / 2
-
-        word_probs = self.decoder(cnn_features, labels, counting_preds, images_mask)
-        return word_probs, counting_preds, counting_preds1, counting_preds2
+# copyright (c) 2019 PaddlePaddle Authors. All Rights Reserve.
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+#    http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+"""
+This code is refer from:
+https://github.com/LBH1024/CAN/models/can.py
+https://github.com/LBH1024/CAN/models/counting.py
+https://github.com/LBH1024/CAN/models/decoder.py
+https://github.com/LBH1024/CAN/models/attention.py
+
+"""
+
+from __future__ import absolute_import
+from __future__ import division
+from __future__ import print_function
+
+import paddle.nn as nn
+import paddle
+import math
+
+"""
+Counting Module
+"""
+
+
+class ChannelAtt(nn.Layer):
+    def __init__(self, channel, reduction):
+        super(ChannelAtt, self).__init__()
+        self.avg_pool = nn.AdaptiveAvgPool2D(1)
+
+        self.fc = nn.Sequential(
+            nn.Linear(channel, channel // reduction),
+            nn.ReLU(),
+            nn.Linear(channel // reduction, channel),
+            nn.Sigmoid(),
+        )
+
+    def forward(self, x):
+        b, c, _, _ = x.shape
+        y = paddle.reshape(self.avg_pool(x), [b, c])
+        y = paddle.reshape(self.fc(y), [b, c, 1, 1])
+        return x * y
+
+
+class CountingDecoder(nn.Layer):
+    def __init__(self, in_channel, out_channel, kernel_size):
+        super(CountingDecoder, self).__init__()
+        self.in_channel = in_channel
+        self.out_channel = out_channel
+
+        self.trans_layer = nn.Sequential(
+            nn.Conv2D(
+                self.in_channel,
+                512,
+                kernel_size=kernel_size,
+                padding=kernel_size // 2,
+                bias_attr=False,
+            ),
+            nn.BatchNorm2D(512),
+        )
+
+        self.channel_att = ChannelAtt(512, 16)
+
+        self.pred_layer = nn.Sequential(
+            nn.Conv2D(512, self.out_channel, kernel_size=1, bias_attr=False),
+            nn.Sigmoid(),
+        )
+
+    def forward(self, x, mask):
+        b, _, h, w = x.shape
+        x = self.trans_layer(x)
+        x = self.channel_att(x)
+        x = self.pred_layer(x)
+
+        if mask is not None:
+            x = x * mask
+        x = paddle.reshape(x, [b, self.out_channel, -1])
+        x1 = paddle.sum(x, axis=-1)
+
+        return x1, paddle.reshape(x, [b, self.out_channel, h, w])
+
+
+"""
+Attention Decoder
+"""
+
+
+class PositionEmbeddingSine(nn.Layer):
+    def __init__(
+        self, num_pos_feats=64, temperature=10000, normalize=False, scale=None
+    ):
+        super().__init__()
+        self.num_pos_feats = num_pos_feats
+        self.temperature = temperature
+        self.normalize = normalize
+        if scale is not None and normalize is False:
+            raise ValueError("normalize should be True if scale is passed")
+        if scale is None:
+            scale = 2 * math.pi
+        self.scale = scale
+
+    def forward(self, x, mask):
+        y_embed = paddle.cumsum(mask, 1, dtype="float32")
+        x_embed = paddle.cumsum(mask, 2, dtype="float32")
+
+        if self.normalize:
+            eps = 1e-6
+            y_embed = y_embed / (y_embed[:, -1:, :] + eps) * self.scale
+            x_embed = x_embed / (x_embed[:, :, -1:] + eps) * self.scale
+        dim_t = paddle.arange(self.num_pos_feats, dtype="float32")
+        dim_d = paddle.expand(paddle.to_tensor(2), dim_t.shape)
+        dim_t = self.temperature ** (
+            2 * (dim_t / dim_d).astype("int64") / self.num_pos_feats
+        )
+
+        pos_x = paddle.unsqueeze(x_embed, [3]) / dim_t
+        pos_y = paddle.unsqueeze(y_embed, [3]) / dim_t
+
+        pos_x = paddle.flatten(
+            paddle.stack(
+                [paddle.sin(pos_x[:, :, :, 0::2]), paddle.cos(pos_x[:, :, :, 1::2])],
+                axis=4,
+            ),
+            3,
+        )
+        pos_y = paddle.flatten(
+            paddle.stack(
+                [paddle.sin(pos_y[:, :, :, 0::2]), paddle.cos(pos_y[:, :, :, 1::2])],
+                axis=4,
+            ),
+            3,
+        )
+
+        pos = paddle.transpose(paddle.concat([pos_y, pos_x], axis=3), [0, 3, 1, 2])
+
+        return pos
+
+
+class AttDecoder(nn.Layer):
+    def __init__(
+        self,
+        ratio,
+        is_train,
+        input_size,
+        hidden_size,
+        encoder_out_channel,
+        dropout,
+        dropout_ratio,
+        word_num,
+        counting_decoder_out_channel,
+        attention,
+    ):
+        super(AttDecoder, self).__init__()
+        self.input_size = input_size
+        self.hidden_size = hidden_size
+        self.out_channel = encoder_out_channel
+        self.attention_dim = attention["attention_dim"]
+        self.dropout_prob = dropout
+        self.ratio = ratio
+        self.word_num = word_num
+
+        self.counting_num = counting_decoder_out_channel
+        self.is_train = is_train
+
+        self.init_weight = nn.Linear(self.out_channel, self.hidden_size)
+        self.embedding = nn.Embedding(self.word_num, self.input_size)
+        self.word_input_gru = nn.GRUCell(self.input_size, self.hidden_size)
+        self.word_attention = Attention(hidden_size, attention["attention_dim"])
+
+        self.encoder_feature_conv = nn.Conv2D(
+            self.out_channel,
+            self.attention_dim,
+            kernel_size=attention["word_conv_kernel"],
+            padding=attention["word_conv_kernel"] // 2,
+        )
+
+        self.word_state_weight = nn.Linear(self.hidden_size, self.hidden_size)
+        self.word_embedding_weight = nn.Linear(self.input_size, self.hidden_size)
+        self.word_context_weight = nn.Linear(self.out_channel, self.hidden_size)
+        self.counting_context_weight = nn.Linear(self.counting_num, self.hidden_size)
+        self.word_convert = nn.Linear(self.hidden_size, self.word_num)
+
+        if dropout:
+            self.dropout = nn.Dropout(dropout_ratio)
+
+    def forward(self, cnn_features, labels, counting_preds, images_mask):
+        if self.is_train:
+            _, num_steps = labels.shape
+        else:
+            num_steps = 36
+
+        batch_size, _, height, width = cnn_features.shape
+        images_mask = images_mask[:, :, :: self.ratio, :: self.ratio]
+
+        word_probs = paddle.zeros((batch_size, num_steps, self.word_num))
+        word_alpha_sum = paddle.zeros((batch_size, 1, height, width))
+
+        hidden = self.init_hidden(cnn_features, images_mask)
+        counting_context_weighted = self.counting_context_weight(counting_preds)
+        cnn_features_trans = self.encoder_feature_conv(cnn_features)
+
+        position_embedding = PositionEmbeddingSine(256, normalize=True)
+        pos = position_embedding(cnn_features_trans, images_mask[:, 0, :, :])
+
+        cnn_features_trans = cnn_features_trans + pos
+
+        word = paddle.ones([batch_size, 1], dtype="int64")  # init word as sos
+        word = word.squeeze(axis=1)
+        for i in range(num_steps):
+            word_embedding = self.embedding(word)
+            _, hidden = self.word_input_gru(word_embedding, hidden)
+            word_context_vec, _, word_alpha_sum = self.word_attention(
+                cnn_features, cnn_features_trans, hidden, word_alpha_sum, images_mask
+            )
+
+            current_state = self.word_state_weight(hidden)
+            word_weighted_embedding = self.word_embedding_weight(word_embedding)
+            word_context_weighted = self.word_context_weight(word_context_vec)
+
+            if self.dropout_prob:
+                word_out_state = self.dropout(
+                    current_state
+                    + word_weighted_embedding
+                    + word_context_weighted
+                    + counting_context_weighted
+                )
+            else:
+                word_out_state = (
+                    current_state
+                    + word_weighted_embedding
+                    + word_context_weighted
+                    + counting_context_weighted
+                )
+
+            word_prob = self.word_convert(word_out_state)
+            word_probs[:, i] = word_prob
+
+            if self.is_train:
+                word = labels[:, i]
+            else:
+                word = word_prob.argmax(1)
+                word = paddle.multiply(
+                    word, labels[:, i]
+                )  # labels are a ones-like tensor in infer/predict mode
+
+        return word_probs
+
+    def init_hidden(self, features, feature_mask):
+        average = paddle.sum(
+            paddle.sum(features * feature_mask, axis=-1), axis=-1
+        ) / paddle.sum((paddle.sum(feature_mask, axis=-1)), axis=-1)
+        average = self.init_weight(average)
+        return paddle.tanh(average)
+
+
+"""
+Attention Module
+"""
+
+
+class Attention(nn.Layer):
+    def __init__(self, hidden_size, attention_dim):
+        super(Attention, self).__init__()
+        self.hidden = hidden_size
+        self.attention_dim = attention_dim
+        self.hidden_weight = nn.Linear(self.hidden, self.attention_dim)
+        self.attention_conv = nn.Conv2D(
+            1, 512, kernel_size=11, padding=5, bias_attr=False
+        )
+        self.attention_weight = nn.Linear(512, self.attention_dim, bias_attr=False)
+        self.alpha_convert = nn.Linear(self.attention_dim, 1)
+
+    def forward(
+        self, cnn_features, cnn_features_trans, hidden, alpha_sum, image_mask=None
+    ):
+        query = self.hidden_weight(hidden)
+        alpha_sum_trans = self.attention_conv(alpha_sum)
+        coverage_alpha = self.attention_weight(
+            paddle.transpose(alpha_sum_trans, [0, 2, 3, 1])
+        )
+        alpha_score = paddle.tanh(
+            paddle.unsqueeze(query, [1, 2])
+            + coverage_alpha
+            + paddle.transpose(cnn_features_trans, [0, 2, 3, 1])
+        )
+        energy = self.alpha_convert(alpha_score)
+        energy = energy - energy.max()
+        energy_exp = paddle.exp(paddle.squeeze(energy, -1))
+
+        if image_mask is not None:
+            energy_exp = energy_exp * paddle.squeeze(image_mask, 1)
+        alpha = energy_exp / (
+            paddle.unsqueeze(paddle.sum(paddle.sum(energy_exp, -1), -1), [1, 2]) + 1e-10
+        )
+        alpha_sum = paddle.unsqueeze(alpha, 1) + alpha_sum
+        context_vector = paddle.sum(
+            paddle.sum((paddle.unsqueeze(alpha, 1) * cnn_features), -1), -1
+        )
+
+        return context_vector, alpha, alpha_sum
+
+
+class CANHead(nn.Layer):
+    def __init__(self, in_channel, out_channel, ratio, attdecoder, **kwargs):
+        super(CANHead, self).__init__()
+
+        self.in_channel = in_channel
+        self.out_channel = out_channel
+
+        self.counting_decoder1 = CountingDecoder(
+            self.in_channel, self.out_channel, 3
+        )  # MSCM (multi-scale counting module), kernel size 3
+        self.counting_decoder2 = CountingDecoder(self.in_channel, self.out_channel, 5)
+
+        self.decoder = AttDecoder(ratio, **attdecoder)
+
+        self.ratio = ratio
+
+    def forward(self, inputs, targets=None):
+        cnn_features, images_mask, labels = inputs
+
+        counting_mask = images_mask[:, :, :: self.ratio, :: self.ratio]
+        counting_preds1, _ = self.counting_decoder1(cnn_features, counting_mask)
+        counting_preds2, _ = self.counting_decoder2(cnn_features, counting_mask)
+        counting_preds = (counting_preds1 + counting_preds2) / 2
+
+        word_probs = self.decoder(cnn_features, labels, counting_preds, images_mask)
+        return word_probs, counting_preds, counting_preds1, counting_preds2
diff --git a/ppocr/modeling/heads/rec_rfl_head.py b/ppocr/modeling/heads/rec_rfl_head.py
index e56d6f8fc..bd7efd422 100644
--- a/ppocr/modeling/heads/rec_rfl_head.py
+++ b/ppocr/modeling/heads/rec_rfl_head.py
@@ -12,7 +12,7 @@
 # See the License for the specific language governing permissions and
 # limitations under the License.
 """
-This code is refer from: 
+This code is referred from:
 https://github.com/hikopensource/DAVAR-Lab-OCR/blob/main/davarocr/davar_rcg/models/sequence_heads/counting_head.py
 """
 import paddle
diff --git a/ppocr/modeling/heads/rec_robustscanner_head.py b/ppocr/modeling/heads/rec_robustscanner_head.py
index 2289f1c8d..5510ee8fe 100644
--- a/ppocr/modeling/heads/rec_robustscanner_head.py
+++ b/ppocr/modeling/heads/rec_robustscanner_head.py
@@ -13,7 +13,7 @@
 # limitations under the License.
 
 """
-This code is refer from: 
+This code is referred from:
 https://github.com/open-mmlab/mmocr/blob/main/mmocr/models/textrecog/encoders/channel_reduction_encoder.py
 https://github.com/open-mmlab/mmocr/blob/main/mmocr/models/textrecog/decoders/robust_scanner_decoder.py
 """
diff --git a/ppocr/modeling/heads/rec_sar_head.py b/ppocr/modeling/heads/rec_sar_head.py
index 928e44563..9c646a1d6 100644
--- a/ppocr/modeling/heads/rec_sar_head.py
+++ b/ppocr/modeling/heads/rec_sar_head.py
@@ -12,7 +12,7 @@
 # See the License for the specific language governing permissions and
 # limitations under the License.
 """
-This code is refer from: 
+This code is referred from:
 https://github.com/open-mmlab/mmocr/blob/main/mmocr/models/textrecog/encoders/sar_encoder.py
 https://github.com/open-mmlab/mmocr/blob/main/mmocr/models/textrecog/decoders/sar_decoder.py
 """
diff --git a/ppocr/modeling/heads/rec_satrn_head.py b/ppocr/modeling/heads/rec_satrn_head.py
index 6367fc51a..8f204744b 100644
--- a/ppocr/modeling/heads/rec_satrn_head.py
+++ b/ppocr/modeling/heads/rec_satrn_head.py
@@ -12,7 +12,7 @@
 # See the License for the specific language governing permissions and
 # limitations under the License.
 """
-This code is refer from: 
+This code is referred from:
 https://github.com/open-mmlab/mmocr/blob/1.x/mmocr/models/textrecog/encoders/satrn_encoder.py
 https://github.com/open-mmlab/mmocr/blob/1.x/mmocr/models/textrecog/decoders/nrtr_decoder.py
 """
diff --git a/ppocr/modeling/heads/rec_spin_att_head.py b/ppocr/modeling/heads/rec_spin_att_head.py
index 87f3dadde..930d9704e 100644
--- a/ppocr/modeling/heads/rec_spin_att_head.py
+++ b/ppocr/modeling/heads/rec_spin_att_head.py
@@ -13,7 +13,7 @@
 # limitations under the License.
 
 """
-This code is refer from: 
+This code is referred from:
 https://github.com/hikopensource/DAVAR-Lab-OCR/davarocr/davar_rcg/models/sequence_heads/att_head.py
 """
 
diff --git a/ppocr/modeling/heads/rec_visionlan_head.py b/ppocr/modeling/heads/rec_visionlan_head.py
index 21b401721..755856f46 100644
--- a/ppocr/modeling/heads/rec_visionlan_head.py
+++ b/ppocr/modeling/heads/rec_visionlan_head.py
@@ -12,7 +12,7 @@
 # See the License for the specific language governing permissions and
 # limitations under the License.
 """
-This code is refer from: 
+This code is referred from:
 https://github.com/wangyuxin87/VisionLAN
 """
 
diff --git a/ppocr/modeling/heads/self_attention.py b/ppocr/modeling/heads/self_attention.py
index 59aff21b0..4f3ad3216 100644
--- a/ppocr/modeling/heads/self_attention.py
+++ b/ppocr/modeling/heads/self_attention.py
@@ -341,9 +341,11 @@ class PrePostProcessLayer(nn.Layer):
                 )
             elif cmd == "d":  # add dropout
                 self.functors.append(
-                    lambda x: F.dropout(x, p=dropout_rate, mode="downscale_in_infer")
-                    if dropout_rate
-                    else x
+                    lambda x: (
+                        F.dropout(x, p=dropout_rate, mode="downscale_in_infer")
+                        if dropout_rate
+                        else x
+                    )
                 )
 
     def forward(self, x, residual=None):
diff --git a/ppocr/modeling/necks/rf_adaptor.py b/ppocr/modeling/necks/rf_adaptor.py
index 3c30fe3b3..e27e8940d 100644
--- a/ppocr/modeling/necks/rf_adaptor.py
+++ b/ppocr/modeling/necks/rf_adaptor.py
@@ -12,7 +12,7 @@
 # See the License for the specific language governing permissions and
 # limitations under the License.
 """
-This code is refer from: 
+This code is referred from:
 https://github.com/hikopensource/DAVAR-Lab-OCR/blob/main/davarocr/davar_rcg/models/connects/single_block/RFAdaptor.py
 """
 
diff --git a/ppocr/modeling/transforms/tbsrn.py b/ppocr/modeling/transforms/tbsrn.py
index 4d1d373e4..a1fa16772 100644
--- a/ppocr/modeling/transforms/tbsrn.py
+++ b/ppocr/modeling/transforms/tbsrn.py
@@ -150,7 +150,7 @@ class TBSRN(nn.Layer):
         upsample_block_num = int(math.log(scale_factor, 2))
         self.block1 = nn.Sequential(
             nn.Conv2D(in_planes, 2 * hidden_units, kernel_size=9, padding=4),
-            nn.PReLU()
+            nn.PReLU(),
             # nn.ReLU()
         )
         self.srb_nums = srb_nums
diff --git a/ppocr/postprocess/pse_postprocess/pse/pse.pyx b/ppocr/postprocess/pse_postprocess/pse/pse.pyx
index b2be49e94..dfa90f39f 100644
--- a/ppocr/postprocess/pse_postprocess/pse/pse.pyx
+++ b/ppocr/postprocess/pse_postprocess/pse/pse.pyx
@@ -67,4 +67,4 @@ cdef np.ndarray[np.int32_t, ndim=2] _pse(np.ndarray[np.uint8_t, ndim=3] kernels,
 def pse(kernels, min_area):
     kernel_num = kernels.shape[0]
     label_num, label = cv2.connectedComponents(kernels[-1], connectivity=4)
-    return _pse(kernels[:-1], label, kernel_num, label_num, min_area)
\ No newline at end of file
+    return _pse(kernels[:-1], label, kernel_num, label_num, min_area)
diff --git a/ppocr/postprocess/rec_postprocess.py b/ppocr/postprocess/rec_postprocess.py
index 412a83969..46b629d53 100644
--- a/ppocr/postprocess/rec_postprocess.py
+++ b/ppocr/postprocess/rec_postprocess.py
@@ -1133,9 +1133,11 @@ class VLLabelDecode(BaseRecLabelDecode):
             preds_idx = net_out[start_idx:end_idx].topk(1)[1][:, 0].tolist()
             preds_text = "".join(
                 [
-                    self.character[idx - 1]
-                    if idx > 0 and idx <= len(self.character)
-                    else ""
+                    (
+                        self.character[idx - 1]
+                        if idx > 0 and idx <= len(self.character)
+                        else ""
+                    )
                     for idx in preds_idx
                 ]
             )
diff --git a/ppocr/utils/EN_symbol_dict.txt b/ppocr/utils/EN_symbol_dict.txt
index 1aef43d6b..87c6d6785 100644
--- a/ppocr/utils/EN_symbol_dict.txt
+++ b/ppocr/utils/EN_symbol_dict.txt
@@ -91,4 +91,4 @@ _
 {
 |
 }
-~
\ No newline at end of file
+~
diff --git a/ppocr/utils/dict/gujarati_dict.txt b/ppocr/utils/dict/gujarati_dict.txt
index 08c8bad67..36f55892a 100644
--- a/ppocr/utils/dict/gujarati_dict.txt
+++ b/ppocr/utils/dict/gujarati_dict.txt
@@ -45,4 +45,4 @@
 શ
 ષ
 સ
-હ
\ No newline at end of file
+હ
diff --git a/ppocr/utils/dict/kazakh_dict.txt b/ppocr/utils/dict/kazakh_dict.txt
index 15bac40be..007d4f5fa 100644
--- a/ppocr/utils/dict/kazakh_dict.txt
+++ b/ppocr/utils/dict/kazakh_dict.txt
@@ -39,4 +39,4 @@
 Ь
 Э
 Ю
-Я
\ No newline at end of file
+Я
diff --git a/ppocr/utils/dict/latex_symbol_dict.txt b/ppocr/utils/dict/latex_symbol_dict.txt
index b43f1fa8b..69b1b841c 100644
--- a/ppocr/utils/dict/latex_symbol_dict.txt
+++ b/ppocr/utils/dict/latex_symbol_dict.txt
@@ -1,111 +1,111 @@
-eos
-sos
-!
-'
-(
-)
-+
-,
--
-.
-/
-0
-1
-2
-3
-4
-5
-6
-7
-8
-9
-<
-=
->
-A
-B
-C
-E
-F
-G
-H
-I
-L
-M
-N
-P
-R
-S
-T
-V
-X
-Y
-[
-\Delta
-\alpha
-\beta
-\cdot
-\cdots
-\cos
-\div
-\exists
-\forall
-\frac
-\gamma
-\geq
-\in
-\infty
-\int
-\lambda
-\ldots
-\leq
-\lim
-\log
-\mu
-\neq
-\phi
-\pi
-\pm
-\prime
-\rightarrow
-\sigma
-\sin
-\sqrt
-\sum
-\tan
-\theta
-\times
-]
-a
-b
-c
-d
-e
-f
-g
-h
-i
-j
-k
-l
-m
-n
-o
-p
-q
-r
-s
-t
-u
-v
-w
-x
-y
-z
-\{
-|
-\}
-{
-}
-^
-_
\ No newline at end of file
+eos
+sos
+!
+'
+(
+)
++
+,
+-
+.
+/
+0
+1
+2
+3
+4
+5
+6
+7
+8
+9
+<
+=
+>
+A
+B
+C
+E
+F
+G
+H
+I
+L
+M
+N
+P
+R
+S
+T
+V
+X
+Y
+[
+\Delta
+\alpha
+\beta
+\cdot
+\cdots
+\cos
+\div
+\exists
+\forall
+\frac
+\gamma
+\geq
+\in
+\infty
+\int
+\lambda
+\ldots
+\leq
+\lim
+\log
+\mu
+\neq
+\phi
+\pi
+\pm
+\prime
+\rightarrow
+\sigma
+\sin
+\sqrt
+\sum
+\tan
+\theta
+\times
+]
+a
+b
+c
+d
+e
+f
+g
+h
+i
+j
+k
+l
+m
+n
+o
+p
+q
+r
+s
+t
+u
+v
+w
+x
+y
+z
+\{
+|
+\}
+{
+}
+^
+_
diff --git a/ppocr/utils/dict/layout_dict/layout_cdla_dict.txt b/ppocr/utils/dict/layout_dict/layout_cdla_dict.txt
index 8be0f4860..538033433 100644
--- a/ppocr/utils/dict/layout_dict/layout_cdla_dict.txt
+++ b/ppocr/utils/dict/layout_dict/layout_cdla_dict.txt
@@ -7,4 +7,4 @@ table_caption
 header
 footer
 reference
-equation
\ No newline at end of file
+equation
diff --git a/ppocr/utils/dict/layout_dict/layout_publaynet_dict.txt b/ppocr/utils/dict/layout_dict/layout_publaynet_dict.txt
index ca6acf4ee..d9c92dacb 100644
--- a/ppocr/utils/dict/layout_dict/layout_publaynet_dict.txt
+++ b/ppocr/utils/dict/layout_dict/layout_publaynet_dict.txt
@@ -2,4 +2,4 @@ text
 title
 list
 table
-figure
\ No newline at end of file
+figure
diff --git a/ppocr/utils/dict/layout_dict/layout_table_dict.txt b/ppocr/utils/dict/layout_dict/layout_table_dict.txt
index faea15ea0..ecbe2e6d5 100644
--- a/ppocr/utils/dict/layout_dict/layout_table_dict.txt
+++ b/ppocr/utils/dict/layout_dict/layout_table_dict.txt
@@ -1 +1 @@
-table
\ No newline at end of file
+table
diff --git a/ppocr/utils/dict/parseq_dict.txt b/ppocr/utils/dict/parseq_dict.txt
index 1aef43d6b..87c6d6785 100644
--- a/ppocr/utils/dict/parseq_dict.txt
+++ b/ppocr/utils/dict/parseq_dict.txt
@@ -91,4 +91,4 @@ _
 {
 |
 }
-~
\ No newline at end of file
+~
diff --git a/ppocr/utils/dict/spin_dict.txt b/ppocr/utils/dict/spin_dict.txt
index 8ee8347fd..b6c16c835 100644
--- a/ppocr/utils/dict/spin_dict.txt
+++ b/ppocr/utils/dict/spin_dict.txt
@@ -65,4 +65,4 @@ $
 ~
 \
 }
-^
\ No newline at end of file
+^
diff --git a/ppocr/utils/dict/table_structure_dict.txt b/ppocr/utils/dict/table_structure_dict.txt
index 8edb10b88..fec6f7dfe 100644
--- a/ppocr/utils/dict/table_structure_dict.txt
+++ b/ppocr/utils/dict/table_structure_dict.txt
@@ -25,4 +25,4 @@
  rowspan="8"
  rowspan="6"
  rowspan="7"
- rowspan="10"
\ No newline at end of file
+ rowspan="10"
diff --git a/ppocr/utils/dict/ug_dict.txt b/ppocr/utils/dict/ug_dict.txt
index a3fd6d00f..8fc0db707 100644
--- a/ppocr/utils/dict/ug_dict.txt
+++ b/ppocr/utils/dict/ug_dict.txt
@@ -128,4 +128,4 @@ Z
 ې
 ى
 ي
-ئ
\ No newline at end of file
+ئ
diff --git a/ppocr/utils/dict90.txt b/ppocr/utils/dict90.txt
index a945ae9c5..46b879507 100644
--- a/ppocr/utils/dict90.txt
+++ b/ppocr/utils/dict90.txt
@@ -87,4 +87,4 @@ $
 ]
 _
 `
-~
\ No newline at end of file
+~
diff --git a/ppocr/utils/e2e_metric/polygon_fast.py b/ppocr/utils/e2e_metric/polygon_fast.py
index 2e2d947c8..e2a08c6fb 100755
--- a/ppocr/utils/e2e_metric/polygon_fast.py
+++ b/ppocr/utils/e2e_metric/polygon_fast.py
@@ -15,7 +15,7 @@ import numpy as np
 from shapely.geometry import Polygon
 
 """
-:param det_x: [1, N] Xs of detection's vertices 
+:param det_x: [1, N] Xs of detection's vertices
 :param det_y: [1, N] Ys of detection's vertices
 :param gt_x: [1, N] Xs of groundtruth's vertices
 :param gt_y: [1, N] Ys of groundtruth's vertices
diff --git a/ppocr/utils/ic15_dict.txt b/ppocr/utils/ic15_dict.txt
index 474060366..710436890 100644
--- a/ppocr/utils/ic15_dict.txt
+++ b/ppocr/utils/ic15_dict.txt
@@ -33,4 +33,4 @@ v
 w
 x
 y
-z
\ No newline at end of file
+z
diff --git a/ppocr/utils/ppocr_keys_v1.txt b/ppocr/utils/ppocr_keys_v1.txt
index 84b885d83..b75af2130 100644
--- a/ppocr/utils/ppocr_keys_v1.txt
+++ b/ppocr/utils/ppocr_keys_v1.txt
@@ -6620,4 +6620,4 @@ j
 緖
 續
 紹
-懮
\ No newline at end of file
+懮
diff --git a/ppstructure/README.md b/ppstructure/README.md
index 6d426157e..f52b5b9ce 100644
--- a/ppstructure/README.md
+++ b/ppstructure/README.md
@@ -91,7 +91,7 @@ In the figure, the red box represents `Question`, the blue box represents `Answe
 
 <div align="center">
     <img src="https://user-images.githubusercontent.com/25809855/186094813-3a8e16cc-42e5-4982-b9f4-0134dfb5688d.png" width="600">
-</div>  
+</div>
 
 <div align="center">
     <img src="https://user-images.githubusercontent.com/25809855/186095641-5843b4da-34d7-4c1c-943a-b1036a859fe3.png" width="600">
diff --git a/ppstructure/README_ch.md b/ppstructure/README_ch.md
index 019e84c1a..9d78df62f 100644
--- a/ppstructure/README_ch.md
+++ b/ppstructure/README_ch.md
@@ -103,7 +103,7 @@ PP-StructureV2支持各个模块独立使用或灵活搭配,如,可以单独
 
 <div align="center">
     <img src="https://user-images.githubusercontent.com/25809855/186094813-3a8e16cc-42e5-4982-b9f4-0134dfb5688d.png" width="600">
-</div>  
+</div>
 
 <div align="center">
     <img src="https://user-images.githubusercontent.com/25809855/186095641-5843b4da-34d7-4c1c-943a-b1036a859fe3.png" width="600">
diff --git a/ppstructure/docs/PP-StructureV2_introduction.md b/ppstructure/docs/PP-StructureV2_introduction.md
index 555fc4560..865f93606 100644
--- a/ppstructure/docs/PP-StructureV2_introduction.md
+++ b/ppstructure/docs/PP-StructureV2_introduction.md
@@ -24,9 +24,9 @@ PP-StructureV2在PP-StructureV1的基础上进一步改进,主要有以下3个
 
  * **系统功能升级** :新增图像矫正和版面复原模块,图像转word/pdf、关键信息抽取能力全覆盖!
  * **系统性能优化** :
-	 * 版面分析:发布轻量级版面分析模型,速度提升**11倍**,平均CPU耗时仅需**41ms**!
-	 * 表格识别:设计3大优化策略,预测耗时不变情况下,模型精度提升**6%**。
-	 * 关键信息抽取:设计视觉无关模型结构,语义实体识别精度提升**2.8%**,关系抽取精度提升**9.1%**。
+     * 版面分析:发布轻量级版面分析模型,速度提升**11倍**,平均CPU耗时仅需**41ms**!
+     * 表格识别:设计3大优化策略,预测耗时不变情况下,模型精度提升**6%**。
+     * 关键信息抽取:设计视觉无关模型结构,语义实体识别精度提升**2.8%**,关系抽取精度提升**9.1%**。
  * **中文场景适配** :完成对版面分析与表格识别的中文场景适配,开源**开箱即用**的中文场景版面结构化模型!
 
 PP-StructureV2系统流程图如下所示,文档图像首先经过图像矫正模块,判断整图方向并完成转正,随后可以完成版面信息分析与关键信息抽取2类任务。版面分析任务中,图像首先经过版面分析模型,将图像划分为文本、表格、图像等不同区域,随后对这些区域分别进行识别,如,将表格区域送入表格识别模块进行结构化识别,将文本区域送入OCR引擎进行文字识别,最后使用版面恢复模块将其恢复为与原始图像布局一致的word或者pdf格式的文件;关键信息抽取任务中,首先使用OCR引擎提取文本内容,然后由语义实体识别模块获取图像中的语义实体,最后经关系抽取模块获取语义实体之间的对应关系,从而提取需要的关键信息。
@@ -39,18 +39,18 @@ PP-StructureV2系统流程图如下所示,文档图像首先经过图像矫正
 从算法改进思路来看,对系统中的3个关键子模块,共进行了8个方面的改进。
 
 * 版面分析
-	* PP-PicoDet:轻量级版面分析模型
-	* FGD:兼顾全局与局部特征的模型蒸馏算法
+    * PP-PicoDet:轻量级版面分析模型
+    * FGD:兼顾全局与局部特征的模型蒸馏算法
 
 * 表格识别
-	* PP-LCNet:  CPU友好型轻量级骨干网络
-	* CSP-PAN:轻量级高低层特征融合模块
-	* SLAHead:结构与位置信息对齐的特征解码模块
+    * PP-LCNet:  CPU友好型轻量级骨干网络
+    * CSP-PAN:轻量级高低层特征融合模块
+    * SLAHead:结构与位置信息对齐的特征解码模块
 
 * 关键信息抽取
-	* VI-LayoutXLM:视觉特征无关的多模态预训练模型结构
-	* TB-YX:考虑阅读顺序的文本行排序逻辑
-	* UDML:联合互学习知识蒸馏策略
+    * VI-LayoutXLM:视觉特征无关的多模态预训练模型结构
+    * TB-YX:考虑阅读顺序的文本行排序逻辑
+    * UDML:联合互学习知识蒸馏策略
 
 最终,与PP-StructureV1相比:
 
@@ -87,8 +87,8 @@ PP-StructureV2系统流程图如下所示,文档图像首先经过图像矫正
 | 5    | PP-PicoDet-LCNet1.0x(800*608) + FGD |  9.7  | 94.00% |41.20|
 
 * 测试条件
-	* paddle版本:2.3.0
-	* CPU:Intel(R) Xeon(R) Gold 6148 CPU @ 2.40GHz,开启mkldnn,线程数为10
+    * paddle版本:2.3.0
+    * CPU:Intel(R) Xeon(R) Gold 6148 CPU @ 2.40GHz,开启mkldnn,线程数为10
 
 在PubLayNet数据集上,与其他方法的性能对比如下表所示。可以看到,和基于Detectron2的版面分析工具layoutparser相比,我们的模型精度高出大约5%,预测速度快约69倍。
 
@@ -167,11 +167,11 @@ FGD(Focal and Global Knowledge Distillation for Detectors),是一种兼顾
 
 |策略|Acc|TEDS|推理速度(CPU+MKLDNN)|模型大小|
 |---|---|---|---|---|
-|TableRec-RARE|	71.73% | 93.88% |779ms	|6.8M|
-|+PP-LCNet|	74.71% |94.37%	|778ms|	8.7M|
-|+CSP-PAN|	75.68%| 94.72%	|708ms|	9.3M|
-|+SLAHead|	77.70%|94.85%|	766ms|	9.2M|
-|+MergeToken|	76.31%|	95.89%|766ms|	9.2M|
+|TableRec-RARE| 71.73% | 93.88% |779ms  |6.8M|
+|+PP-LCNet| 74.71% |94.37%  |778ms| 8.7M|
+|+CSP-PAN|  75.68%| 94.72%  |708ms| 9.3M|
+|+SLAHead|  77.70%|94.85%|  766ms|  9.2M|
+|+MergeToken|   76.31%| 95.89%|766ms|   9.2M|
 
 * 测试环境
     * paddle版本:2.3.1
@@ -182,8 +182,8 @@ FGD(Focal and Global Knowledge Distillation for Detectors),是一种兼顾
 |策略|Acc|TEDS|推理速度(CPU+MKLDNN)|模型大小|
 |---|---|---|---|---|
 |TableMaster|77.90%|96.12%|2144ms|253.0M|
-|TableRec-RARE|	71.73% | 93.88% |779ms	|6.8M|
-|SLANet|76.31%|	95.89%|766ms|9.2M|
+|TableRec-RARE| 71.73% | 93.88% |779ms  |6.8M|
+|SLANet|76.31%| 95.89%|766ms|9.2M|
 
 #### 4.2.1 优化策略
 
@@ -283,9 +283,9 @@ XFUND-zh数据集上,SER任务的消融实验如下所示。
 | 5    | 实验3 + UDML蒸馏                | 1.1     | **93.19%** | **15.49**       | **675.58**      |
 
 * 测试条件
-	* paddle版本:2.3.0
-	* GPU:V100,实验5的GPU预测耗时使用`trt+fp16`测试得到,环境为cuda10.2+ cudnn8.1.1 + trt7.2.3.4,其他实验的预测耗时统计中没有使用TRT。
-	* CPU:Intel(R) Xeon(R) Gold 6148 CPU @ 2.40GHz,开启mkldnn,线程数为10
+    * paddle版本:2.3.0
+    * GPU:V100,实验5的GPU预测耗时使用`trt+fp16`测试得到,环境为cuda10.2+ cudnn8.1.1 + trt7.2.3.4,其他实验的预测耗时统计中没有使用TRT。
+    * CPU:Intel(R) Xeon(R) Gold 6148 CPU @ 2.40GHz,开启mkldnn,线程数为10
 
 在XFUND数据集上,与其他方法的效果对比如下所示。
 
@@ -318,10 +318,10 @@ LayoutLMv2以及LayoutXLM中引入视觉骨干网络,用于提取视觉特征
 
 ```py
 def order_by_tbyx(ocr_info, th=20):
-	"""
-	ocr_info: a list of dict, which contains bbox information([x1, y1, x2, y2])
-	th: threshold of the position threshold
-	"""
+    """
+    ocr_info: a list of dict, which contains bbox information([x1, y1, x2, y2])
+    th: threshold of the position threshold
+    """
     res = sorted(ocr_info, key=lambda r: (r["bbox"][1], r["bbox"][0])) # sort using y1 first and then x1
     for i in range(len(res) - 1):
         for j in range(i, 0, -1):
diff --git a/ppstructure/docs/quickstart.md b/ppstructure/docs/quickstart.md
index a24f0b084..efff4a8c3 100644
--- a/ppstructure/docs/quickstart.md
+++ b/ppstructure/docs/quickstart.md
@@ -56,7 +56,7 @@ pip3 install paddleclas>=2.4.3
 ## 2. 便捷使用
 
 <a name="21"></a>
-### 2.1 命令行使用  
+### 2.1 命令行使用
 
 <a name="211"></a>
 #### 2.1.1 图像方向分类+版面分析+表格识别
diff --git a/ppstructure/docs/quickstart_en.md b/ppstructure/docs/quickstart_en.md
index 672d34470..79217bdcb 100644
--- a/ppstructure/docs/quickstart_en.md
+++ b/ppstructure/docs/quickstart_en.md
@@ -15,7 +15,7 @@
     - [2.2.3 layout analysis](#223-layout-analysis)
     - [2.2.4 table recognition](#224-table-recognition)
     - [2.2.5 Key Information Extraction](#225-Key-Information-Extraction)
-    - [2.2.6 layout recovery](#226-layout-recovery)  
+    - [2.2.6 layout recovery](#226-layout-recovery)
   - [2.3 Result description](#23-result-description)
     - [2.3.1 layout analysis + table recognition](#231-layout-analysis--table-recognition)
     - [2.3.2 Key Information Extraction](#232-Key-Information-Extraction)
diff --git a/ppstructure/pdf2word/pdf2word.py b/ppstructure/pdf2word/pdf2word.py
index c9e61eee8..0e9160543 100644
--- a/ppstructure/pdf2word/pdf2word.py
+++ b/ppstructure/pdf2word/pdf2word.py
@@ -1,537 +1,537 @@
-# copyright (c) 2022 PaddlePaddle Authors. All Rights Reserve.
-#
-# Licensed under the Apache License, Version 2.0 (the "License");
-# you may not use this file except in compliance with the License.
-# You may obtain a copy of the License at
-#
-#    http://www.apache.org/licenses/LICENSE-2.0
-#
-# Unless required by applicable law or agreed to in writing, software
-# distributed under the License is distributed on an "AS IS" BASIS,
-# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-# See the License for the specific language governing permissions and
-# limitations under the License.
-
-import sys
-import tarfile
-import os
-import time
-import datetime
-import functools
-import cv2
-import platform
-import numpy as np
-from paddle.utils import try_import
-
-fitz = try_import("fitz")
-from PIL import Image
-from qtpy.QtWidgets import (
-    QApplication,
-    QWidget,
-    QPushButton,
-    QProgressBar,
-    QGridLayout,
-    QMessageBox,
-    QLabel,
-    QFileDialog,
-    QCheckBox,
-)
-from qtpy.QtCore import Signal, QThread, QObject
-from qtpy.QtGui import QImage, QPixmap, QIcon
-
-file = os.path.dirname(os.path.abspath(__file__))
-root = os.path.abspath(os.path.join(file, "../../"))
-sys.path.append(file)
-sys.path.insert(0, root)
-
-from ppstructure.predict_system import StructureSystem, save_structure_res
-from ppstructure.utility import parse_args, draw_structure_result
-from ppocr.utils.network import download_with_progressbar
-from ppstructure.recovery.recovery_to_doc import sorted_layout_boxes, convert_info_docx
-
-# from ScreenShotWidget import ScreenShotWidget
-
-__APPNAME__ = "pdf2word"
-__VERSION__ = "0.2.2"
-
-URLs_EN = {
-    # 下载超英文轻量级PP-OCRv3模型的检测模型并解压
-    "en_PP-OCRv3_det_infer": "https://paddleocr.bj.bcebos.com/PP-OCRv3/english/en_PP-OCRv3_det_infer.tar",
-    # 下载英文轻量级PP-OCRv3模型的识别模型并解压
-    "en_PP-OCRv3_rec_infer": "https://paddleocr.bj.bcebos.com/PP-OCRv3/english/en_PP-OCRv3_rec_infer.tar",
-    # 下载超轻量级英文表格英文模型并解压
-    "en_ppstructure_mobile_v2.0_SLANet_infer": "https://paddleocr.bj.bcebos.com/ppstructure/models/slanet/en_ppstructure_mobile_v2.0_SLANet_infer.tar",
-    # 英文版面分析模型
-    "picodet_lcnet_x1_0_fgd_layout_infer": "https://paddleocr.bj.bcebos.com/ppstructure/models/layout/picodet_lcnet_x1_0_fgd_layout_infer.tar",
-}
-DICT_EN = {
-    "rec_char_dict_path": "en_dict.txt",
-    "layout_dict_path": "layout_publaynet_dict.txt",
-}
-
-URLs_CN = {
-    # 下载超中文轻量级PP-OCRv3模型的检测模型并解压
-    "cn_PP-OCRv3_det_infer": "https://paddleocr.bj.bcebos.com/PP-OCRv3/chinese/ch_PP-OCRv3_det_infer.tar",
-    # 下载中文轻量级PP-OCRv3模型的识别模型并解压
-    "cn_PP-OCRv3_rec_infer": "https://paddleocr.bj.bcebos.com/PP-OCRv3/chinese/ch_PP-OCRv3_rec_infer.tar",
-    # 下载超轻量级英文表格英文模型并解压
-    "cn_ppstructure_mobile_v2.0_SLANet_infer": "https://paddleocr.bj.bcebos.com/ppstructure/models/slanet/en_ppstructure_mobile_v2.0_SLANet_infer.tar",
-    # 中文版面分析模型
-    "picodet_lcnet_x1_0_fgd_layout_cdla_infer": "https://paddleocr.bj.bcebos.com/ppstructure/models/layout/picodet_lcnet_x1_0_fgd_layout_cdla_infer.tar",
-}
-DICT_CN = {
-    "rec_char_dict_path": "ppocr_keys_v1.txt",
-    "layout_dict_path": "layout_cdla_dict.txt",
-}
-
-
-def QImageToCvMat(incomingImage) -> np.array:
-    """
-    Converts a QImage into an opencv MAT format
-    """
-
-    incomingImage = incomingImage.convertToFormat(QImage.Format.Format_RGBA8888)
-
-    width = incomingImage.width()
-    height = incomingImage.height()
-
-    ptr = incomingImage.bits()
-    ptr.setsize(height * width * 4)
-    arr = np.frombuffer(ptr, np.uint8).reshape((height, width, 4))
-    return arr
-
-
-def readImage(image_file) -> list:
-    if os.path.basename(image_file)[-3:] == "pdf":
-        imgs = []
-        with fitz.open(image_file) as pdf:
-            for pg in range(0, pdf.pageCount):
-                page = pdf[pg]
-                mat = fitz.Matrix(2, 2)
-                pm = page.getPixmap(matrix=mat, alpha=False)
-
-                # if width or height > 2000 pixels, don't enlarge the image
-                if pm.width > 2000 or pm.height > 2000:
-                    pm = page.getPixmap(matrix=fitz.Matrix(1, 1), alpha=False)
-
-                img = Image.frombytes("RGB", [pm.width, pm.height], pm.samples)
-                img = cv2.cvtColor(np.array(img), cv2.COLOR_RGB2BGR)
-                imgs.append(img)
-    else:
-        img = cv2.imread(image_file, cv2.IMREAD_COLOR)
-        if img is not None:
-            imgs = [img]
-
-    return imgs
-
-
-class Worker(QThread):
-    progressBarValue = Signal(int)
-    progressBarRange = Signal(int)
-    endsignal = Signal()
-    exceptedsignal = Signal(str)  # 发送一个异常信号
-    loopFlag = True
-
-    def __init__(self, predictors, save_pdf, vis_font_path, use_pdf2docx_api):
-        super(Worker, self).__init__()
-        self.predictors = predictors
-        self.save_pdf = save_pdf
-        self.vis_font_path = vis_font_path
-        self.lang = "EN"
-        self.imagePaths = []
-        self.use_pdf2docx_api = use_pdf2docx_api
-        self.outputDir = None
-        self.totalPageCnt = 0
-        self.pageCnt = 0
-        self.setStackSize(1024 * 1024)
-
-    def setImagePath(self, imagePaths):
-        self.imagePaths = imagePaths
-
-    def setLang(self, lang):
-        self.lang = lang
-
-    def setOutputDir(self, outputDir):
-        self.outputDir = outputDir
-
-    def setPDFParser(self, enabled):
-        self.use_pdf2docx_api = enabled
-
-    def resetPageCnt(self):
-        self.pageCnt = 0
-
-    def resetTotalPageCnt(self):
-        self.totalPageCnt = 0
-
-    def ppocrPrecitor(self, imgs, img_name):
-        all_res = []
-        # update progress bar ranges
-        self.totalPageCnt += len(imgs)
-        self.progressBarRange.emit(self.totalPageCnt)
-        # processing pages
-        for index, img in enumerate(imgs):
-            res, time_dict = self.predictors[self.lang](img)
-
-            # save output
-            save_structure_res(res, self.outputDir, img_name)
-            # draw_img = draw_structure_result(img, res, self.vis_font_path)
-            # img_save_path = os.path.join(self.outputDir, img_name, 'show_{}.jpg'.format(index))
-            # if res != []:
-            #     cv2.imwrite(img_save_path, draw_img)
-
-            # recovery
-            h, w, _ = img.shape
-            res = sorted_layout_boxes(res, w)
-            all_res += res
-            self.pageCnt += 1
-            self.progressBarValue.emit(self.pageCnt)
-
-        if all_res != []:
-            try:
-                convert_info_docx(imgs, all_res, self.outputDir, img_name)
-            except Exception as ex:
-                print(
-                    "error in layout recovery image:{}, err msg: {}".format(
-                        img_name, ex
-                    )
-                )
-        print("Predict time : {:.3f}s".format(time_dict["all"]))
-        print("result save to {}".format(self.outputDir))
-
-    def run(self):
-        self.resetPageCnt()
-        self.resetTotalPageCnt()
-        try:
-            os.makedirs(self.outputDir, exist_ok=True)
-            for i, image_file in enumerate(self.imagePaths):
-                if not self.loopFlag:
-                    break
-                # using use_pdf2docx_api for PDF parsing
-                if self.use_pdf2docx_api and os.path.basename(image_file)[-3:] == "pdf":
-                    try_import("pdf2docx")
-                    from pdf2docx.converter import Converter
-
-                    self.totalPageCnt += 1
-                    self.progressBarRange.emit(self.totalPageCnt)
-                    print("===============using use_pdf2docx_api===============")
-                    img_name = os.path.basename(image_file).split(".")[0]
-                    docx_file = os.path.join(self.outputDir, "{}.docx".format(img_name))
-                    cv = Converter(image_file)
-                    cv.convert(docx_file)
-                    cv.close()
-                    print("docx save to {}".format(docx_file))
-                    self.pageCnt += 1
-                    self.progressBarValue.emit(self.pageCnt)
-                else:
-                    # using PPOCR for PDF/Image parsing
-                    imgs = readImage(image_file)
-                    if len(imgs) == 0:
-                        continue
-                    img_name = os.path.basename(image_file).split(".")[0]
-                    os.makedirs(os.path.join(self.outputDir, img_name), exist_ok=True)
-                    self.ppocrPrecitor(imgs, img_name)
-                # file processed
-            self.endsignal.emit()
-            # self.exec()
-        except Exception as e:
-            self.exceptedsignal.emit(str(e))  # 将异常发送给UI进程
-
-
-class APP_Image2Doc(QWidget):
-    def __init__(self):
-        super().__init__()
-        # self.setFixedHeight(100)
-        # self.setFixedWidth(520)
-
-        # settings
-        self.imagePaths = []
-        # self.screenShotWg = ScreenShotWidget()
-        self.screenShot = None
-        self.save_pdf = False
-        self.output_dir = None
-        self.vis_font_path = os.path.join(root, "doc", "fonts", "simfang.ttf")
-        self.use_pdf2docx_api = False
-
-        # ProgressBar
-        self.pb = QProgressBar()
-        self.pb.setRange(0, 100)
-        self.pb.setValue(0)
-
-        # 初始化界面
-        self.setupUi()
-
-        # 下载模型
-        self.downloadModels(URLs_EN)
-        self.downloadModels(URLs_CN)
-
-        # 初始化模型
-        predictors = {
-            "EN": self.initPredictor("EN"),
-            "CN": self.initPredictor("CN"),
-        }
-
-        # 设置工作进程
-        self._thread = Worker(
-            predictors, self.save_pdf, self.vis_font_path, self.use_pdf2docx_api
-        )
-        self._thread.progressBarValue.connect(self.handleProgressBarUpdateSingal)
-        self._thread.endsignal.connect(self.handleEndsignalSignal)
-        # self._thread.finished.connect(QObject.deleteLater)
-        self._thread.progressBarRange.connect(self.handleProgressBarRangeSingal)
-        self._thread.exceptedsignal.connect(self.handleThreadException)
-        self.time_start = 0  # save start time
-
-    def setupUi(self):
-        self.setObjectName("MainWindow")
-        self.setWindowTitle(__APPNAME__ + " " + __VERSION__)
-
-        layout = QGridLayout()
-
-        self.openFileButton = QPushButton("打开文件")
-        self.openFileButton.setIcon(QIcon(QPixmap("./icons/folder-plus.png")))
-        layout.addWidget(self.openFileButton, 0, 0, 1, 1)
-        self.openFileButton.clicked.connect(self.handleOpenFileSignal)
-
-        # screenShotButton = QPushButton("截图识别")
-        # layout.addWidget(screenShotButton, 0, 1, 1, 1)
-        # screenShotButton.clicked.connect(self.screenShotSlot)
-        # screenShotButton.setEnabled(False) # temporarily disenble
-
-        self.startCNButton = QPushButton("中文转换")
-        self.startCNButton.setIcon(QIcon(QPixmap("./icons/chinese.png")))
-        layout.addWidget(self.startCNButton, 0, 1, 1, 1)
-        self.startCNButton.clicked.connect(
-            functools.partial(self.handleStartSignal, "CN", False)
-        )
-
-        self.startENButton = QPushButton("英文转换")
-        self.startENButton.setIcon(QIcon(QPixmap("./icons/english.png")))
-        layout.addWidget(self.startENButton, 0, 2, 1, 1)
-        self.startENButton.clicked.connect(
-            functools.partial(self.handleStartSignal, "EN", False)
-        )
-
-        self.PDFParserButton = QPushButton("PDF解析", self)
-        layout.addWidget(self.PDFParserButton, 0, 3, 1, 1)
-        self.PDFParserButton.clicked.connect(
-            functools.partial(self.handleStartSignal, "CN", True)
-        )
-
-        self.showResultButton = QPushButton("显示结果")
-        self.showResultButton.setIcon(QIcon(QPixmap("./icons/folder-open.png")))
-        layout.addWidget(self.showResultButton, 0, 4, 1, 1)
-        self.showResultButton.clicked.connect(self.handleShowResultSignal)
-
-        # ProgressBar
-        layout.addWidget(self.pb, 2, 0, 1, 5)
-        # time estimate label
-        self.timeEstLabel = QLabel(("Time Left: --"))
-        layout.addWidget(self.timeEstLabel, 3, 0, 1, 5)
-
-        self.setLayout(layout)
-
-    def downloadModels(self, URLs):
-        # using custom model
-        tar_file_name_list = [
-            "inference.pdiparams",
-            "inference.pdiparams.info",
-            "inference.pdmodel",
-            "model.pdiparams",
-            "model.pdiparams.info",
-            "model.pdmodel",
-        ]
-        model_path = os.path.join(root, "inference")
-        os.makedirs(model_path, exist_ok=True)
-
-        # download and unzip models
-        for name in URLs.keys():
-            url = URLs[name]
-            print("Try downloading file: {}".format(url))
-            tarname = url.split("/")[-1]
-            tarpath = os.path.join(model_path, tarname)
-            if os.path.exists(tarpath):
-                print("File have already exist. skip")
-            else:
-                try:
-                    download_with_progressbar(url, tarpath)
-                except Exception as e:
-                    print("Error occurred when downloading file, error message:")
-                    print(e)
-
-            # unzip model tar
-            try:
-                with tarfile.open(tarpath, "r") as tarObj:
-                    storage_dir = os.path.join(model_path, name)
-                    os.makedirs(storage_dir, exist_ok=True)
-                    for member in tarObj.getmembers():
-                        filename = None
-                        for tar_file_name in tar_file_name_list:
-                            if tar_file_name in member.name:
-                                filename = tar_file_name
-                        if filename is None:
-                            continue
-                        file = tarObj.extractfile(member)
-                        with open(os.path.join(storage_dir, filename), "wb") as f:
-                            f.write(file.read())
-            except Exception as e:
-                print("Error occurred when unziping file, error message:")
-                print(e)
-
-    def initPredictor(self, lang="EN"):
-        # init predictor args
-        args = parse_args()
-        args.table_max_len = 488
-        args.ocr = True
-        args.recovery = True
-        args.save_pdf = self.save_pdf
-        args.table_char_dict_path = os.path.join(
-            root, "ppocr", "utils", "dict", "table_structure_dict.txt"
-        )
-        if lang == "EN":
-            args.det_model_dir = os.path.join(
-                root, "inference", "en_PP-OCRv3_det_infer"  # 此处从这里找到模型存放位置
-            )
-            args.rec_model_dir = os.path.join(
-                root, "inference", "en_PP-OCRv3_rec_infer"
-            )
-            args.table_model_dir = os.path.join(
-                root, "inference", "en_ppstructure_mobile_v2.0_SLANet_infer"
-            )
-            args.output = os.path.join(root, "output")  # 结果保存路径
-            args.layout_model_dir = os.path.join(
-                root, "inference", "picodet_lcnet_x1_0_fgd_layout_infer"
-            )
-            lang_dict = DICT_EN
-        elif lang == "CN":
-            args.det_model_dir = os.path.join(
-                root, "inference", "cn_PP-OCRv3_det_infer"  # 此处从这里找到模型存放位置
-            )
-            args.rec_model_dir = os.path.join(
-                root, "inference", "cn_PP-OCRv3_rec_infer"
-            )
-            args.table_model_dir = os.path.join(
-                root, "inference", "cn_ppstructure_mobile_v2.0_SLANet_infer"
-            )
-            args.output = os.path.join(root, "output")  # 结果保存路径
-            args.layout_model_dir = os.path.join(
-                root, "inference", "picodet_lcnet_x1_0_fgd_layout_cdla_infer"
-            )
-            lang_dict = DICT_CN
-        else:
-            raise ValueError("Unsupported language")
-        args.rec_char_dict_path = os.path.join(
-            root, "ppocr", "utils", lang_dict["rec_char_dict_path"]
-        )
-        args.layout_dict_path = os.path.join(
-            root, "ppocr", "utils", "dict", "layout_dict", lang_dict["layout_dict_path"]
-        )
-        # init predictor
-        return StructureSystem(args)
-
-    def handleOpenFileSignal(self):
-        """
-        可以多选图像文件
-        """
-        selectedFiles = QFileDialog.getOpenFileNames(
-            self, "多文件选择", "/", "图片文件 (*.png *.jpeg *.jpg *.bmp *.pdf)"
-        )[0]
-        if len(selectedFiles) > 0:
-            self.imagePaths = selectedFiles
-            self.screenShot = None  # discard screenshot temp image
-            self.pb.setValue(0)
-
-    # def screenShotSlot(self):
-    #     '''
-    #     选定图像文件和截图的转换过程只能同时进行一个
-    #     截图只能同时转换一个
-    #     '''
-    #     self.screenShotWg.start()
-    #     if self.screenShotWg.captureImage:
-    #         self.screenShot = self.screenShotWg.captureImage
-    #         self.imagePaths.clear() # discard openfile temp list
-    #         self.pb.setRange(0, 1)
-    #         self.pb.setValue(0)
-
-    def handleStartSignal(self, lang="EN", pdfParser=False):
-        if self.screenShot:  # for screenShot
-            img_name = "screenshot_" + time.strftime("%Y%m%d%H%M%S", time.localtime())
-            image = QImageToCvMat(self.screenShot)
-            self.predictAndSave(image, img_name, lang)
-            # update Progress Bar
-            self.pb.setValue(1)
-            QMessageBox.information(self, "Information", "文档提取完成")
-        elif len(self.imagePaths) > 0:  # for image file selection
-            # Must set image path list and language before start
-            self.output_dir = os.path.join(
-                os.path.dirname(self.imagePaths[0]), "output"
-            )  # output_dir shold be same as imagepath
-            self._thread.setOutputDir(self.output_dir)
-            self._thread.setImagePath(self.imagePaths)
-            self._thread.setLang(lang)
-            self._thread.setPDFParser(pdfParser)
-            # disenble buttons
-            self.openFileButton.setEnabled(False)
-            self.startCNButton.setEnabled(False)
-            self.startENButton.setEnabled(False)
-            self.PDFParserButton.setEnabled(False)
-            # 启动工作进程
-            self._thread.start()
-            self.time_start = time.time()  # log start time
-            QMessageBox.information(self, "Information", "开始转换")
-        else:
-            QMessageBox.warning(self, "Information", "请选择要识别的文件或截图")
-
-    def handleShowResultSignal(self):
-        if self.output_dir is None:
-            return
-        if os.path.exists(self.output_dir):
-            if platform.system() == "Windows":
-                os.startfile(self.output_dir)
-            else:
-                os.system("open " + os.path.normpath(self.output_dir))
-        else:
-            QMessageBox.information(self, "Information", "输出文件不存在")
-
-    def handleProgressBarUpdateSingal(self, i):
-        self.pb.setValue(i)
-        # calculate time left of recognition
-        lenbar = self.pb.maximum()
-        avg_time = (
-            time.time() - self.time_start
-        ) / i  # Use average time to prevent time fluctuations
-        time_left = str(datetime.timedelta(seconds=avg_time * (lenbar - i))).split(".")[
-            0
-        ]  # Remove microseconds
-        self.timeEstLabel.setText(f"Time Left: {time_left}")  # show time left
-
-    def handleProgressBarRangeSingal(self, max):
-        self.pb.setRange(0, max)
-
-    def handleEndsignalSignal(self):
-        # enble buttons
-        self.openFileButton.setEnabled(True)
-        self.startCNButton.setEnabled(True)
-        self.startENButton.setEnabled(True)
-        self.PDFParserButton.setEnabled(True)
-        QMessageBox.information(self, "Information", "转换结束")
-
-    def handleCBChangeSignal(self):
-        self._thread.setPDFParser(self.checkBox.isChecked())
-
-    def handleThreadException(self, message):
-        self._thread.quit()
-        QMessageBox.information(self, "Error", message)
-
-
-def main():
-    app = QApplication(sys.argv)
-
-    window = APP_Image2Doc()  # 创建对象
-    window.show()  # 全屏显示窗口
-
-    QApplication.processEvents()
-    sys.exit(app.exec())
-
-
-if __name__ == "__main__":
-    main()
+# copyright (c) 2022 PaddlePaddle Authors. All Rights Reserve.
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+#    http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
+import sys
+import tarfile
+import os
+import time
+import datetime
+import functools
+import cv2
+import platform
+import numpy as np
+from paddle.utils import try_import
+
+fitz = try_import("fitz")
+from PIL import Image
+from qtpy.QtWidgets import (
+    QApplication,
+    QWidget,
+    QPushButton,
+    QProgressBar,
+    QGridLayout,
+    QMessageBox,
+    QLabel,
+    QFileDialog,
+    QCheckBox,
+)
+from qtpy.QtCore import Signal, QThread, QObject
+from qtpy.QtGui import QImage, QPixmap, QIcon
+
+file = os.path.dirname(os.path.abspath(__file__))
+root = os.path.abspath(os.path.join(file, "../../"))
+sys.path.append(file)
+sys.path.insert(0, root)
+
+from ppstructure.predict_system import StructureSystem, save_structure_res
+from ppstructure.utility import parse_args, draw_structure_result
+from ppocr.utils.network import download_with_progressbar
+from ppstructure.recovery.recovery_to_doc import sorted_layout_boxes, convert_info_docx
+
+# from ScreenShotWidget import ScreenShotWidget
+
+__APPNAME__ = "pdf2word"
+__VERSION__ = "0.2.2"
+
+URLs_EN = {
+    # 下载超轻量级英文PP-OCRv3模型的检测模型并解压
+    "en_PP-OCRv3_det_infer": "https://paddleocr.bj.bcebos.com/PP-OCRv3/english/en_PP-OCRv3_det_infer.tar",
+    # 下载英文轻量级PP-OCRv3模型的识别模型并解压
+    "en_PP-OCRv3_rec_infer": "https://paddleocr.bj.bcebos.com/PP-OCRv3/english/en_PP-OCRv3_rec_infer.tar",
+    # 下载超轻量级英文表格识别模型并解压
+    "en_ppstructure_mobile_v2.0_SLANet_infer": "https://paddleocr.bj.bcebos.com/ppstructure/models/slanet/en_ppstructure_mobile_v2.0_SLANet_infer.tar",
+    # 英文版面分析模型
+    "picodet_lcnet_x1_0_fgd_layout_infer": "https://paddleocr.bj.bcebos.com/ppstructure/models/layout/picodet_lcnet_x1_0_fgd_layout_infer.tar",
+}
+DICT_EN = {
+    "rec_char_dict_path": "en_dict.txt",
+    "layout_dict_path": "layout_publaynet_dict.txt",
+}
+
+URLs_CN = {
+    # 下载超轻量级中文PP-OCRv3模型的检测模型并解压
+    "cn_PP-OCRv3_det_infer": "https://paddleocr.bj.bcebos.com/PP-OCRv3/chinese/ch_PP-OCRv3_det_infer.tar",
+    # 下载中文轻量级PP-OCRv3模型的识别模型并解压
+    "cn_PP-OCRv3_rec_infer": "https://paddleocr.bj.bcebos.com/PP-OCRv3/chinese/ch_PP-OCRv3_rec_infer.tar",
+    # 下载超轻量级英文表格识别模型并解压
+    "cn_ppstructure_mobile_v2.0_SLANet_infer": "https://paddleocr.bj.bcebos.com/ppstructure/models/slanet/en_ppstructure_mobile_v2.0_SLANet_infer.tar",
+    # 中文版面分析模型
+    "picodet_lcnet_x1_0_fgd_layout_cdla_infer": "https://paddleocr.bj.bcebos.com/ppstructure/models/layout/picodet_lcnet_x1_0_fgd_layout_cdla_infer.tar",
+}
+DICT_CN = {
+    "rec_char_dict_path": "ppocr_keys_v1.txt",
+    "layout_dict_path": "layout_cdla_dict.txt",
+}
+
+
+def QImageToCvMat(incomingImage) -> np.array:
+    """
+    Converts a QImage into an opencv MAT format
+    """
+
+    incomingImage = incomingImage.convertToFormat(QImage.Format.Format_RGBA8888)
+
+    width = incomingImage.width()
+    height = incomingImage.height()
+
+    ptr = incomingImage.bits()
+    ptr.setsize(height * width * 4)
+    arr = np.frombuffer(ptr, np.uint8).reshape((height, width, 4))
+    return arr
+
+
+def readImage(image_file) -> list:
+    if os.path.basename(image_file)[-3:] == "pdf":
+        imgs = []
+        with fitz.open(image_file) as pdf:
+            for pg in range(0, pdf.pageCount):
+                page = pdf[pg]
+                mat = fitz.Matrix(2, 2)
+                pm = page.getPixmap(matrix=mat, alpha=False)
+
+                # if width or height > 2000 pixels, don't enlarge the image
+                if pm.width > 2000 or pm.height > 2000:
+                    pm = page.getPixmap(matrix=fitz.Matrix(1, 1), alpha=False)
+
+                img = Image.frombytes("RGB", [pm.width, pm.height], pm.samples)
+                img = cv2.cvtColor(np.array(img), cv2.COLOR_RGB2BGR)
+                imgs.append(img)
+    else:
+        img = cv2.imread(image_file, cv2.IMREAD_COLOR)
+        if img is not None:
+            imgs = [img]
+
+    return imgs
+
+
+class Worker(QThread):
+    progressBarValue = Signal(int)
+    progressBarRange = Signal(int)
+    endsignal = Signal()
+    exceptedsignal = Signal(str)  # 发送一个异常信号
+    loopFlag = True
+
+    def __init__(self, predictors, save_pdf, vis_font_path, use_pdf2docx_api):
+        super(Worker, self).__init__()
+        self.predictors = predictors
+        self.save_pdf = save_pdf
+        self.vis_font_path = vis_font_path
+        self.lang = "EN"
+        self.imagePaths = []
+        self.use_pdf2docx_api = use_pdf2docx_api
+        self.outputDir = None
+        self.totalPageCnt = 0
+        self.pageCnt = 0
+        self.setStackSize(1024 * 1024)
+
+    def setImagePath(self, imagePaths):
+        self.imagePaths = imagePaths
+
+    def setLang(self, lang):
+        self.lang = lang
+
+    def setOutputDir(self, outputDir):
+        self.outputDir = outputDir
+
+    def setPDFParser(self, enabled):
+        self.use_pdf2docx_api = enabled
+
+    def resetPageCnt(self):
+        self.pageCnt = 0
+
+    def resetTotalPageCnt(self):
+        self.totalPageCnt = 0
+
+    def ppocrPrecitor(self, imgs, img_name):
+        all_res = []
+        # update progress bar ranges
+        self.totalPageCnt += len(imgs)
+        self.progressBarRange.emit(self.totalPageCnt)
+        # processing pages
+        for index, img in enumerate(imgs):
+            res, time_dict = self.predictors[self.lang](img)
+
+            # save output
+            save_structure_res(res, self.outputDir, img_name)
+            # draw_img = draw_structure_result(img, res, self.vis_font_path)
+            # img_save_path = os.path.join(self.outputDir, img_name, 'show_{}.jpg'.format(index))
+            # if res != []:
+            #     cv2.imwrite(img_save_path, draw_img)
+
+            # recovery
+            h, w, _ = img.shape
+            res = sorted_layout_boxes(res, w)
+            all_res += res
+            self.pageCnt += 1
+            self.progressBarValue.emit(self.pageCnt)
+
+        if all_res != []:
+            try:
+                convert_info_docx(imgs, all_res, self.outputDir, img_name)
+            except Exception as ex:
+                print(
+                    "error in layout recovery image:{}, err msg: {}".format(
+                        img_name, ex
+                    )
+                )
+        print("Predict time : {:.3f}s".format(time_dict["all"]))
+        print("result save to {}".format(self.outputDir))
+
+    def run(self):
+        self.resetPageCnt()
+        self.resetTotalPageCnt()
+        try:
+            os.makedirs(self.outputDir, exist_ok=True)
+            for i, image_file in enumerate(self.imagePaths):
+                if not self.loopFlag:
+                    break
+                # using use_pdf2docx_api for PDF parsing
+                if self.use_pdf2docx_api and os.path.basename(image_file)[-3:] == "pdf":
+                    try_import("pdf2docx")
+                    from pdf2docx.converter import Converter
+
+                    self.totalPageCnt += 1
+                    self.progressBarRange.emit(self.totalPageCnt)
+                    print("===============using use_pdf2docx_api===============")
+                    img_name = os.path.basename(image_file).split(".")[0]
+                    docx_file = os.path.join(self.outputDir, "{}.docx".format(img_name))
+                    cv = Converter(image_file)
+                    cv.convert(docx_file)
+                    cv.close()
+                    print("docx save to {}".format(docx_file))
+                    self.pageCnt += 1
+                    self.progressBarValue.emit(self.pageCnt)
+                else:
+                    # using PPOCR for PDF/Image parsing
+                    imgs = readImage(image_file)
+                    if len(imgs) == 0:
+                        continue
+                    img_name = os.path.basename(image_file).split(".")[0]
+                    os.makedirs(os.path.join(self.outputDir, img_name), exist_ok=True)
+                    self.ppocrPrecitor(imgs, img_name)
+                # file processed
+            self.endsignal.emit()
+            # self.exec()
+        except Exception as e:
+            self.exceptedsignal.emit(str(e))  # 将异常发送给UI进程
+
+
+class APP_Image2Doc(QWidget):
+    def __init__(self):
+        super().__init__()
+        # self.setFixedHeight(100)
+        # self.setFixedWidth(520)
+
+        # settings
+        self.imagePaths = []
+        # self.screenShotWg = ScreenShotWidget()
+        self.screenShot = None
+        self.save_pdf = False
+        self.output_dir = None
+        self.vis_font_path = os.path.join(root, "doc", "fonts", "simfang.ttf")
+        self.use_pdf2docx_api = False
+
+        # ProgressBar
+        self.pb = QProgressBar()
+        self.pb.setRange(0, 100)
+        self.pb.setValue(0)
+
+        # 初始化界面
+        self.setupUi()
+
+        # 下载模型
+        self.downloadModels(URLs_EN)
+        self.downloadModels(URLs_CN)
+
+        # 初始化模型
+        predictors = {
+            "EN": self.initPredictor("EN"),
+            "CN": self.initPredictor("CN"),
+        }
+
+        # 设置工作进程
+        self._thread = Worker(
+            predictors, self.save_pdf, self.vis_font_path, self.use_pdf2docx_api
+        )
+        self._thread.progressBarValue.connect(self.handleProgressBarUpdateSingal)
+        self._thread.endsignal.connect(self.handleEndsignalSignal)
+        # self._thread.finished.connect(QObject.deleteLater)
+        self._thread.progressBarRange.connect(self.handleProgressBarRangeSingal)
+        self._thread.exceptedsignal.connect(self.handleThreadException)
+        self.time_start = 0  # save start time
+
+    def setupUi(self):
+        self.setObjectName("MainWindow")
+        self.setWindowTitle(__APPNAME__ + " " + __VERSION__)
+
+        layout = QGridLayout()
+
+        self.openFileButton = QPushButton("打开文件")
+        self.openFileButton.setIcon(QIcon(QPixmap("./icons/folder-plus.png")))
+        layout.addWidget(self.openFileButton, 0, 0, 1, 1)
+        self.openFileButton.clicked.connect(self.handleOpenFileSignal)
+
+        # screenShotButton = QPushButton("截图识别")
+        # layout.addWidget(screenShotButton, 0, 1, 1, 1)
+        # screenShotButton.clicked.connect(self.screenShotSlot)
+        # screenShotButton.setEnabled(False) # temporarily disabled
+
+        self.startCNButton = QPushButton("中文转换")
+        self.startCNButton.setIcon(QIcon(QPixmap("./icons/chinese.png")))
+        layout.addWidget(self.startCNButton, 0, 1, 1, 1)
+        self.startCNButton.clicked.connect(
+            functools.partial(self.handleStartSignal, "CN", False)
+        )
+
+        self.startENButton = QPushButton("英文转换")
+        self.startENButton.setIcon(QIcon(QPixmap("./icons/english.png")))
+        layout.addWidget(self.startENButton, 0, 2, 1, 1)
+        self.startENButton.clicked.connect(
+            functools.partial(self.handleStartSignal, "EN", False)
+        )
+
+        self.PDFParserButton = QPushButton("PDF解析", self)
+        layout.addWidget(self.PDFParserButton, 0, 3, 1, 1)
+        self.PDFParserButton.clicked.connect(
+            functools.partial(self.handleStartSignal, "CN", True)
+        )
+
+        self.showResultButton = QPushButton("显示结果")
+        self.showResultButton.setIcon(QIcon(QPixmap("./icons/folder-open.png")))
+        layout.addWidget(self.showResultButton, 0, 4, 1, 1)
+        self.showResultButton.clicked.connect(self.handleShowResultSignal)
+
+        # ProgressBar
+        layout.addWidget(self.pb, 2, 0, 1, 5)
+        # time estimate label
+        self.timeEstLabel = QLabel(("Time Left: --"))
+        layout.addWidget(self.timeEstLabel, 3, 0, 1, 5)
+
+        self.setLayout(layout)
+
+    def downloadModels(self, URLs):
+        # using custom model
+        tar_file_name_list = [
+            "inference.pdiparams",
+            "inference.pdiparams.info",
+            "inference.pdmodel",
+            "model.pdiparams",
+            "model.pdiparams.info",
+            "model.pdmodel",
+        ]
+        model_path = os.path.join(root, "inference")
+        os.makedirs(model_path, exist_ok=True)
+
+        # download and unzip models
+        for name in URLs.keys():
+            url = URLs[name]
+            print("Try downloading file: {}".format(url))
+            tarname = url.split("/")[-1]
+            tarpath = os.path.join(model_path, tarname)
+            if os.path.exists(tarpath):
+                print("File have already exist. skip")
+            else:
+                try:
+                    download_with_progressbar(url, tarpath)
+                except Exception as e:
+                    print("Error occurred when downloading file, error message:")
+                    print(e)
+
+            # unzip model tar
+            try:
+                with tarfile.open(tarpath, "r") as tarObj:
+                    storage_dir = os.path.join(model_path, name)
+                    os.makedirs(storage_dir, exist_ok=True)
+                    for member in tarObj.getmembers():
+                        filename = None
+                        for tar_file_name in tar_file_name_list:
+                            if tar_file_name in member.name:
+                                filename = tar_file_name
+                        if filename is None:
+                            continue
+                        file = tarObj.extractfile(member)
+                        with open(os.path.join(storage_dir, filename), "wb") as f:
+                            f.write(file.read())
+            except Exception as e:
+                print("Error occurred when unziping file, error message:")
+                print(e)
+
+    def initPredictor(self, lang="EN"):
+        # init predictor args
+        args = parse_args()
+        args.table_max_len = 488
+        args.ocr = True
+        args.recovery = True
+        args.save_pdf = self.save_pdf
+        args.table_char_dict_path = os.path.join(
+            root, "ppocr", "utils", "dict", "table_structure_dict.txt"
+        )
+        if lang == "EN":
+            args.det_model_dir = os.path.join(
+                root, "inference", "en_PP-OCRv3_det_infer"  # 此处从这里找到模型存放位置
+            )
+            args.rec_model_dir = os.path.join(
+                root, "inference", "en_PP-OCRv3_rec_infer"
+            )
+            args.table_model_dir = os.path.join(
+                root, "inference", "en_ppstructure_mobile_v2.0_SLANet_infer"
+            )
+            args.output = os.path.join(root, "output")  # 结果保存路径
+            args.layout_model_dir = os.path.join(
+                root, "inference", "picodet_lcnet_x1_0_fgd_layout_infer"
+            )
+            lang_dict = DICT_EN
+        elif lang == "CN":
+            args.det_model_dir = os.path.join(
+                root, "inference", "cn_PP-OCRv3_det_infer"  # 此处从这里找到模型存放位置
+            )
+            args.rec_model_dir = os.path.join(
+                root, "inference", "cn_PP-OCRv3_rec_infer"
+            )
+            args.table_model_dir = os.path.join(
+                root, "inference", "cn_ppstructure_mobile_v2.0_SLANet_infer"
+            )
+            args.output = os.path.join(root, "output")  # 结果保存路径
+            args.layout_model_dir = os.path.join(
+                root, "inference", "picodet_lcnet_x1_0_fgd_layout_cdla_infer"
+            )
+            lang_dict = DICT_CN
+        else:
+            raise ValueError("Unsupported language")
+        args.rec_char_dict_path = os.path.join(
+            root, "ppocr", "utils", lang_dict["rec_char_dict_path"]
+        )
+        args.layout_dict_path = os.path.join(
+            root, "ppocr", "utils", "dict", "layout_dict", lang_dict["layout_dict_path"]
+        )
+        # init predictor
+        return StructureSystem(args)
+
+    def handleOpenFileSignal(self):
+        """
+        可以多选图像文件
+        """
+        selectedFiles = QFileDialog.getOpenFileNames(
+            self, "多文件选择", "/", "图片文件 (*.png *.jpeg *.jpg *.bmp *.pdf)"
+        )[0]
+        if len(selectedFiles) > 0:
+            self.imagePaths = selectedFiles
+            self.screenShot = None  # discard screenshot temp image
+            self.pb.setValue(0)
+
+    # def screenShotSlot(self):
+    #     '''
+    #     选定图像文件和截图的转换过程只能同时进行一个
+    #     截图只能同时转换一个
+    #     '''
+    #     self.screenShotWg.start()
+    #     if self.screenShotWg.captureImage:
+    #         self.screenShot = self.screenShotWg.captureImage
+    #         self.imagePaths.clear() # discard openfile temp list
+    #         self.pb.setRange(0, 1)
+    #         self.pb.setValue(0)
+
+    def handleStartSignal(self, lang="EN", pdfParser=False):
+        if self.screenShot:  # for screenShot
+            img_name = "screenshot_" + time.strftime("%Y%m%d%H%M%S", time.localtime())
+            image = QImageToCvMat(self.screenShot)
+            self.predictAndSave(image, img_name, lang)
+            # update Progress Bar
+            self.pb.setValue(1)
+            QMessageBox.information(self, "Information", "文档提取完成")
+        elif len(self.imagePaths) > 0:  # for image file selection
+            # Must set image path list and language before start
+            self.output_dir = os.path.join(
+                os.path.dirname(self.imagePaths[0]), "output"
+            )  # output_dir should be same as imagepath
+            self._thread.setOutputDir(self.output_dir)
+            self._thread.setImagePath(self.imagePaths)
+            self._thread.setLang(lang)
+            self._thread.setPDFParser(pdfParser)
+            # disable buttons
+            self.openFileButton.setEnabled(False)
+            self.startCNButton.setEnabled(False)
+            self.startENButton.setEnabled(False)
+            self.PDFParserButton.setEnabled(False)
+            # 启动工作进程
+            self._thread.start()
+            self.time_start = time.time()  # log start time
+            QMessageBox.information(self, "Information", "开始转换")
+        else:
+            QMessageBox.warning(self, "Information", "请选择要识别的文件或截图")
+
+    def handleShowResultSignal(self):
+        if self.output_dir is None:
+            return
+        if os.path.exists(self.output_dir):
+            if platform.system() == "Windows":
+                os.startfile(self.output_dir)
+            else:
+                os.system("open " + os.path.normpath(self.output_dir))
+        else:
+            QMessageBox.information(self, "Information", "输出文件不存在")
+
+    def handleProgressBarUpdateSingal(self, i):
+        self.pb.setValue(i)
+        # calculate time left of recognition
+        lenbar = self.pb.maximum()
+        avg_time = (
+            time.time() - self.time_start
+        ) / i  # Use average time to prevent time fluctuations
+        time_left = str(datetime.timedelta(seconds=avg_time * (lenbar - i))).split(".")[
+            0
+        ]  # Remove microseconds
+        self.timeEstLabel.setText(f"Time Left: {time_left}")  # show time left
+
+    def handleProgressBarRangeSingal(self, max):
+        self.pb.setRange(0, max)
+
+    def handleEndsignalSignal(self):
+        # enable buttons
+        self.openFileButton.setEnabled(True)
+        self.startCNButton.setEnabled(True)
+        self.startENButton.setEnabled(True)
+        self.PDFParserButton.setEnabled(True)
+        QMessageBox.information(self, "Information", "转换结束")
+
+    def handleCBChangeSignal(self):
+        self._thread.setPDFParser(self.checkBox.isChecked())
+
+    def handleThreadException(self, message):
+        self._thread.quit()
+        QMessageBox.information(self, "Error", message)
+
+
+def main():
+    app = QApplication(sys.argv)
+
+    window = APP_Image2Doc()  # 创建对象
+    window.show()  # 显示窗口
+
+    QApplication.processEvents()
+    sys.exit(app.exec())
+
+
+if __name__ == "__main__":
+    main()
diff --git a/ppstructure/table/README_ch.md b/ppstructure/table/README_ch.md
index b8817523c..cf4e515b5 100644
--- a/ppstructure/table/README_ch.md
+++ b/ppstructure/table/README_ch.md
@@ -40,7 +40,7 @@
 | --- | --- | --- | ---|
 | EDD<sup>[2]</sup> |x| 88.30% |x|
 | TableRec-RARE(ours) | 71.73%| 93.88% |779ms|
-| SLANet(ours) |76.31%|	95.89%|766ms|
+| SLANet(ours) |76.31%| 95.89%|766ms|
 
 性能指标解释如下:
 - Acc: 模型对每张图像里表格结构的识别准确率,错一个token就算错误。
diff --git a/ppstructure/table/predict_structure.py b/ppstructure/table/predict_structure.py
index 93a930b27..3881f9fb4 100755
--- a/ppstructure/table/predict_structure.py
+++ b/ppstructure/table/predict_structure.py
@@ -45,12 +45,16 @@ def build_pre_process_list(args):
     pad_op = {"PaddingTableImage": {"size": [args.table_max_len, args.table_max_len]}}
     normalize_op = {
         "NormalizeImage": {
-            "std": [0.229, 0.224, 0.225]
-            if args.table_algorithm not in ["TableMaster"]
-            else [0.5, 0.5, 0.5],
-            "mean": [0.485, 0.456, 0.406]
-            if args.table_algorithm not in ["TableMaster"]
-            else [0.5, 0.5, 0.5],
+            "std": (
+                [0.229, 0.224, 0.225]
+                if args.table_algorithm not in ["TableMaster"]
+                else [0.5, 0.5, 0.5]
+            ),
+            "mean": (
+                [0.485, 0.456, 0.406]
+                if args.table_algorithm not in ["TableMaster"]
+                else [0.5, 0.5, 0.5]
+            ),
             "scale": "1./255.",
             "order": "hwc",
         }
diff --git a/test_tipc/common_func.sh b/test_tipc/common_func.sh
index 1bbf82916..9ec22f03a 100644
--- a/test_tipc/common_func.sh
+++ b/test_tipc/common_func.sh
@@ -65,4 +65,3 @@ function status_check(){
         echo -e "\033[33m Run failed with command - ${model_name} - ${run_command} - ${log_path} \033[0m" | tee -a ${run_log}
     fi
 }
-
diff --git a/test_tipc/configs/ch_PP-OCRv2/model_linux_gpu_normal_normal_infer_cpp_linux_gpu_cpu.txt b/test_tipc/configs/ch_PP-OCRv2/model_linux_gpu_normal_normal_infer_cpp_linux_gpu_cpu.txt
index a0c49a081..2a432b383 100644
--- a/test_tipc/configs/ch_PP-OCRv2/model_linux_gpu_normal_normal_infer_cpp_linux_gpu_cpu.txt
+++ b/test_tipc/configs/ch_PP-OCRv2/model_linux_gpu_normal_normal_infer_cpp_linux_gpu_cpu.txt
@@ -17,4 +17,4 @@ inference:./deploy/cpp_infer/build/ppocr --rec_char_dict_path=./ppocr/utils/ppoc
 --det:True
 --rec:True
 --cls:False
---use_angle_cls:False
\ No newline at end of file
+--use_angle_cls:False
diff --git a/test_tipc/configs/ch_PP-OCRv2/model_linux_gpu_normal_normal_paddle2onnx_python_linux_cpu.txt b/test_tipc/configs/ch_PP-OCRv2/model_linux_gpu_normal_normal_paddle2onnx_python_linux_cpu.txt
index 24eb620ee..6a85f9fdc 100644
--- a/test_tipc/configs/ch_PP-OCRv2/model_linux_gpu_normal_normal_paddle2onnx_python_linux_cpu.txt
+++ b/test_tipc/configs/ch_PP-OCRv2/model_linux_gpu_normal_normal_paddle2onnx_python_linux_cpu.txt
@@ -14,4 +14,4 @@ inference:tools/infer/predict_system.py --rec_image_shape="3,32,320"
 --use_gpu:True|False
 --det_model_dir:
 --rec_model_dir:
---image_dir:./inference/ch_det_data_50/all-sum-510/00008790.jpg
\ No newline at end of file
+--image_dir:./inference/ch_det_data_50/all-sum-510/00008790.jpg
diff --git a/test_tipc/configs/ch_PP-OCRv2_det/model_linux_gpu_normal_normal_infer_cpp_linux_gpu_cpu.txt b/test_tipc/configs/ch_PP-OCRv2_det/model_linux_gpu_normal_normal_infer_cpp_linux_gpu_cpu.txt
index 7eccbd725..a723fff2b 100644
--- a/test_tipc/configs/ch_PP-OCRv2_det/model_linux_gpu_normal_normal_infer_cpp_linux_gpu_cpu.txt
+++ b/test_tipc/configs/ch_PP-OCRv2_det/model_linux_gpu_normal_normal_infer_cpp_linux_gpu_cpu.txt
@@ -17,4 +17,4 @@ null:null
 --det:True
 --rec:False
 --cls:False
---use_angle_cls:False
\ No newline at end of file
+--use_angle_cls:False
diff --git a/test_tipc/configs/ch_PP-OCRv2_det/model_linux_gpu_normal_normal_lite_cpp_arm_cpu.txt b/test_tipc/configs/ch_PP-OCRv2_det/model_linux_gpu_normal_normal_lite_cpp_arm_cpu.txt
index 37ad2380e..003d77635 100644
--- a/test_tipc/configs/ch_PP-OCRv2_det/model_linux_gpu_normal_normal_lite_cpp_arm_cpu.txt
+++ b/test_tipc/configs/ch_PP-OCRv2_det/model_linux_gpu_normal_normal_lite_cpp_arm_cpu.txt
@@ -10,4 +10,4 @@ null:null
 --image_dir:./test_data/icdar2015_lite/text_localization/ch4_test_images/
 --config_dir:./config.txt
 null:null
---benchmark:True
\ No newline at end of file
+--benchmark:True
diff --git a/test_tipc/configs/ch_PP-OCRv2_det/model_linux_gpu_normal_normal_paddle2onnx_python_linux_cpu.txt b/test_tipc/configs/ch_PP-OCRv2_det/model_linux_gpu_normal_normal_paddle2onnx_python_linux_cpu.txt
index 2e7906076..834aef4fc 100644
--- a/test_tipc/configs/ch_PP-OCRv2_det/model_linux_gpu_normal_normal_paddle2onnx_python_linux_cpu.txt
+++ b/test_tipc/configs/ch_PP-OCRv2_det/model_linux_gpu_normal_normal_paddle2onnx_python_linux_cpu.txt
@@ -14,4 +14,4 @@ inference:tools/infer/predict_det.py
 --use_gpu:True|False
 --det_model_dir:
 --rec_model_dir:
---image_dir:./inference/ch_det_data_50/all-sum-510/
\ No newline at end of file
+--image_dir:./inference/ch_det_data_50/all-sum-510/
diff --git a/test_tipc/configs/ch_PP-OCRv2_det_KL/model_linux_gpu_normal_normal_infer_cpp_linux_gpu_cpu.txt b/test_tipc/configs/ch_PP-OCRv2_det_KL/model_linux_gpu_normal_normal_infer_cpp_linux_gpu_cpu.txt
index 1975e099d..038223a2f 100644
--- a/test_tipc/configs/ch_PP-OCRv2_det_KL/model_linux_gpu_normal_normal_infer_cpp_linux_gpu_cpu.txt
+++ b/test_tipc/configs/ch_PP-OCRv2_det_KL/model_linux_gpu_normal_normal_infer_cpp_linux_gpu_cpu.txt
@@ -17,4 +17,4 @@ null:null
 --det:True
 --rec:False
 --cls:False
---use_angle_cls:False
\ No newline at end of file
+--use_angle_cls:False
diff --git a/test_tipc/configs/ch_PP-OCRv2_det_PACT/model_linux_gpu_normal_normal_infer_cpp_linux_gpu_cpu.txt b/test_tipc/configs/ch_PP-OCRv2_det_PACT/model_linux_gpu_normal_normal_infer_cpp_linux_gpu_cpu.txt
index 43ef97d50..2106672a2 100644
--- a/test_tipc/configs/ch_PP-OCRv2_det_PACT/model_linux_gpu_normal_normal_infer_cpp_linux_gpu_cpu.txt
+++ b/test_tipc/configs/ch_PP-OCRv2_det_PACT/model_linux_gpu_normal_normal_infer_cpp_linux_gpu_cpu.txt
@@ -17,4 +17,4 @@ null:null
 --det:True
 --rec:False
 --cls:False
---use_angle_cls:False
\ No newline at end of file
+--use_angle_cls:False
diff --git a/test_tipc/configs/ch_PP-OCRv2_rec/model_linux_gpu_normal_normal_infer_cpp_linux_gpu_cpu.txt b/test_tipc/configs/ch_PP-OCRv2_rec/model_linux_gpu_normal_normal_infer_cpp_linux_gpu_cpu.txt
index b1bff00b0..c616e41d2 100644
--- a/test_tipc/configs/ch_PP-OCRv2_rec/model_linux_gpu_normal_normal_infer_cpp_linux_gpu_cpu.txt
+++ b/test_tipc/configs/ch_PP-OCRv2_rec/model_linux_gpu_normal_normal_infer_cpp_linux_gpu_cpu.txt
@@ -17,4 +17,4 @@ null:null
 --det:False
 --rec:True
 --cls:False
---use_angle_cls:False
\ No newline at end of file
+--use_angle_cls:False
diff --git a/test_tipc/configs/ch_PP-OCRv2_rec/model_linux_gpu_normal_normal_paddle2onnx_python_linux_cpu.txt b/test_tipc/configs/ch_PP-OCRv2_rec/model_linux_gpu_normal_normal_paddle2onnx_python_linux_cpu.txt
index e374a5d82..003d5939f 100644
--- a/test_tipc/configs/ch_PP-OCRv2_rec/model_linux_gpu_normal_normal_paddle2onnx_python_linux_cpu.txt
+++ b/test_tipc/configs/ch_PP-OCRv2_rec/model_linux_gpu_normal_normal_paddle2onnx_python_linux_cpu.txt
@@ -14,4 +14,4 @@ inference:tools/infer/predict_rec.py --rec_image_shape="3,32,320"
 --use_gpu:True|False
 --det_model_dir:
 --rec_model_dir:
---image_dir:./inference/rec_inference/
\ No newline at end of file
+--image_dir:./inference/rec_inference/
diff --git a/test_tipc/configs/ch_PP-OCRv2_rec_KL/model_linux_gpu_normal_normal_infer_cpp_linux_gpu_cpu.txt b/test_tipc/configs/ch_PP-OCRv2_rec_KL/model_linux_gpu_normal_normal_infer_cpp_linux_gpu_cpu.txt
index 95e4062d1..cdf6cb31a 100644
--- a/test_tipc/configs/ch_PP-OCRv2_rec_KL/model_linux_gpu_normal_normal_infer_cpp_linux_gpu_cpu.txt
+++ b/test_tipc/configs/ch_PP-OCRv2_rec_KL/model_linux_gpu_normal_normal_infer_cpp_linux_gpu_cpu.txt
@@ -17,4 +17,4 @@ null:null
 --det:False
 --rec:True
 --cls:False
---use_angle_cls:False
\ No newline at end of file
+--use_angle_cls:False
diff --git a/test_tipc/configs/ch_PP-OCRv2_rec_PACT/model_linux_gpu_normal_normal_infer_cpp_linux_gpu_cpu.txt b/test_tipc/configs/ch_PP-OCRv2_rec_PACT/model_linux_gpu_normal_normal_infer_cpp_linux_gpu_cpu.txt
index b807eadd3..73943d4bd 100644
--- a/test_tipc/configs/ch_PP-OCRv2_rec_PACT/model_linux_gpu_normal_normal_infer_cpp_linux_gpu_cpu.txt
+++ b/test_tipc/configs/ch_PP-OCRv2_rec_PACT/model_linux_gpu_normal_normal_infer_cpp_linux_gpu_cpu.txt
@@ -17,4 +17,4 @@ null:null
 --det:False
 --rec:True
 --cls:False
---use_angle_cls:False
\ No newline at end of file
+--use_angle_cls:False
diff --git a/test_tipc/configs/ch_PP-OCRv3/model_linux_gpu_normal_normal_infer_cpp_linux_gpu_cpu.txt b/test_tipc/configs/ch_PP-OCRv3/model_linux_gpu_normal_normal_infer_cpp_linux_gpu_cpu.txt
index 794af27d9..bb96d768b 100644
--- a/test_tipc/configs/ch_PP-OCRv3/model_linux_gpu_normal_normal_infer_cpp_linux_gpu_cpu.txt
+++ b/test_tipc/configs/ch_PP-OCRv3/model_linux_gpu_normal_normal_infer_cpp_linux_gpu_cpu.txt
@@ -17,4 +17,4 @@ inference:./deploy/cpp_infer/build/ppocr --rec_img_h=48 --rec_char_dict_path=./p
 --det:True
 --rec:True
 --cls:False
---use_angle_cls:False
\ No newline at end of file
+--use_angle_cls:False
diff --git a/test_tipc/configs/ch_PP-OCRv3/model_linux_gpu_normal_normal_paddle2onnx_python_linux_cpu.txt b/test_tipc/configs/ch_PP-OCRv3/model_linux_gpu_normal_normal_paddle2onnx_python_linux_cpu.txt
index bf2556ef1..d7c59817a 100644
--- a/test_tipc/configs/ch_PP-OCRv3/model_linux_gpu_normal_normal_paddle2onnx_python_linux_cpu.txt
+++ b/test_tipc/configs/ch_PP-OCRv3/model_linux_gpu_normal_normal_paddle2onnx_python_linux_cpu.txt
@@ -14,4 +14,4 @@ inference:tools/infer/predict_system.py --rec_image_shape="3,48,320"
 --use_gpu:True|False
 --det_model_dir:
 --rec_model_dir:
---image_dir:./inference/ch_det_data_50/all-sum-510/00008790.jpg
\ No newline at end of file
+--image_dir:./inference/ch_det_data_50/all-sum-510/00008790.jpg
diff --git a/test_tipc/configs/ch_PP-OCRv3_det/model_linux_gpu_normal_normal_infer_cpp_linux_gpu_cpu.txt b/test_tipc/configs/ch_PP-OCRv3_det/model_linux_gpu_normal_normal_infer_cpp_linux_gpu_cpu.txt
index aecd0dd43..998c33632 100644
--- a/test_tipc/configs/ch_PP-OCRv3_det/model_linux_gpu_normal_normal_infer_cpp_linux_gpu_cpu.txt
+++ b/test_tipc/configs/ch_PP-OCRv3_det/model_linux_gpu_normal_normal_infer_cpp_linux_gpu_cpu.txt
@@ -17,4 +17,4 @@ null:null
 --det:True
 --rec:False
 --cls:False
---use_angle_cls:False
\ No newline at end of file
+--use_angle_cls:False
diff --git a/test_tipc/configs/ch_PP-OCRv3_det/model_linux_gpu_normal_normal_lite_cpp_arm_cpu.txt b/test_tipc/configs/ch_PP-OCRv3_det/model_linux_gpu_normal_normal_lite_cpp_arm_cpu.txt
index cbc101f93..a9c6bdef6 100644
--- a/test_tipc/configs/ch_PP-OCRv3_det/model_linux_gpu_normal_normal_lite_cpp_arm_cpu.txt
+++ b/test_tipc/configs/ch_PP-OCRv3_det/model_linux_gpu_normal_normal_lite_cpp_arm_cpu.txt
@@ -10,4 +10,4 @@ null:null
 --image_dir:./test_data/icdar2015_lite/text_localization/ch4_test_images/
 --config_dir:./config.txt
 null:null
---benchmark:True
\ No newline at end of file
+--benchmark:True
diff --git a/test_tipc/configs/ch_PP-OCRv3_det/model_linux_gpu_normal_normal_paddle2onnx_python_linux_cpu.txt b/test_tipc/configs/ch_PP-OCRv3_det/model_linux_gpu_normal_normal_paddle2onnx_python_linux_cpu.txt
index a448713b1..80c24a611 100644
--- a/test_tipc/configs/ch_PP-OCRv3_det/model_linux_gpu_normal_normal_paddle2onnx_python_linux_cpu.txt
+++ b/test_tipc/configs/ch_PP-OCRv3_det/model_linux_gpu_normal_normal_paddle2onnx_python_linux_cpu.txt
@@ -14,4 +14,4 @@ inference:tools/infer/predict_det.py
 --use_gpu:True|False
 --det_model_dir:
 --rec_model_dir:
---image_dir:./inference/ch_det_data_50/all-sum-510/
\ No newline at end of file
+--image_dir:./inference/ch_det_data_50/all-sum-510/
diff --git a/test_tipc/configs/ch_PP-OCRv3_det/train_infer_python.txt b/test_tipc/configs/ch_PP-OCRv3_det/train_infer_python.txt
index 8daab48a4..258b556b3 100644
--- a/test_tipc/configs/ch_PP-OCRv3_det/train_infer_python.txt
+++ b/test_tipc/configs/ch_PP-OCRv3_det/train_infer_python.txt
@@ -58,4 +58,4 @@ epoch:2
 --profiler_options:batch_range=[10,20];state=GPU;tracer_option=Default;profile_path=model.profile
 flags:FLAGS_eager_delete_tensor_gb=0.0;FLAGS_fraction_of_gpu_memory_to_use=0.98;FLAGS_conv_workspace_size_limit=4096
 ===========================to_static_train_benchmark_params===========================
-to_static_train:Global.to_static=true
\ No newline at end of file
+to_static_train:Global.to_static=true
diff --git a/test_tipc/configs/ch_PP-OCRv3_det_KL/model_linux_gpu_normal_normal_infer_cpp_linux_gpu_cpu.txt b/test_tipc/configs/ch_PP-OCRv3_det_KL/model_linux_gpu_normal_normal_infer_cpp_linux_gpu_cpu.txt
index a34ffe22a..afaa01576 100644
--- a/test_tipc/configs/ch_PP-OCRv3_det_KL/model_linux_gpu_normal_normal_infer_cpp_linux_gpu_cpu.txt
+++ b/test_tipc/configs/ch_PP-OCRv3_det_KL/model_linux_gpu_normal_normal_infer_cpp_linux_gpu_cpu.txt
@@ -17,4 +17,4 @@ null:null
 --det:True
 --rec:False
 --cls:False
---use_angle_cls:False
\ No newline at end of file
+--use_angle_cls:False
diff --git a/test_tipc/configs/ch_PP-OCRv3_det_PACT/model_linux_gpu_normal_normal_infer_cpp_linux_gpu_cpu.txt b/test_tipc/configs/ch_PP-OCRv3_det_PACT/model_linux_gpu_normal_normal_infer_cpp_linux_gpu_cpu.txt
index 3198b8755..794e97972 100644
--- a/test_tipc/configs/ch_PP-OCRv3_det_PACT/model_linux_gpu_normal_normal_infer_cpp_linux_gpu_cpu.txt
+++ b/test_tipc/configs/ch_PP-OCRv3_det_PACT/model_linux_gpu_normal_normal_infer_cpp_linux_gpu_cpu.txt
@@ -17,4 +17,4 @@ null:null
 --det:True
 --rec:False
 --cls:False
---use_angle_cls:False
\ No newline at end of file
+--use_angle_cls:False
diff --git a/test_tipc/configs/ch_PP-OCRv3_rec/model_linux_gpu_normal_normal_infer_cpp_linux_gpu_cpu.txt b/test_tipc/configs/ch_PP-OCRv3_rec/model_linux_gpu_normal_normal_infer_cpp_linux_gpu_cpu.txt
index 9d6ca2cf5..f71785912 100644
--- a/test_tipc/configs/ch_PP-OCRv3_rec/model_linux_gpu_normal_normal_infer_cpp_linux_gpu_cpu.txt
+++ b/test_tipc/configs/ch_PP-OCRv3_rec/model_linux_gpu_normal_normal_infer_cpp_linux_gpu_cpu.txt
@@ -17,4 +17,4 @@ null:null
 --det:False
 --rec:True
 --cls:False
---use_angle_cls:False
\ No newline at end of file
+--use_angle_cls:False
diff --git a/test_tipc/configs/ch_PP-OCRv3_rec/model_linux_gpu_normal_normal_paddle2onnx_python_linux_cpu.txt b/test_tipc/configs/ch_PP-OCRv3_rec/model_linux_gpu_normal_normal_paddle2onnx_python_linux_cpu.txt
index 9114c0acf..27a0d07fb 100644
--- a/test_tipc/configs/ch_PP-OCRv3_rec/model_linux_gpu_normal_normal_paddle2onnx_python_linux_cpu.txt
+++ b/test_tipc/configs/ch_PP-OCRv3_rec/model_linux_gpu_normal_normal_paddle2onnx_python_linux_cpu.txt
@@ -14,4 +14,4 @@ inference:tools/infer/predict_rec.py --rec_image_shape="3,48,320"
 --use_gpu:True|False
 --det_model_dir:
 --rec_model_dir:
---image_dir:./inference/rec_inference/
\ No newline at end of file
+--image_dir:./inference/rec_inference/
diff --git a/test_tipc/configs/ch_PP-OCRv3_rec_KL/model_linux_gpu_normal_normal_infer_cpp_linux_gpu_cpu.txt b/test_tipc/configs/ch_PP-OCRv3_rec_KL/model_linux_gpu_normal_normal_infer_cpp_linux_gpu_cpu.txt
index f1a308fcc..ac7e1a91c 100644
--- a/test_tipc/configs/ch_PP-OCRv3_rec_KL/model_linux_gpu_normal_normal_infer_cpp_linux_gpu_cpu.txt
+++ b/test_tipc/configs/ch_PP-OCRv3_rec_KL/model_linux_gpu_normal_normal_infer_cpp_linux_gpu_cpu.txt
@@ -17,4 +17,4 @@ null:null
 --det:False
 --rec:True
 --cls:False
---use_angle_cls:False
\ No newline at end of file
+--use_angle_cls:False
diff --git a/test_tipc/configs/ch_PP-OCRv3_rec_PACT/model_linux_gpu_normal_normal_infer_cpp_linux_gpu_cpu.txt b/test_tipc/configs/ch_PP-OCRv3_rec_PACT/model_linux_gpu_normal_normal_infer_cpp_linux_gpu_cpu.txt
index 8fc1132ca..dcc0c9354 100644
--- a/test_tipc/configs/ch_PP-OCRv3_rec_PACT/model_linux_gpu_normal_normal_infer_cpp_linux_gpu_cpu.txt
+++ b/test_tipc/configs/ch_PP-OCRv3_rec_PACT/model_linux_gpu_normal_normal_infer_cpp_linux_gpu_cpu.txt
@@ -17,4 +17,4 @@ null:null
 --det:False
 --rec:True
 --cls:False
---use_angle_cls:False
\ No newline at end of file
+--use_angle_cls:False
diff --git a/test_tipc/configs/ch_PP-OCRv4_mobile_det/train_infer_python.txt b/test_tipc/configs/ch_PP-OCRv4_mobile_det/train_infer_python.txt
index 3635c0c6f..f5ec70644 100644
--- a/test_tipc/configs/ch_PP-OCRv4_mobile_det/train_infer_python.txt
+++ b/test_tipc/configs/ch_PP-OCRv4_mobile_det/train_infer_python.txt
@@ -58,4 +58,4 @@ epoch:2
 --profiler_options:batch_range=[10,20];state=GPU;tracer_option=Default;profile_path=model.profile
 flags:FLAGS_eager_delete_tensor_gb=0.0;FLAGS_fraction_of_gpu_memory_to_use=0.98;FLAGS_conv_workspace_size_limit=4096
 ===========================to_static_train_benchmark_params===========================
-to_static_train:Global.to_static=true
\ No newline at end of file
+to_static_train:Global.to_static=true
diff --git a/test_tipc/configs/ch_PP-OCRv4_server_det/train_infer_python.txt b/test_tipc/configs/ch_PP-OCRv4_server_det/train_infer_python.txt
index 315fac982..6e8db603a 100644
--- a/test_tipc/configs/ch_PP-OCRv4_server_det/train_infer_python.txt
+++ b/test_tipc/configs/ch_PP-OCRv4_server_det/train_infer_python.txt
@@ -58,4 +58,4 @@ epoch:2
 --profiler_options:batch_range=[10,20];state=GPU;tracer_option=Default;profile_path=model.profile
 flags:FLAGS_eager_delete_tensor_gb=0.0;FLAGS_fraction_of_gpu_memory_to_use=0.98;FLAGS_conv_workspace_size_limit=4096
 ===========================to_static_train_benchmark_params===========================
-to_static_train:Global.to_static=true
\ No newline at end of file
+to_static_train:Global.to_static=true
diff --git a/test_tipc/configs/ch_ppocr_mobile_v2_0/model_linux_gpu_normal_normal_infer_cpp_linux_gpu_cpu.txt b/test_tipc/configs/ch_ppocr_mobile_v2_0/model_linux_gpu_normal_normal_infer_cpp_linux_gpu_cpu.txt
index 73f1d4985..65312a750 100644
--- a/test_tipc/configs/ch_ppocr_mobile_v2_0/model_linux_gpu_normal_normal_infer_cpp_linux_gpu_cpu.txt
+++ b/test_tipc/configs/ch_ppocr_mobile_v2_0/model_linux_gpu_normal_normal_infer_cpp_linux_gpu_cpu.txt
@@ -17,4 +17,4 @@ inference:./deploy/cpp_infer/build/ppocr --rec_char_dict_path=./ppocr/utils/ppoc
 --det:True
 --rec:True
 --cls:False
---use_angle_cls:False
\ No newline at end of file
+--use_angle_cls:False
diff --git a/test_tipc/configs/ch_ppocr_mobile_v2_0/model_linux_gpu_normal_normal_paddle2onnx_python_linux_cpu.txt b/test_tipc/configs/ch_ppocr_mobile_v2_0/model_linux_gpu_normal_normal_paddle2onnx_python_linux_cpu.txt
index 3e01ae573..ec6135f54 100644
--- a/test_tipc/configs/ch_ppocr_mobile_v2_0/model_linux_gpu_normal_normal_paddle2onnx_python_linux_cpu.txt
+++ b/test_tipc/configs/ch_ppocr_mobile_v2_0/model_linux_gpu_normal_normal_paddle2onnx_python_linux_cpu.txt
@@ -14,4 +14,4 @@ inference:tools/infer/predict_system.py --rec_image_shape="3,32,320"
 --use_gpu:True|False
 --det_model_dir:
 --rec_model_dir:
---image_dir:./inference/ch_det_data_50/all-sum-510/
\ No newline at end of file
+--image_dir:./inference/ch_det_data_50/all-sum-510/
diff --git a/test_tipc/configs/ch_ppocr_mobile_v2_0_det/model_linux_gpu_normal_normal_infer_cpp_linux_gpu_cpu.txt b/test_tipc/configs/ch_ppocr_mobile_v2_0_det/model_linux_gpu_normal_normal_infer_cpp_linux_gpu_cpu.txt
index ded332e67..8dd77188a 100644
--- a/test_tipc/configs/ch_ppocr_mobile_v2_0_det/model_linux_gpu_normal_normal_infer_cpp_linux_gpu_cpu.txt
+++ b/test_tipc/configs/ch_ppocr_mobile_v2_0_det/model_linux_gpu_normal_normal_infer_cpp_linux_gpu_cpu.txt
@@ -17,4 +17,4 @@ null:null
 --det:True
 --rec:False
 --cls:False
---use_angle_cls:False
\ No newline at end of file
+--use_angle_cls:False
diff --git a/test_tipc/configs/ch_ppocr_mobile_v2_0_det/model_linux_gpu_normal_normal_paddle2onnx_python_linux_cpu.txt b/test_tipc/configs/ch_ppocr_mobile_v2_0_det/model_linux_gpu_normal_normal_paddle2onnx_python_linux_cpu.txt
index 8f36ad4b8..37ceec379 100644
--- a/test_tipc/configs/ch_ppocr_mobile_v2_0_det/model_linux_gpu_normal_normal_paddle2onnx_python_linux_cpu.txt
+++ b/test_tipc/configs/ch_ppocr_mobile_v2_0_det/model_linux_gpu_normal_normal_paddle2onnx_python_linux_cpu.txt
@@ -14,4 +14,4 @@ inference:tools/infer/predict_det.py
 --use_gpu:True|False
 --det_model_dir:
 --rec_model_dir:
---image_dir:./inference/ch_det_data_50/all-sum-510/
\ No newline at end of file
+--image_dir:./inference/ch_det_data_50/all-sum-510/
diff --git a/test_tipc/configs/ch_ppocr_mobile_v2_0_det/train_linux_gpu_fleet_normal_infer_python_linux_gpu_cpu.txt b/test_tipc/configs/ch_ppocr_mobile_v2_0_det/train_linux_gpu_fleet_normal_infer_python_linux_gpu_cpu.txt
index bf81d0baa..a3bcbe187 100644
--- a/test_tipc/configs/ch_ppocr_mobile_v2_0_det/train_linux_gpu_fleet_normal_infer_python_linux_gpu_cpu.txt
+++ b/test_tipc/configs/ch_ppocr_mobile_v2_0_det/train_linux_gpu_fleet_normal_infer_python_linux_gpu_cpu.txt
@@ -50,4 +50,4 @@ null:null
 --benchmark:True
 null:null
 ===========================infer_benchmark_params==========================
-random_infer_input:[{float32,[3,640,640]}];[{float32,[3,960,960]}]
\ No newline at end of file
+random_infer_input:[{float32,[3,640,640]}];[{float32,[3,960,960]}]
diff --git a/test_tipc/configs/ch_ppocr_mobile_v2_0_det/train_linux_gpu_normal_amp_infer_python_linux_gpu_cpu.txt b/test_tipc/configs/ch_ppocr_mobile_v2_0_det/train_linux_gpu_normal_amp_infer_python_linux_gpu_cpu.txt
index df71e9070..537d15e3e 100644
--- a/test_tipc/configs/ch_ppocr_mobile_v2_0_det/train_linux_gpu_normal_amp_infer_python_linux_gpu_cpu.txt
+++ b/test_tipc/configs/ch_ppocr_mobile_v2_0_det/train_linux_gpu_normal_amp_infer_python_linux_gpu_cpu.txt
@@ -50,4 +50,4 @@ null:null
 --benchmark:True
 null:null
 ===========================infer_benchmark_params==========================
-random_infer_input:[{float32,[3,640,640]}];[{float32,[3,960,960]}]
\ No newline at end of file
+random_infer_input:[{float32,[3,640,640]}];[{float32,[3,960,960]}]
diff --git a/test_tipc/configs/ch_ppocr_mobile_v2_0_det/train_windows_gpu_normal_normal_infer_python_windows_cpu_gpu.txt b/test_tipc/configs/ch_ppocr_mobile_v2_0_det/train_windows_gpu_normal_normal_infer_python_windows_cpu_gpu.txt
index a3f6933a6..f338e0216 100644
--- a/test_tipc/configs/ch_ppocr_mobile_v2_0_det/train_windows_gpu_normal_normal_infer_python_windows_cpu_gpu.txt
+++ b/test_tipc/configs/ch_ppocr_mobile_v2_0_det/train_windows_gpu_normal_normal_infer_python_windows_cpu_gpu.txt
@@ -49,4 +49,3 @@ inference:tools/infer/predict_det.py
 null:null
 --benchmark:True
 null:null
-
diff --git a/test_tipc/configs/ch_ppocr_mobile_v2_0_det_FPGM/train_infer_python.txt b/test_tipc/configs/ch_ppocr_mobile_v2_0_det_FPGM/train_infer_python.txt
index 0f6df1ac5..8b6dfbe52 100644
--- a/test_tipc/configs/ch_ppocr_mobile_v2_0_det_FPGM/train_infer_python.txt
+++ b/test_tipc/configs/ch_ppocr_mobile_v2_0_det_FPGM/train_infer_python.txt
@@ -50,4 +50,4 @@ null:null
 --benchmark:True
 null:null
 ===========================infer_benchmark_params==========================
-random_infer_input:[{float32,[3,640,640]}];[{float32,[3,960,960]}]
\ No newline at end of file
+random_infer_input:[{float32,[3,640,640]}];[{float32,[3,960,960]}]
diff --git a/test_tipc/configs/ch_ppocr_mobile_v2_0_det_FPGM/train_linux_gpu_normal_amp_infer_python_linux_gpu_cpu.txt b/test_tipc/configs/ch_ppocr_mobile_v2_0_det_FPGM/train_linux_gpu_normal_amp_infer_python_linux_gpu_cpu.txt
index 2014c6dbc..279b42862 100644
--- a/test_tipc/configs/ch_ppocr_mobile_v2_0_det_FPGM/train_linux_gpu_normal_amp_infer_python_linux_gpu_cpu.txt
+++ b/test_tipc/configs/ch_ppocr_mobile_v2_0_det_FPGM/train_linux_gpu_normal_amp_infer_python_linux_gpu_cpu.txt
@@ -50,4 +50,4 @@ null:null
 --benchmark:True
 null:null
 ===========================infer_benchmark_params==========================
-random_infer_input:[{float32,[3,640,640]}];[{float32,[3,960,960]}]
\ No newline at end of file
+random_infer_input:[{float32,[3,640,640]}];[{float32,[3,960,960]}]
diff --git a/test_tipc/configs/ch_ppocr_mobile_v2_0_det_KL/model_linux_gpu_normal_normal_infer_cpp_linux_gpu_cpu.txt b/test_tipc/configs/ch_ppocr_mobile_v2_0_det_KL/model_linux_gpu_normal_normal_infer_cpp_linux_gpu_cpu.txt
index f0e58dd56..6ee80f64f 100644
--- a/test_tipc/configs/ch_ppocr_mobile_v2_0_det_KL/model_linux_gpu_normal_normal_infer_cpp_linux_gpu_cpu.txt
+++ b/test_tipc/configs/ch_ppocr_mobile_v2_0_det_KL/model_linux_gpu_normal_normal_infer_cpp_linux_gpu_cpu.txt
@@ -17,4 +17,4 @@ null:null
 --det:True
 --rec:False
 --cls:False
---use_angle_cls:False
\ No newline at end of file
+--use_angle_cls:False
diff --git a/test_tipc/configs/ch_ppocr_mobile_v2_0_det_PACT/model_linux_gpu_normal_normal_infer_cpp_linux_gpu_cpu.txt b/test_tipc/configs/ch_ppocr_mobile_v2_0_det_PACT/model_linux_gpu_normal_normal_infer_cpp_linux_gpu_cpu.txt
index 513233059..a8066dd7a 100644
--- a/test_tipc/configs/ch_ppocr_mobile_v2_0_det_PACT/model_linux_gpu_normal_normal_infer_cpp_linux_gpu_cpu.txt
+++ b/test_tipc/configs/ch_ppocr_mobile_v2_0_det_PACT/model_linux_gpu_normal_normal_infer_cpp_linux_gpu_cpu.txt
@@ -17,4 +17,4 @@ null:null
 --det:True
 --rec:False
 --cls:False
---use_angle_cls:False
\ No newline at end of file
+--use_angle_cls:False
diff --git a/test_tipc/configs/ch_ppocr_mobile_v2_0_rec/model_linux_gpu_normal_normal_infer_cpp_linux_gpu_cpu.txt b/test_tipc/configs/ch_ppocr_mobile_v2_0_rec/model_linux_gpu_normal_normal_infer_cpp_linux_gpu_cpu.txt
index 332e632bd..a8342c5dd 100644
--- a/test_tipc/configs/ch_ppocr_mobile_v2_0_rec/model_linux_gpu_normal_normal_infer_cpp_linux_gpu_cpu.txt
+++ b/test_tipc/configs/ch_ppocr_mobile_v2_0_rec/model_linux_gpu_normal_normal_infer_cpp_linux_gpu_cpu.txt
@@ -17,4 +17,4 @@ null:null
 --det:False
 --rec:True
 --cls:False
---use_angle_cls:False
\ No newline at end of file
+--use_angle_cls:False
diff --git a/test_tipc/configs/ch_ppocr_mobile_v2_0_rec/model_linux_gpu_normal_normal_paddle2onnx_python_linux_cpu.txt b/test_tipc/configs/ch_ppocr_mobile_v2_0_rec/model_linux_gpu_normal_normal_paddle2onnx_python_linux_cpu.txt
index 78b76edae..79da03e62 100644
--- a/test_tipc/configs/ch_ppocr_mobile_v2_0_rec/model_linux_gpu_normal_normal_paddle2onnx_python_linux_cpu.txt
+++ b/test_tipc/configs/ch_ppocr_mobile_v2_0_rec/model_linux_gpu_normal_normal_paddle2onnx_python_linux_cpu.txt
@@ -14,4 +14,4 @@ inference:tools/infer/predict_rec.py --rec_image_shape="3,32,320"
 --use_gpu:True|False
 --det_model_dir:
 --rec_model_dir:
---image_dir:./inference/rec_inference/
\ No newline at end of file
+--image_dir:./inference/rec_inference/
diff --git a/test_tipc/configs/ch_ppocr_mobile_v2_0_rec_FPGM/train_infer_python.txt b/test_tipc/configs/ch_ppocr_mobile_v2_0_rec_FPGM/train_infer_python.txt
index 94c950310..d99ad00e8 100644
--- a/test_tipc/configs/ch_ppocr_mobile_v2_0_rec_FPGM/train_infer_python.txt
+++ b/test_tipc/configs/ch_ppocr_mobile_v2_0_rec_FPGM/train_infer_python.txt
@@ -50,4 +50,4 @@ null:null
 --benchmark:True
 null:null
 ===========================infer_benchmark_params==========================
-random_infer_input:[{float32,[3,32,320]}]
\ No newline at end of file
+random_infer_input:[{float32,[3,32,320]}]
diff --git a/test_tipc/configs/ch_ppocr_mobile_v2_0_rec_FPGM/train_linux_gpu_normal_amp_infer_python_linux_gpu_cpu.txt b/test_tipc/configs/ch_ppocr_mobile_v2_0_rec_FPGM/train_linux_gpu_normal_amp_infer_python_linux_gpu_cpu.txt
index 71555865a..6754f7354 100644
--- a/test_tipc/configs/ch_ppocr_mobile_v2_0_rec_FPGM/train_linux_gpu_normal_amp_infer_python_linux_gpu_cpu.txt
+++ b/test_tipc/configs/ch_ppocr_mobile_v2_0_rec_FPGM/train_linux_gpu_normal_amp_infer_python_linux_gpu_cpu.txt
@@ -50,4 +50,4 @@ null:null
 --benchmark:True
 null:null
 ===========================infer_benchmark_params==========================
-random_infer_input:[{float32,[3,32,320]}]
\ No newline at end of file
+random_infer_input:[{float32,[3,32,320]}]
diff --git a/test_tipc/configs/ch_ppocr_mobile_v2_0_rec_KL/model_linux_gpu_normal_normal_infer_cpp_linux_gpu_cpu.txt b/test_tipc/configs/ch_ppocr_mobile_v2_0_rec_KL/model_linux_gpu_normal_normal_infer_cpp_linux_gpu_cpu.txt
index ef4c93fcd..7669b44f6 100644
--- a/test_tipc/configs/ch_ppocr_mobile_v2_0_rec_KL/model_linux_gpu_normal_normal_infer_cpp_linux_gpu_cpu.txt
+++ b/test_tipc/configs/ch_ppocr_mobile_v2_0_rec_KL/model_linux_gpu_normal_normal_infer_cpp_linux_gpu_cpu.txt
@@ -17,4 +17,4 @@ null:null
 --det:False
 --rec:True
 --cls:False
---use_angle_cls:False
\ No newline at end of file
+--use_angle_cls:False
diff --git a/test_tipc/configs/ch_ppocr_mobile_v2_0_rec_PACT/model_linux_gpu_normal_normal_infer_cpp_linux_gpu_cpu.txt b/test_tipc/configs/ch_ppocr_mobile_v2_0_rec_PACT/model_linux_gpu_normal_normal_infer_cpp_linux_gpu_cpu.txt
index 74ca7b50b..d1f7c48fc 100644
--- a/test_tipc/configs/ch_ppocr_mobile_v2_0_rec_PACT/model_linux_gpu_normal_normal_infer_cpp_linux_gpu_cpu.txt
+++ b/test_tipc/configs/ch_ppocr_mobile_v2_0_rec_PACT/model_linux_gpu_normal_normal_infer_cpp_linux_gpu_cpu.txt
@@ -17,4 +17,4 @@ null:null
 --det:False
 --rec:True
 --cls:False
---use_angle_cls:False
\ No newline at end of file
+--use_angle_cls:False
diff --git a/test_tipc/configs/ch_ppocr_server_v2_0/model_linux_gpu_normal_normal_infer_cpp_linux_gpu_cpu.txt b/test_tipc/configs/ch_ppocr_server_v2_0/model_linux_gpu_normal_normal_infer_cpp_linux_gpu_cpu.txt
index ba8646fd9..f74289221 100644
--- a/test_tipc/configs/ch_ppocr_server_v2_0/model_linux_gpu_normal_normal_infer_cpp_linux_gpu_cpu.txt
+++ b/test_tipc/configs/ch_ppocr_server_v2_0/model_linux_gpu_normal_normal_infer_cpp_linux_gpu_cpu.txt
@@ -17,4 +17,4 @@ inference:./deploy/cpp_infer/build/ppocr --rec_char_dict_path=./ppocr/utils/ppoc
 --det:True
 --rec:True
 --cls:False
---use_angle_cls:False
\ No newline at end of file
+--use_angle_cls:False
diff --git a/test_tipc/configs/ch_ppocr_server_v2_0/model_linux_gpu_normal_normal_paddle2onnx_python_linux_cpu.txt b/test_tipc/configs/ch_ppocr_server_v2_0/model_linux_gpu_normal_normal_paddle2onnx_python_linux_cpu.txt
index 9e2cf191f..cd2d85715 100644
--- a/test_tipc/configs/ch_ppocr_server_v2_0/model_linux_gpu_normal_normal_paddle2onnx_python_linux_cpu.txt
+++ b/test_tipc/configs/ch_ppocr_server_v2_0/model_linux_gpu_normal_normal_paddle2onnx_python_linux_cpu.txt
@@ -14,4 +14,4 @@ inference:tools/infer/predict_system.py --rec_image_shape="3,32,320"
 --use_gpu:True|False
 --det_model_dir:
 --rec_model_dir:
---image_dir:./inference/ch_det_data_50/all-sum-510/00008790.jpg
\ No newline at end of file
+--image_dir:./inference/ch_det_data_50/all-sum-510/00008790.jpg
diff --git a/test_tipc/configs/ch_ppocr_server_v2_0_det/model_linux_gpu_normal_normal_infer_cpp_linux_gpu_cpu.txt b/test_tipc/configs/ch_ppocr_server_v2_0_det/model_linux_gpu_normal_normal_infer_cpp_linux_gpu_cpu.txt
index 4a30affd0..53e6ab19c 100644
--- a/test_tipc/configs/ch_ppocr_server_v2_0_det/model_linux_gpu_normal_normal_infer_cpp_linux_gpu_cpu.txt
+++ b/test_tipc/configs/ch_ppocr_server_v2_0_det/model_linux_gpu_normal_normal_infer_cpp_linux_gpu_cpu.txt
@@ -17,4 +17,4 @@ null:null
 --det:True
 --rec:False
 --cls:False
---use_angle_cls:False
\ No newline at end of file
+--use_angle_cls:False
diff --git a/test_tipc/configs/ch_ppocr_server_v2_0_det/model_linux_gpu_normal_normal_paddle2onnx_python_linux_cpu.txt b/test_tipc/configs/ch_ppocr_server_v2_0_det/model_linux_gpu_normal_normal_paddle2onnx_python_linux_cpu.txt
index b7dd6e22b..2c8a9b311 100644
--- a/test_tipc/configs/ch_ppocr_server_v2_0_det/model_linux_gpu_normal_normal_paddle2onnx_python_linux_cpu.txt
+++ b/test_tipc/configs/ch_ppocr_server_v2_0_det/model_linux_gpu_normal_normal_paddle2onnx_python_linux_cpu.txt
@@ -14,4 +14,4 @@ inference:tools/infer/predict_det.py
 --use_gpu:True|False
 --det_model_dir:
 --rec_model_dir:
---image_dir:./inference/ch_det_data_50/all-sum-510/00008790.jpg
\ No newline at end of file
+--image_dir:./inference/ch_det_data_50/all-sum-510/00008790.jpg
diff --git a/test_tipc/configs/ch_ppocr_server_v2_0_det/train_linux_gpu_fleet_normal_infer_python_linux_gpu_cpu.txt b/test_tipc/configs/ch_ppocr_server_v2_0_det/train_linux_gpu_fleet_normal_infer_python_linux_gpu_cpu.txt
index f398078fc..e4e2f2e38 100644
--- a/test_tipc/configs/ch_ppocr_server_v2_0_det/train_linux_gpu_fleet_normal_infer_python_linux_gpu_cpu.txt
+++ b/test_tipc/configs/ch_ppocr_server_v2_0_det/train_linux_gpu_fleet_normal_infer_python_linux_gpu_cpu.txt
@@ -50,4 +50,4 @@ inference:tools/infer/predict_det.py
 --benchmark:True
 null:null
 ===========================infer_benchmark_params==========================
-random_infer_input:[{float32,[3,640,640]}];[{float32,[3,960,960]}]
\ No newline at end of file
+random_infer_input:[{float32,[3,640,640]}];[{float32,[3,960,960]}]
diff --git a/test_tipc/configs/ch_ppocr_server_v2_0_det/train_linux_gpu_normal_amp_infer_python_linux_gpu_cpu.txt b/test_tipc/configs/ch_ppocr_server_v2_0_det/train_linux_gpu_normal_amp_infer_python_linux_gpu_cpu.txt
index 7a2d0a53c..c09c163d0 100644
--- a/test_tipc/configs/ch_ppocr_server_v2_0_det/train_linux_gpu_normal_amp_infer_python_linux_gpu_cpu.txt
+++ b/test_tipc/configs/ch_ppocr_server_v2_0_det/train_linux_gpu_normal_amp_infer_python_linux_gpu_cpu.txt
@@ -50,4 +50,4 @@ inference:tools/infer/predict_det.py
 --benchmark:True
 null:null
 ===========================infer_benchmark_params==========================
-random_infer_input:[{float32,[3,640,640]}];[{float32,[3,960,960]}]
\ No newline at end of file
+random_infer_input:[{float32,[3,640,640]}];[{float32,[3,960,960]}]
diff --git a/test_tipc/configs/ch_ppocr_server_v2_0_rec/model_linux_gpu_normal_normal_infer_cpp_linux_gpu_cpu.txt b/test_tipc/configs/ch_ppocr_server_v2_0_rec/model_linux_gpu_normal_normal_infer_cpp_linux_gpu_cpu.txt
index 3f3905516..f1f0a17bd 100644
--- a/test_tipc/configs/ch_ppocr_server_v2_0_rec/model_linux_gpu_normal_normal_infer_cpp_linux_gpu_cpu.txt
+++ b/test_tipc/configs/ch_ppocr_server_v2_0_rec/model_linux_gpu_normal_normal_infer_cpp_linux_gpu_cpu.txt
@@ -17,4 +17,4 @@ null:null
 --det:False
 --rec:True
 --cls:False
---use_angle_cls:False
\ No newline at end of file
+--use_angle_cls:False
diff --git a/test_tipc/configs/ch_ppocr_server_v2_0_rec/model_linux_gpu_normal_normal_paddle2onnx_python_linux_cpu.txt b/test_tipc/configs/ch_ppocr_server_v2_0_rec/model_linux_gpu_normal_normal_paddle2onnx_python_linux_cpu.txt
index 89b966100..227b6fd99 100644
--- a/test_tipc/configs/ch_ppocr_server_v2_0_rec/model_linux_gpu_normal_normal_paddle2onnx_python_linux_cpu.txt
+++ b/test_tipc/configs/ch_ppocr_server_v2_0_rec/model_linux_gpu_normal_normal_paddle2onnx_python_linux_cpu.txt
@@ -14,4 +14,4 @@ inference:tools/infer/predict_rec.py --rec_image_shape="3,32,320"
 --use_gpu:True|False
 --det_model_dir:
 --rec_model_dir:
---image_dir:./inference/rec_inference/
\ No newline at end of file
+--image_dir:./inference/rec_inference/
diff --git a/test_tipc/configs/det_mv3_db_v2_0/train_infer_python.txt b/test_tipc/configs/det_mv3_db_v2_0/train_infer_python.txt
index 62303b7e5..f6ee9df8b 100644
--- a/test_tipc/configs/det_mv3_db_v2_0/train_infer_python.txt
+++ b/test_tipc/configs/det_mv3_db_v2_0/train_infer_python.txt
@@ -56,4 +56,4 @@ batch_size:16
 fp_items:fp32|fp16
 epoch:4
 --profiler_options:batch_range=[10,20];state=GPU;tracer_option=Default;profile_path=model.profile
-flags:FLAGS_eager_delete_tensor_gb=0.0;FLAGS_fraction_of_gpu_memory_to_use=0.98;FLAGS_conv_workspace_size_limit=4096
\ No newline at end of file
+flags:FLAGS_eager_delete_tensor_gb=0.0;FLAGS_fraction_of_gpu_memory_to_use=0.98;FLAGS_conv_workspace_size_limit=4096
diff --git a/test_tipc/configs/det_mv3_east_v2_0/det_mv3_east.yml b/test_tipc/configs/det_mv3_east_v2_0/det_mv3_east.yml
index 4ae32ab00..461179e4e 100644
--- a/test_tipc/configs/det_mv3_east_v2_0/det_mv3_east.yml
+++ b/test_tipc/configs/det_mv3_east_v2_0/det_mv3_east.yml
@@ -106,4 +106,4 @@ Eval:
     shuffle: False
     drop_last: False
     batch_size_per_card: 1 # must be 1
-    num_workers: 2
\ No newline at end of file
+    num_workers: 2
diff --git a/test_tipc/configs/det_r18_ct/train_infer_python.txt b/test_tipc/configs/det_r18_ct/train_infer_python.txt
index 5933fdbee..e02531209 100644
--- a/test_tipc/configs/det_r18_ct/train_infer_python.txt
+++ b/test_tipc/configs/det_r18_ct/train_infer_python.txt
@@ -50,4 +50,4 @@ inference:tools/infer/predict_det.py
 --benchmark:True
 null:null
 ===========================infer_benchmark_params==========================
-random_infer_input:[{float32,[3,640,640]}];[{float32,[3,960,960]}]
\ No newline at end of file
+random_infer_input:[{float32,[3,640,640]}];[{float32,[3,960,960]}]
diff --git a/test_tipc/configs/det_r50_db_v2_0/train_infer_python.txt b/test_tipc/configs/det_r50_db_v2_0/train_infer_python.txt
index 1d0d9693a..e5c1dd830 100644
--- a/test_tipc/configs/det_r50_db_v2_0/train_infer_python.txt
+++ b/test_tipc/configs/det_r50_db_v2_0/train_infer_python.txt
@@ -56,4 +56,4 @@ batch_size:8
 fp_items:fp32|fp16
 epoch:2
 --profiler_options:batch_range=[10,20];state=GPU;tracer_option=Default;profile_path=model.profile
-flags:FLAGS_eager_delete_tensor_gb=0.0;FLAGS_fraction_of_gpu_memory_to_use=0.98;FLAGS_conv_workspace_size_limit=4096
\ No newline at end of file
+flags:FLAGS_eager_delete_tensor_gb=0.0;FLAGS_fraction_of_gpu_memory_to_use=0.98;FLAGS_conv_workspace_size_limit=4096
diff --git a/test_tipc/configs/det_r50_dcn_fce_ctw_v2_0/det_r50_vd_dcn_fce_ctw.yml b/test_tipc/configs/det_r50_dcn_fce_ctw_v2_0/det_r50_vd_dcn_fce_ctw.yml
index 29f6f32a5..cd355f193 100644
--- a/test_tipc/configs/det_r50_dcn_fce_ctw_v2_0/det_r50_vd_dcn_fce_ctw.yml
+++ b/test_tipc/configs/det_r50_dcn_fce_ctw_v2_0/det_r50_vd_dcn_fce_ctw.yml
@@ -136,4 +136,4 @@ Eval:
     shuffle: False
     drop_last: False
     batch_size_per_card: 1 # must be 1
-    num_workers: 2
\ No newline at end of file
+    num_workers: 2
diff --git a/test_tipc/configs/det_r50_dcn_fce_ctw_v2_0/train_infer_python.txt b/test_tipc/configs/det_r50_dcn_fce_ctw_v2_0/train_infer_python.txt
index 92ded19d6..cc6080f06 100644
--- a/test_tipc/configs/det_r50_dcn_fce_ctw_v2_0/train_infer_python.txt
+++ b/test_tipc/configs/det_r50_dcn_fce_ctw_v2_0/train_infer_python.txt
@@ -56,4 +56,4 @@ batch_size:6
 fp_items:fp32|fp16
 epoch:1
 --profiler_options:batch_range=[10,20];state=GPU;tracer_option=Default;profile_path=model.profile
-flags:FLAGS_eager_delete_tensor_gb=0.0;FLAGS_fraction_of_gpu_memory_to_use=0.98;FLAGS_conv_workspace_size_limit=4096
\ No newline at end of file
+flags:FLAGS_eager_delete_tensor_gb=0.0;FLAGS_fraction_of_gpu_memory_to_use=0.98;FLAGS_conv_workspace_size_limit=4096
diff --git a/test_tipc/configs/det_r50_vd_east_v2_0/det_r50_vd_east.yml b/test_tipc/configs/det_r50_vd_east_v2_0/det_r50_vd_east.yml
index 844f42e9a..ae7bd9461 100644
--- a/test_tipc/configs/det_r50_vd_east_v2_0/det_r50_vd_east.yml
+++ b/test_tipc/configs/det_r50_vd_east_v2_0/det_r50_vd_east.yml
@@ -105,4 +105,4 @@ Eval:
     shuffle: False
     drop_last: False
     batch_size_per_card: 1 # must be 1
-    num_workers: 2
\ No newline at end of file
+    num_workers: 2
diff --git a/test_tipc/configs/det_r50_vd_east_v2_0/train_infer_python.txt b/test_tipc/configs/det_r50_vd_east_v2_0/train_infer_python.txt
index 5ee445a6c..e3ef8c623 100644
--- a/test_tipc/configs/det_r50_vd_east_v2_0/train_infer_python.txt
+++ b/test_tipc/configs/det_r50_vd_east_v2_0/train_infer_python.txt
@@ -56,4 +56,4 @@ batch_size:8
 fp_items:fp32|fp16
 epoch:2
 --profiler_options:batch_range=[10,20];state=GPU;tracer_option=Default;profile_path=model.profile
-flags:FLAGS_eager_delete_tensor_gb=0.0;FLAGS_fraction_of_gpu_memory_to_use=0.98;FLAGS_conv_workspace_size_limit=4096
\ No newline at end of file
+flags:FLAGS_eager_delete_tensor_gb=0.0;FLAGS_fraction_of_gpu_memory_to_use=0.98;FLAGS_conv_workspace_size_limit=4096
diff --git a/test_tipc/configs/det_r50_vd_pse_v2_0/train_infer_python.txt b/test_tipc/configs/det_r50_vd_pse_v2_0/train_infer_python.txt
index 78d25f6b1..665890567 100644
--- a/test_tipc/configs/det_r50_vd_pse_v2_0/train_infer_python.txt
+++ b/test_tipc/configs/det_r50_vd_pse_v2_0/train_infer_python.txt
@@ -56,4 +56,4 @@ batch_size:8
 fp_items:fp32|fp16
 epoch:2
 --profiler_options:batch_range=[10,20];state=GPU;tracer_option=Default;profile_path=model.profile
-flags:FLAGS_eager_delete_tensor_gb=0.0;FLAGS_fraction_of_gpu_memory_to_use=0.98;FLAGS_conv_workspace_size_limit=4096
\ No newline at end of file
+flags:FLAGS_eager_delete_tensor_gb=0.0;FLAGS_fraction_of_gpu_memory_to_use=0.98;FLAGS_conv_workspace_size_limit=4096
diff --git a/test_tipc/configs/det_r50_vd_sast_icdar15_v2_0/det_r50_vd_sast_icdar2015.yml b/test_tipc/configs/det_r50_vd_sast_icdar15_v2_0/det_r50_vd_sast_icdar2015.yml
index 4b7340ac5..805588402 100644
--- a/test_tipc/configs/det_r50_vd_sast_icdar15_v2_0/det_r50_vd_sast_icdar2015.yml
+++ b/test_tipc/configs/det_r50_vd_sast_icdar15_v2_0/det_r50_vd_sast_icdar2015.yml
@@ -108,4 +108,3 @@ Eval:
     drop_last: False
     batch_size_per_card: 1 # must be 1
     num_workers: 2
-
diff --git a/test_tipc/configs/det_r50_vd_sast_totaltext_v2_0/det_r50_vd_sast_totaltext.yml b/test_tipc/configs/det_r50_vd_sast_totaltext_v2_0/det_r50_vd_sast_totaltext.yml
index ef2b88455..6c6a9f1a2 100644
--- a/test_tipc/configs/det_r50_vd_sast_totaltext_v2_0/det_r50_vd_sast_totaltext.yml
+++ b/test_tipc/configs/det_r50_vd_sast_totaltext_v2_0/det_r50_vd_sast_totaltext.yml
@@ -105,4 +105,4 @@ Eval:
     shuffle: False
     drop_last: False
     batch_size_per_card: 1 # must be 1
-    num_workers: 2
\ No newline at end of file
+    num_workers: 2
diff --git a/test_tipc/configs/en_table_structure/model_linux_gpu_normal_normal_infer_cpp_linux_gpu_cpu.txt b/test_tipc/configs/en_table_structure/model_linux_gpu_normal_normal_infer_cpp_linux_gpu_cpu.txt
index ad002a334..4dd41e32d 100644
--- a/test_tipc/configs/en_table_structure/model_linux_gpu_normal_normal_infer_cpp_linux_gpu_cpu.txt
+++ b/test_tipc/configs/en_table_structure/model_linux_gpu_normal_normal_infer_cpp_linux_gpu_cpu.txt
@@ -17,4 +17,4 @@ null:null
 --det:True
 --rec:True
 --cls:False
---use_angle_cls:False
\ No newline at end of file
+--use_angle_cls:False
diff --git a/test_tipc/configs/en_table_structure/model_linux_gpu_normal_normal_paddle2onnx_python_linux_cpu.txt b/test_tipc/configs/en_table_structure/model_linux_gpu_normal_normal_paddle2onnx_python_linux_cpu.txt
index 068c4c6b1..e81bae8cc 100644
--- a/test_tipc/configs/en_table_structure/model_linux_gpu_normal_normal_paddle2onnx_python_linux_cpu.txt
+++ b/test_tipc/configs/en_table_structure/model_linux_gpu_normal_normal_paddle2onnx_python_linux_cpu.txt
@@ -14,4 +14,4 @@ inference:ppstructure/table/predict_structure.py --table_char_dict_path=./ppocr/
 --use_gpu:True|False
 --det_model_dir:
 --rec_model_dir:
---image_dir:./ppstructure/docs/table/table.jpg
\ No newline at end of file
+--image_dir:./ppstructure/docs/table/table.jpg
diff --git a/test_tipc/configs/en_table_structure_PACT/model_linux_gpu_normal_normal_infer_cpp_linux_gpu_cpu.txt b/test_tipc/configs/en_table_structure_PACT/model_linux_gpu_normal_normal_infer_cpp_linux_gpu_cpu.txt
index 22f77c469..2f457dc55 100644
--- a/test_tipc/configs/en_table_structure_PACT/model_linux_gpu_normal_normal_infer_cpp_linux_gpu_cpu.txt
+++ b/test_tipc/configs/en_table_structure_PACT/model_linux_gpu_normal_normal_infer_cpp_linux_gpu_cpu.txt
@@ -17,4 +17,4 @@ null:null
 --det:True
 --rec:True
 --cls:False
---use_angle_cls:False
\ No newline at end of file
+--use_angle_cls:False
diff --git a/test_tipc/configs/rec_mv3_none_none_ctc_v2_0/train_infer_python.txt b/test_tipc/configs/rec_mv3_none_none_ctc_v2_0/train_infer_python.txt
index d91c55e88..0b66a9756 100644
--- a/test_tipc/configs/rec_mv3_none_none_ctc_v2_0/train_infer_python.txt
+++ b/test_tipc/configs/rec_mv3_none_none_ctc_v2_0/train_infer_python.txt
@@ -50,4 +50,4 @@ inference:tools/infer/predict_rec.py --rec_char_dict_path=./ppocr/utils/ic15_dic
 --benchmark:True
 null:null
 ===========================infer_benchmark_params==========================
-random_infer_input:[{float32,[3,32,100]}]
\ No newline at end of file
+random_infer_input:[{float32,[3,32,100]}]
diff --git a/test_tipc/configs/rec_r31_robustscanner/train_infer_python.txt b/test_tipc/configs/rec_r31_robustscanner/train_infer_python.txt
index 1bf8dc0b6..c2e2f2bf8 100644
--- a/test_tipc/configs/rec_r31_robustscanner/train_infer_python.txt
+++ b/test_tipc/configs/rec_r31_robustscanner/train_infer_python.txt
@@ -51,4 +51,3 @@ inference:tools/infer/predict_rec.py --rec_char_dict_path=./ppocr/utils/dict90.t
 null:null
 ===========================infer_benchmark_params==========================
 random_infer_input:[{float32,[3,48,160]}]
-
diff --git a/test_tipc/configs/rec_r34_vd_none_bilstm_ctc_v2_0/train_infer_python.txt b/test_tipc/configs/rec_r34_vd_none_bilstm_ctc_v2_0/train_infer_python.txt
index b53efbd6b..b9c6b6f1c 100644
--- a/test_tipc/configs/rec_r34_vd_none_bilstm_ctc_v2_0/train_infer_python.txt
+++ b/test_tipc/configs/rec_r34_vd_none_bilstm_ctc_v2_0/train_infer_python.txt
@@ -50,4 +50,4 @@ inference:tools/infer/predict_rec.py --rec_char_dict_path=./ppocr/utils/ic15_dic
 --benchmark:True
 null:null
 ===========================infer_benchmark_params==========================
-random_infer_input:[{float32,[3,32,100]}]
\ No newline at end of file
+random_infer_input:[{float32,[3,32,100]}]
diff --git a/test_tipc/configs/rec_r34_vd_none_none_ctc_v2_0/train_infer_python.txt b/test_tipc/configs/rec_r34_vd_none_none_ctc_v2_0/train_infer_python.txt
index 7d953968b..cd336f0ad 100644
--- a/test_tipc/configs/rec_r34_vd_none_none_ctc_v2_0/train_infer_python.txt
+++ b/test_tipc/configs/rec_r34_vd_none_none_ctc_v2_0/train_infer_python.txt
@@ -50,4 +50,4 @@ inference:tools/infer/predict_rec.py --rec_char_dict_path=./ppocr/utils/ic15_dic
 --benchmark:True
 null:null
 ===========================infer_benchmark_params==========================
-random_infer_input:[{float32,[3,32,100]}]
\ No newline at end of file
+random_infer_input:[{float32,[3,32,100]}]
diff --git a/test_tipc/configs/rec_svtrnet/train_infer_python.txt b/test_tipc/configs/rec_svtrnet/train_infer_python.txt
index 63e6b908a..04ac03aca 100644
--- a/test_tipc/configs/rec_svtrnet/train_infer_python.txt
+++ b/test_tipc/configs/rec_svtrnet/train_infer_python.txt
@@ -58,4 +58,4 @@ epoch:2
 --profiler_options:batch_range=[10,20];state=GPU;tracer_option=Default;profile_path=model.profile
 flags:FLAGS_eager_delete_tensor_gb=0.0;FLAGS_fraction_of_gpu_memory_to_use=0.98;FLAGS_conv_workspace_size_limit=4096
 ===========================to_static_train_benchmark_params===========================
-to_static_train:Global.to_static=true
\ No newline at end of file
+to_static_train:Global.to_static=true
diff --git a/test_tipc/configs/slanet/model_linux_gpu_normal_normal_infer_cpp_linux_gpu_cpu.txt b/test_tipc/configs/slanet/model_linux_gpu_normal_normal_infer_cpp_linux_gpu_cpu.txt
index 1b4226706..49b69cd70 100644
--- a/test_tipc/configs/slanet/model_linux_gpu_normal_normal_infer_cpp_linux_gpu_cpu.txt
+++ b/test_tipc/configs/slanet/model_linux_gpu_normal_normal_infer_cpp_linux_gpu_cpu.txt
@@ -17,4 +17,4 @@ null:null
 --det:True
 --rec:True
 --cls:False
---use_angle_cls:False
\ No newline at end of file
+--use_angle_cls:False
diff --git a/test_tipc/configs/slanet/model_linux_gpu_normal_normal_paddle2onnx_python_linux_cpu.txt b/test_tipc/configs/slanet/model_linux_gpu_normal_normal_paddle2onnx_python_linux_cpu.txt
index 45e4e9e85..99062b05b 100644
--- a/test_tipc/configs/slanet/model_linux_gpu_normal_normal_paddle2onnx_python_linux_cpu.txt
+++ b/test_tipc/configs/slanet/model_linux_gpu_normal_normal_paddle2onnx_python_linux_cpu.txt
@@ -14,4 +14,4 @@ inference:ppstructure/table/predict_structure.py --table_char_dict_path=./ppocr/
 --use_gpu:True|False
 --det_model_dir:
 --rec_model_dir:
---image_dir:./ppstructure/docs/table/table.jpg
\ No newline at end of file
+--image_dir:./ppstructure/docs/table/table.jpg
diff --git a/test_tipc/configs/slanet/train_infer_python.txt b/test_tipc/configs/slanet/train_infer_python.txt
index 0beebc04d..c5aa82701 100644
--- a/test_tipc/configs/slanet/train_infer_python.txt
+++ b/test_tipc/configs/slanet/train_infer_python.txt
@@ -58,4 +58,4 @@ epoch:2
 --profiler_options:batch_range=[10,20];state=GPU;tracer_option=Default;profile_path=model.profile
 flags:FLAGS_eager_delete_tensor_gb=0.0;FLAGS_fraction_of_gpu_memory_to_use=0.98;FLAGS_conv_workspace_size_limit=4096
 ===========================to_static_train_benchmark_params===========================
-to_static_train:Global.to_static=true
\ No newline at end of file
+to_static_train:Global.to_static=true
diff --git a/test_tipc/configs/sr_telescope/sr_telescope.yml b/test_tipc/configs/sr_telescope/sr_telescope.yml
index c78a42d0e..774c19d79 100644
--- a/test_tipc/configs/sr_telescope/sr_telescope.yml
+++ b/test_tipc/configs/sr_telescope/sr_telescope.yml
@@ -81,4 +81,3 @@ Eval:
     drop_last: False
     batch_size_per_card: 16
     num_workers: 4
-
diff --git a/test_tipc/configs/table_master/table_master.yml b/test_tipc/configs/table_master/table_master.yml
index b27bdae54..cc96fbc86 100644
--- a/test_tipc/configs/table_master/table_master.yml
+++ b/test_tipc/configs/table_master/table_master.yml
@@ -130,4 +130,4 @@ Eval:
     shuffle: False
     drop_last: False
     batch_size_per_card: 10
-    num_workers: 8
\ No newline at end of file
+    num_workers: 8
diff --git a/test_tipc/configs/table_master/train_infer_python.txt b/test_tipc/configs/table_master/train_infer_python.txt
index a248cd822..a83a031e0 100644
--- a/test_tipc/configs/table_master/train_infer_python.txt
+++ b/test_tipc/configs/table_master/train_infer_python.txt
@@ -58,4 +58,4 @@ epoch:2
 --profiler_options:batch_range=[10,20];state=GPU;tracer_option=Default;profile_path=model.profile
 flags:FLAGS_eager_delete_tensor_gb=0.0;FLAGS_fraction_of_gpu_memory_to_use=0.98;FLAGS_conv_workspace_size_limit=4096
 ===========================to_static_train_benchmark_params===========================
-to_static_train:Global.to_static=true
\ No newline at end of file
+to_static_train:Global.to_static=true
diff --git a/test_tipc/docs/benchmark_train.md b/test_tipc/docs/benchmark_train.md
index 50cc13b92..1d51e6749 100644
--- a/test_tipc/docs/benchmark_train.md
+++ b/test_tipc/docs/benchmark_train.md
@@ -59,18 +59,18 @@ train_log/
 |模型名称|配置文件|大数据集 float32 fps |小数据集 float32 fps |diff |大数据集 float16 fps|小数据集 float16 fps| diff | 大数据集大小 | 小数据集大小 |
 |:-:|:-:|:-:|:-:|:-:|:-:|:-:|:-:|:-:|:-:|
 | ch_ppocr_mobile_v2.0_det |[config](../configs/ch_ppocr_mobile_v2.0_det/train_infer_python.txt) | 53.836 | 53.343 / 53.914 / 52.785 |0.020940758 | 45.574 | 45.57 / 46.292 / 46.213 | 0.015596647 | 10,000| 2,000|
-| ch_ppocr_mobile_v2.0_rec |[config](../configs/ch_ppocr_mobile_v2.0_rec/train_infer_python.txt) | 2083.311 | 2043.194	/ 2066.372 / 2093.317 |0.023944295 | 2153.261 | 2167.561 /	2165.726 /	2155.614| 0.005511725 | 600,000| 160,000|
-| ch_ppocr_server_v2.0_det |[config](../configs/ch_ppocr_server_v2.0_det/train_infer_python.txt) | 20.716 | 20.739 /	20.807 /	20.755 |0.003268131 | 20.592 | 20.498 /	20.993 /	20.75| 0.023579288 | 10,000| 2,000|
-| ch_ppocr_server_v2.0_rec |[config](../configs/ch_ppocr_server_v2.0_rec/train_infer_python.txt) | 528.56 | 528.386 /	528.991 /	528.391 |0.001143687 | 1189.788 | 1190.007 /	1176.332 /	1192.084| 0.013213834 |  600,000| 160,000|
-| ch_PP-OCRv2_det	 |[config](../configs/ch_PP-OCRv2_det/train_infer_python.txt) | 13.87 | 13.386 /	13.529 /	13.428 |0.010569887 | 17.847 | 17.746 /	17.908 /	17.96| 0.011915367 | 10,000| 2,000|
-| ch_PP-OCRv2_rec	 |[config](../configs/ch_PP-OCRv2_rec/train_infer_python.txt) | 109.248 | 106.32 /	106.318 /	108.587 |0.020895687 | 117.491 | 117.62 /	117.757 /	117.726| 0.001163413 | 140,000| 40,000|
-| det_mv3_db_v2.0	 |[config](../configs/det_mv3_db_v2_0/train_infer_python.txt) | 61.802 | 62.078 /	61.802 /	62.008 |0.00444602 | 82.947 | 84.294 /	84.457 /	84.005| 0.005351836 | 10,000| 2,000|
-| det_r50_vd_db_v2.0	 |[config](../configs/det_r50_vd_db_v2.0/train_infer_python.txt) | 29.955 | 29.092 /	29.31 /	28.844 |0.015899011 | 51.097 |50.367 /	50.879 /	50.227| 0.012814717 | 10,000| 2,000|
-| det_r50_vd_east_v2.0	 |[config](../configs/det_r50_vd_east_v2.0/train_infer_python.txt) | 42.485 | 42.624 /	42.663 /	42.561 |0.00239083 | 67.61 |67.825/ 	68.299/ 	68.51| 0.00999854 | 10,000| 2,000|
-| det_r50_vd_pse_v2.0	 |[config](../configs/det_r50_vd_pse_v2.0/train_infer_python.txt) | 16.455 | 16.517 / 16.555 /	16.353 |0.012201752 | 27.02 |27.288 /	27.152 /	27.408| 0.009340339 | 10,000| 2,000|
-| rec_mv3_none_bilstm_ctc_v2.0	 |[config](../configs/rec_mv3_none_bilstm_ctc_v2.0/train_infer_python.txt) | 2288.358 | 2291.906 /	2293.725 /	2290.05 |0.001602197 | 2336.17 |2327.042 /	2328.093 /	2344.915| 0.007622025 | 600,000| 160,000|
-| layoutxlm_ser	 |[config](../configs/layoutxlm/train_infer_python.txt) | 18.001 | 18.114 /	18.107 /	18.307 |0.010924783 | 21.982 | 21.507 /	21.116 /	21.406| 0.018180127 | 1490 | 1490|
-| PP-Structure-table	 |[config](../configs/en_table_structure/train_infer_python.txt) | 14.151 | 14.077 /	14.23 /	14.25 |0.012140351 | 16.285 | 16.595 /	16.878 /	16.531 | 0.020559308 | 20,000| 5,000|
-| det_r50_dcn_fce_ctw_v2.0	 |[config](../configs/det_r50_dcn_fce_ctw_v2.0/train_infer_python.txt) | 14.057 | 14.029 /	14.02 /	14.014 |0.001069214 | 18.298 |18.411 /	18.376 /	18.331| 0.004345228 | 10,000| 2,000|
-| ch_PP-OCRv3_det	 |[config](../configs/ch_PP-OCRv3_det/train_infer_python.txt) | 8.622 | 8.431 /	8.423 /	8.479|0.006604552 | 14.203 |14.346	14.468	14.23| 0.016450097 | 10,000| 2,000|
-| ch_PP-OCRv3_rec	 |[config](../configs/ch_PP-OCRv3_rec/train_infer_python.txt) | 90.239 | 90.077 /	91.513 /	91.325|0.01569176 | | |  | 160,000| 40,000|
+| ch_ppocr_mobile_v2.0_rec |[config](../configs/ch_ppocr_mobile_v2.0_rec/train_infer_python.txt) | 2083.311 | 2043.194  / 2066.372 / 2093.317 |0.023944295 | 2153.261 | 2167.561 /  2165.726 /  2155.614| 0.005511725 | 600,000| 160,000|
+| ch_ppocr_server_v2.0_det |[config](../configs/ch_ppocr_server_v2.0_det/train_infer_python.txt) | 20.716 | 20.739 /    20.807 /    20.755 |0.003268131 | 20.592 | 20.498 / 20.993 /    20.75| 0.023579288 | 10,000| 2,000|
+| ch_ppocr_server_v2.0_rec |[config](../configs/ch_ppocr_server_v2.0_rec/train_infer_python.txt) | 528.56 | 528.386 /   528.991 /   528.391 |0.001143687 | 1189.788 | 1190.007 /    1176.332 /  1192.084| 0.013213834 |  600,000| 160,000|
+| ch_PP-OCRv2_det    |[config](../configs/ch_PP-OCRv2_det/train_infer_python.txt) | 13.87 | 13.386 /    13.529 /    13.428 |0.010569887 | 17.847 | 17.746 / 17.908 /    17.96| 0.011915367 | 10,000| 2,000|
+| ch_PP-OCRv2_rec    |[config](../configs/ch_PP-OCRv2_rec/train_infer_python.txt) | 109.248 | 106.32 /  106.318 /   108.587 |0.020895687 | 117.491 | 117.62 /   117.757 /   117.726| 0.001163413 | 140,000| 40,000|
+| det_mv3_db_v2.0    |[config](../configs/det_mv3_db_v2_0/train_infer_python.txt) | 61.802 | 62.078 /   61.802 /    62.008 |0.00444602 | 82.947 | 84.294 /  84.457 /    84.005| 0.005351836 | 10,000| 2,000|
+| det_r50_vd_db_v2.0     |[config](../configs/det_r50_vd_db_v2.0/train_infer_python.txt) | 29.955 | 29.092 /    29.31 / 28.844 |0.015899011 | 51.097 |50.367 /  50.879 /    50.227| 0.012814717 | 10,000| 2,000|
+| det_r50_vd_east_v2.0   |[config](../configs/det_r50_vd_east_v2.0/train_infer_python.txt) | 42.485 | 42.624 /  42.663 /    42.561 |0.00239083 | 67.61 |67.825/     68.299/     68.51| 0.00999854 | 10,000| 2,000|
+| det_r50_vd_pse_v2.0    |[config](../configs/det_r50_vd_pse_v2.0/train_infer_python.txt) | 16.455 | 16.517 / 16.555 /  16.353 |0.012201752 | 27.02 |27.288 /   27.152 /    27.408| 0.009340339 | 10,000| 2,000|
+| rec_mv3_none_bilstm_ctc_v2.0   |[config](../configs/rec_mv3_none_bilstm_ctc_v2.0/train_infer_python.txt) | 2288.358 | 2291.906 /  2293.725 /  2290.05 |0.001602197 | 2336.17 |2327.042 /  2328.093 /  2344.915| 0.007622025 | 600,000| 160,000|
+| layoutxlm_ser  |[config](../configs/layoutxlm/train_infer_python.txt) | 18.001 | 18.114 / 18.107 /    18.307 |0.010924783 | 21.982 | 21.507 / 21.116 /    21.406| 0.018180127 | 1490 | 1490|
+| PP-Structure-table     |[config](../configs/en_table_structure/train_infer_python.txt) | 14.151 | 14.077 /    14.23 / 14.25 |0.012140351 | 16.285 | 16.595 /  16.878 /    16.531 | 0.020559308 | 20,000| 5,000|
+| det_r50_dcn_fce_ctw_v2.0   |[config](../configs/det_r50_dcn_fce_ctw_v2.0/train_infer_python.txt) | 14.057 | 14.029 /  14.02 / 14.014 |0.001069214 | 18.298 |18.411 /  18.376 /    18.331| 0.004345228 | 10,000| 2,000|
+| ch_PP-OCRv3_det    |[config](../configs/ch_PP-OCRv3_det/train_infer_python.txt) | 8.622 | 8.431 / 8.423 / 8.479|0.006604552 | 14.203 |14.346  14.468  14.23| 0.016450097 | 10,000| 2,000|
+| ch_PP-OCRv3_rec    |[config](../configs/ch_PP-OCRv3_rec/train_infer_python.txt) | 90.239 | 90.077 /   91.513 /    91.325|0.01569176 | | |  | 160,000| 40,000|
diff --git a/test_tipc/docs/jeston_test_train_inference_python.md b/test_tipc/docs/jeston_test_train_inference_python.md
index 22fc21c1c..23d95cd94 100644
--- a/test_tipc/docs/jeston_test_train_inference_python.md
+++ b/test_tipc/docs/jeston_test_train_inference_python.md
@@ -59,8 +59,8 @@ test_tipc/output/
 
 其中`results_python.log`中包含了每条指令的运行状态,如果运行成功会输出:
 ```
-Run successfully with command - python tools/infer/predict_det.py --use_gpu=True --use_tensorrt=False --precision=fp32 --det_model_dir=./inference/ch_ppocr_mobile_v2.0_det_infer/ --rec_batch_num=1 --image_dir=./inference/ch_det_data_50/all-sum-510/ --benchmark=True   > ./test_tipc/output/python_infer_gpu_usetrt_False_precision_fp32_batchsize_1.log 2>&1 !  
-Run successfully with command - python tools/infer/predict_det.py --use_gpu=True --use_tensorrt=True --precision=fp32 --det_model_dir=./inference/ch_ppocr_mobile_v2.0_det_infer/ --rec_batch_num=1 --image_dir=./inference/ch_det_data_50/all-sum-510/ --benchmark=True   > ./test_tipc/output/python_infer_gpu_usetrt_True_precision_fp32_batchsize_1.log 2>&1 !  
+Run successfully with command - python tools/infer/predict_det.py --use_gpu=True --use_tensorrt=False --precision=fp32 --det_model_dir=./inference/ch_ppocr_mobile_v2.0_det_infer/ --rec_batch_num=1 --image_dir=./inference/ch_det_data_50/all-sum-510/ --benchmark=True   > ./test_tipc/output/python_infer_gpu_usetrt_False_precision_fp32_batchsize_1.log 2>&1 !
+Run successfully with command - python tools/infer/predict_det.py --use_gpu=True --use_tensorrt=True --precision=fp32 --det_model_dir=./inference/ch_ppocr_mobile_v2.0_det_infer/ --rec_batch_num=1 --image_dir=./inference/ch_det_data_50/all-sum-510/ --benchmark=True   > ./test_tipc/output/python_infer_gpu_usetrt_True_precision_fp32_batchsize_1.log 2>&1 !
 Run successfully with command - python tools/infer/predict_det.py --use_gpu=True --use_tensorrt=True --precision=fp16 --det_model_dir=./inference/ch_ppocr_mobile_v2.0_det_infer/ --rec_batch_num=1 --image_dir=./inference/ch_det_data_50/all-sum-510/ --benchmark=True   > ./test_tipc/output/python_infer_gpu_usetrt_True_precision_fp16_batchsize_1.log 2>&1 !
 ```
 如果运行失败,会输出:
@@ -84,7 +84,7 @@ Run failed with command - python tools/infer/predict_det.py --use_gpu=True --use
 python test_tipc/compare_results.py --gt_file=./test_tipc/results/python_*.txt  --log_file=./test_tipc/output/python_*.log --atol=1e-3 --rtol=1e-3
 ```
 
-参数介绍:  
+参数介绍:
 - gt_file: 指向事先保存好的预测结果路径,支持*.txt 结尾,会自动索引*.txt格式的文件,文件默认保存在test_tipc/result/ 文件夹下
 - log_file: 指向运行test_tipc/test_train_inference_python.sh 脚本的infer模式保存的预测日志,预测日志中打印的有预测结果,比如:文本框,预测文本,类别等等,同样支持python_infer_*.log格式传入
 - atol: 设置的绝对误差
@@ -108,6 +108,6 @@ ValueError: The results of python_infer_gpu_usetrt_True_precision_fp32_batchsize
 
 
 ## 3. 更多教程
-本文档为功能测试用,更丰富的训练预测使用教程请参考:  
-[模型训练](https://github.com/PaddlePaddle/PaddleOCR/blob/dygraph/doc/doc_ch/training.md)  
+本文档为功能测试用,更丰富的训练预测使用教程请参考:
+[模型训练](https://github.com/PaddlePaddle/PaddleOCR/blob/dygraph/doc/doc_ch/training.md)
 [基于Python预测引擎推理](https://github.com/PaddlePaddle/PaddleOCR/blob/dygraph/doc/doc_ch/inference_ppocr.md)
diff --git a/test_tipc/docs/mac_test_train_inference_python.md b/test_tipc/docs/mac_test_train_inference_python.md
index 759ea5164..f3ece9313 100644
--- a/test_tipc/docs/mac_test_train_inference_python.md
+++ b/test_tipc/docs/mac_test_train_inference_python.md
@@ -54,13 +54,13 @@ Mac端无GPU,环境准备只需要Python环境即可,安装PaddlePaddle等
 # 配置文件中默认去掉了GPU和mkldnn相关的测试链条
 bash test_tipc/prepare.sh ./test_tipc/configs/ch_ppocr_mobile_v2_0_det/train_mac_cpu_normal_normal_infer_python_mac_cpu.txt 'lite_train_lite_infer'
 bash test_tipc/test_train_inference_python.sh ./test_tipc/configs/ch_ppocr_mobile_v2_0_det/train_mac_cpu_normal_normal_infer_python_mac_cpu.txt 'lite_train_lite_infer'
-```  
+```
 
 - 模式2:lite_train_whole_infer,使用少量数据训练,一定量数据预测,用于验证训练后的模型执行预测,预测速度是否合理;
 ```shell
 bash test_tipc/prepare.sh ./test_tipc/configs/ch_ppocr_mobile_v2_0_det/train_mac_cpu_normal_normal_infer_python_mac_cpu.txt 'lite_train_whole_infer'
 bash test_tipc/test_train_inference_python.sh ./test_tipc/configs/ch_ppocr_mobile_v2_0_det/train_mac_cpu_normal_normal_infer_python_mac_cpu.txt  'lite_train_whole_infer'
-```  
+```
 
 - 模式3:whole_infer,不训练,全量数据预测,走通开源模型评估、动转静,检查inference model预测时间和精度;
 ```shell
@@ -69,13 +69,13 @@ bash test_tipc/prepare.sh ./test_tipc/configs/ch_ppocr_mobile_v2_0_det/train_mac
 bash test_tipc/test_train_inference_python.sh ./test_tipc/configs/ch_ppocr_mobile_v2_0_det/train_mac_cpu_normal_normal_infer_python_mac_cpu.txt 'whole_infer'
 # 用法2: 指定GPU卡预测,第三个传入参数为GPU卡号
 bash test_tipc/test_train_inference_python.sh ./test_tipc/configs/ch_ppocr_mobile_v2_0_det/train_mac_cpu_normal_normal_infer_python_mac_cpu.txt 'whole_infer' '1'
-```  
+```
 
 - 模式4:whole_train_whole_infer,CE: 全量数据训练,全量数据预测,验证模型训练精度,预测精度,预测速度;(Mac端不建议运行此模式)
 ```shell
 bash test_tipc/prepare.sh ./test_tipc/configs/ch_ppocr_mobile_v2_0_det/train_mac_cpu_normal_normal_infer_python_mac_cpu.txt 'whole_train_whole_infer'
 bash test_tipc/test_train_inference_python.sh ./test_tipc/configs/ch_ppocr_mobile_v2_0_det/train_mac_cpu_normal_normal_infer_python_mac_cpu.txt 'whole_train_whole_infer'
-```  
+```
 
 运行相应指令后,在`test_tipc/output`文件夹下自动会保存运行日志。如`lite_train_lite_infer`模式下,会运行训练+inference的链条,因此,在`test_tipc/output`文件夹有以下文件:
 ```
@@ -114,7 +114,7 @@ Run failed with command - python3.7 tools/export_model.py -c tests/configs/det_m
 python test_tipc/compare_results.py --gt_file=./test_tipc/results/python_*.txt  --log_file=./test_tipc/output/python_*.log --atol=1e-3 --rtol=1e-3
 ```
 
-参数介绍:  
+参数介绍:
 - gt_file: 指向事先保存好的预测结果路径,支持*.txt 结尾,会自动索引*.txt格式的文件,文件默认保存在test_tipc/result/ 文件夹下
 - log_file: 指向运行test_tipc/test_train_inference_python.sh 脚本的infer模式保存的预测日志,预测日志中打印的有预测结果,比如:文本框,预测文本,类别等等,同样支持python_infer_*.log格式传入
 - atol: 设置的绝对误差
@@ -138,6 +138,6 @@ ValueError: The results of python_infer_cpu_usemkldnn_False_threads_1_batchsize_
 
 
 ## 3. 更多教程
-本文档为功能测试用,更丰富的训练预测使用教程请参考:  
-[模型训练](https://github.com/PaddlePaddle/PaddleOCR/blob/dygraph/doc/doc_ch/training.md)  
+本文档为功能测试用,更丰富的训练预测使用教程请参考:
+[模型训练](https://github.com/PaddlePaddle/PaddleOCR/blob/dygraph/doc/doc_ch/training.md)
 [基于Python预测引擎推理](https://github.com/PaddlePaddle/PaddleOCR/blob/dygraph/doc/doc_ch/inference_ppocr.md)
diff --git a/test_tipc/docs/test_inference_cpp.md b/test_tipc/docs/test_inference_cpp.md
index 5d8aeda6c..5e9e85226 100644
--- a/test_tipc/docs/test_inference_cpp.md
+++ b/test_tipc/docs/test_inference_cpp.md
@@ -26,7 +26,7 @@ bash test_tipc/prepare.sh ./test_tipc/configs/ch_PP-OCRv2_rec/model_linux_gpu_no
 bash test_tipc/test_inference_cpp.sh test_tipc/configs/ch_PP-OCRv2_rec/model_linux_gpu_normal_normal_infer_cpp_linux_gpu_cpu.txt
 # 用法2: 指定GPU卡预测,第三个传入参数为GPU卡号
 bash test_tipc/test_inference_cpp.sh test_tipc/configs/ch_PP-OCRv2_rec/model_linux_gpu_normal_normal_infer_cpp_linux_gpu_cpu.txt '1'
-```  
+```
 
 运行预测指令后,在`test_tipc/output`文件夹下自动会保存运行日志,包括以下文件:
 
@@ -66,7 +66,7 @@ Run failed with command - ch_PP-OCRv2_rec - ./deploy/cpp_infer/build/ppocr --rec
 python3.7 test_tipc/compare_results.py --gt_file=./test_tipc/results/cpp_*.txt  --log_file=./test_tipc/output/cpp_*.log --atol=1e-3 --rtol=1e-3
 ```
 
-参数介绍:  
+参数介绍:
 - gt_file: 指向事先保存好的预测结果路径,支持*.txt 结尾,会自动索引*.txt格式的文件,文件默认保存在test_tipc/result/ 文件夹下
 - log_file: 指向运行test_tipc/test_inference_cpp.sh 脚本的infer模式保存的预测日志,预测日志中打印的有预测结果,比如:文本框,预测文本,类别等等,同样支持cpp_infer_*.log格式传入
 - atol: 设置的绝对误差
@@ -83,4 +83,4 @@ python3.7 test_tipc/compare_results.py --gt_file=./test_tipc/results/cpp_*.txt
 
 ## 3. 更多教程
 
-本文档为功能测试用,更详细的c++预测使用教程请参考:[服务器端C++预测](https://github.com/PaddlePaddle/PaddleOCR/tree/dygraph/deploy/cpp_infer)  
+本文档为功能测试用,更详细的c++预测使用教程请参考:[服务器端C++预测](https://github.com/PaddlePaddle/PaddleOCR/tree/dygraph/deploy/cpp_infer)
diff --git a/test_tipc/docs/test_lite_arm_cpp.md b/test_tipc/docs/test_lite_arm_cpp.md
index 166b5981b..b84e06909 100644
--- a/test_tipc/docs/test_lite_arm_cpp.md
+++ b/test_tipc/docs/test_lite_arm_cpp.md
@@ -39,7 +39,7 @@ bash test_tipc/prepare_lite_cpp.sh ./test_tipc/configs/ch_PP-OCRv2_det/model_lin
 # 手机端测试:
 bash test_lite_arm_cpp.sh model_linux_gpu_normal_normal_lite_cpp_arm_cpu.txt
 
-```  
+```
 
 #### 2.1.2 基于ARM\_GPU\_OPENCL测试
 
@@ -54,7 +54,7 @@ bash test_tipc/prepare_lite_cpp.sh ./test_tipc/configs/ch_PP-OCRv2_det/model_lin
 # 手机端测试:
 bash test_lite_arm_cpp.sh model_linux_gpu_normal_normal_lite_cpp_arm_gpu_opencl.txt
 
-```  
+```
 
 
 **注意**:
diff --git a/test_tipc/docs/test_paddle2onnx.md b/test_tipc/docs/test_paddle2onnx.md
index 299621d01..939338847 100644
--- a/test_tipc/docs/test_paddle2onnx.md
+++ b/test_tipc/docs/test_paddle2onnx.md
@@ -7,7 +7,7 @@ PaddleServing预测功能测试的主程序为`test_paddle2onnx.sh`,可以测
 基于训练是否使用量化,进行本测试的模型可以分为`正常模型`和`量化模型`,这两类模型对应的Paddle2ONNX预测功能汇总如下:
 
 | 模型类型 |device |
-|  ----   |  ---- |  
+|  ----   |  ---- |
 | 正常模型 | GPU |
 | 正常模型 | CPU |
 | 量化模型 | GPU |
@@ -22,7 +22,7 @@ bash test_tipc/prepare.sh ./test_tipc/configs/ch_PP-OCRv2_det/model_linux_gpu_no
 
 # 用法:
 bash test_tipc/test_paddle2onnx.sh ./test_tipc/configs/ch_PP-OCRv2_det/model_linux_gpu_normal_normal_paddle2onnx_python_linux_cpu.txt
-```  
+```
 
 #### 运行结果
 
@@ -45,4 +45,4 @@ Run failed with command - ch_PP-OCRv2_det -  paddle2onnx --model_dir=./inference
 
 ## 3. 更多教程
 
-本文档为功能测试用,更详细的Paddle2onnx预测使用教程请参考:[Paddle2ONNX](https://github.com/PaddlePaddle/Paddle2ONNX)  
+本文档为功能测试用,更详细的Paddle2onnx预测使用教程请参考:[Paddle2ONNX](https://github.com/PaddlePaddle/Paddle2ONNX)
diff --git a/test_tipc/docs/test_ptq_inference_python.md b/test_tipc/docs/test_ptq_inference_python.md
index 7887c0b5c..b118b66f8 100644
--- a/test_tipc/docs/test_ptq_inference_python.md
+++ b/test_tipc/docs/test_ptq_inference_python.md
@@ -6,7 +6,7 @@ Linux GPU/CPU KL离线量化训练推理测试的主程序为`test_ptq_inference
 - 训练相关:
 
 | 算法名称 | 模型名称 | 单机单卡 |
-|  :----: |   :----:  |    :----:  |  
+|  :----: |   :----:  |    :----:  |
 |    | model_name | KL离线量化训练 |
 
 - 推理相关:
@@ -26,7 +26,7 @@ bash test_tipc/prepare.sh ./test_tipc/configs/ch_PP-OCRv2_det/train_ptq_infer_py
 
 # 用法:
 bash test_tipc/test_ptq_inference_python.sh ./test_tipc/configs/ch_PP-OCRv2_det/train_ptq_infer_python.txt "whole_infer"
-```  
+```
 
 #### 运行结果
 
@@ -48,4 +48,4 @@ Run failed with command - ch_PP-OCRv2_det_KL - python3.7 deploy/slim/quantizatio
 
 ## 3. 更多教程
 
-本文档为功能测试用,更详细的量化使用教程请参考:[量化](../../deploy/slim/quantization/README.md)  
+本文档为功能测试用,更详细的量化使用教程请参考:[量化](../../deploy/slim/quantization/README.md)
diff --git a/test_tipc/docs/test_serving.md b/test_tipc/docs/test_serving.md
index ef3888878..1be7fa259 100644
--- a/test_tipc/docs/test_serving.md
+++ b/test_tipc/docs/test_serving.md
@@ -25,7 +25,7 @@ bash test_tipc/prepare.sh ./test_tipc/configs/ch_PP-OCRv2/model_linux_gpu_normal
 
 # 用法:
 bash test_tipc/test_serving_infer_python.sh ./test_tipc/configs/ch_PP-OCRv2/model_linux_gpu_normal_normal_serving_python_linux_gpu_cpu.txt "serving_infer"
-```  
+```
 **cpp serving**
 先运行`prepare.sh`准备数据和模型,然后运行`test_serving_infer_cpp.sh`进行测试,最终在```test_tipc/output/{model_name}/serving_infer/cpp```目录下生成`cpp_*.log`后缀的日志文件。
 
@@ -34,7 +34,7 @@ bash test_tipc/prepare.sh ./test_tipc/configs/ch_PP-OCRv2/model_linux_gpu_normal
 
 # 用法:
 bash test_tipc/test_serving_infer_cpp.sh ./test_tipc/configs/ch_PP-OCRv2/model_linux_gpu_normal_normal_serving_cpp_linux_gpu_cpu.txt "serving_infer"
-```  
+```
 
 #### 运行结果
 
@@ -60,4 +60,4 @@ Run failed with command - ch_PP-OCRv2_rec - nohup python3.7 web_service_rec.py -
 
 ## 3. 更多教程
 
-本文档为功能测试用,更详细的Serving预测使用教程请参考:[PPOCR 服务化部署](https://github.com/PaddlePaddle/PaddleOCR/blob/dygraph/deploy/pdserving/README_CN.md)  
+本文档为功能测试用,更详细的Serving预测使用教程请参考:[PPOCR 服务化部署](https://github.com/PaddlePaddle/PaddleOCR/blob/dygraph/deploy/pdserving/README_CN.md)
diff --git a/test_tipc/docs/test_train_fleet_inference_python.md b/test_tipc/docs/test_train_fleet_inference_python.md
index 9fddb5d16..15f32c973 100644
--- a/test_tipc/docs/test_train_fleet_inference_python.md
+++ b/test_tipc/docs/test_train_fleet_inference_python.md
@@ -61,9 +61,9 @@ bash test_tipc/test_train_inference_python.sh  test_tipc/configs/ch_PP-OCRv3_rec
 输出结果如下,表示命令运行成功。
 
 ```bash
- Run successfully with command - ch_PP-OCRv3_rec - python3.7 -m paddle.distributed.launch --ips=192.168.0.1,192.168.0.2 --gpus=0,1 tools/train.py -c test_tipc/configs/ch_PP-OCRv3_rec/ch_PP-OCRv3_rec_distillation.yml -o  Global.use_gpu=True Global.save_model_dir=./test_tipc/output/ch_PP-OCRv3_rec/lite_train_lite_infer/norm_train_gpus_0,1_autocast_fp32_nodes_2   Global.epoch_num=3 Global.auto_cast=fp32 Train.loader.batch_size_per_card=16    !  
+ Run successfully with command - ch_PP-OCRv3_rec - python3.7 -m paddle.distributed.launch --ips=192.168.0.1,192.168.0.2 --gpus=0,1 tools/train.py -c test_tipc/configs/ch_PP-OCRv3_rec/ch_PP-OCRv3_rec_distillation.yml -o  Global.use_gpu=True Global.save_model_dir=./test_tipc/output/ch_PP-OCRv3_rec/lite_train_lite_infer/norm_train_gpus_0,1_autocast_fp32_nodes_2   Global.epoch_num=3 Global.auto_cast=fp32 Train.loader.batch_size_per_card=16    !
  ......
-  Run successfully with command - ch_PP-OCRv3_rec - python3.7 tools/infer/predict_rec.py --rec_image_shape="3,48,320" --use_gpu=False --enable_mkldnn=False --cpu_threads=6 --rec_model_dir=./test_tipc/output/ch_PP-OCRv3_rec/lite_train_lite_infer/norm_train_gpus_0,1_autocast_fp32_nodes_2/Student --rec_batch_num=1   --image_dir=./inference/rec_inference --benchmark=True --precision=fp32   > ./test_tipc/output/ch_PP-OCRv3_rec/lite_train_lite_infer/python_infer_cpu_usemkldnn_False_threads_6_precision_fp32_batchsize_1.log 2>&1 !  
+  Run successfully with command - ch_PP-OCRv3_rec - python3.7 tools/infer/predict_rec.py --rec_image_shape="3,48,320" --use_gpu=False --enable_mkldnn=False --cpu_threads=6 --rec_model_dir=./test_tipc/output/ch_PP-OCRv3_rec/lite_train_lite_infer/norm_train_gpus_0,1_autocast_fp32_nodes_2/Student --rec_batch_num=1   --image_dir=./inference/rec_inference --benchmark=True --precision=fp32   > ./test_tipc/output/ch_PP-OCRv3_rec/lite_train_lite_infer/python_infer_cpu_usemkldnn_False_threads_6_precision_fp32_batchsize_1.log 2>&1 !
 ```
 
 在开启benchmark参数时,可以得到测试的详细数据,包含运行环境信息(系统版本、CUDA版本、CUDNN版本、驱动版本),Paddle版本信息,参数设置信息(运行设备、线程数、是否开启内存优化等),模型信息(模型名称、精度),数据信息(batchsize、是否为动态shape等),性能信息(CPU,GPU的占用、运行耗时、预处理耗时、推理耗时、后处理耗时),内容如下所示:
diff --git a/test_tipc/docs/test_train_inference_python.md b/test_tipc/docs/test_train_inference_python.md
index d1dbd8ee4..4578057f9 100644
--- a/test_tipc/docs/test_train_inference_python.md
+++ b/test_tipc/docs/test_train_inference_python.md
@@ -63,13 +63,13 @@ Linux端基础训练预测功能测试的主程序为`test_train_inference_pytho
 ```shell
 bash test_tipc/prepare.sh ./test_tipc/configs/ch_ppocr_mobile_v2_0_det/train_infer_python.txt 'lite_train_lite_infer'
 bash test_tipc/test_train_inference_python.sh ./test_tipc/configs/ch_ppocr_mobile_v2_0_det/train_infer_python.txt 'lite_train_lite_infer'
-```  
+```
 
 - 模式2:lite_train_whole_infer,使用少量数据训练,一定量数据预测,用于验证训练后的模型执行预测,预测速度是否合理;
 ```shell
 bash test_tipc/prepare.sh ./test_tipc/configs/ch_ppocr_mobile_v2_0_det/train_infer_python.txt  'lite_train_whole_infer'
 bash test_tipc/test_train_inference_python.sh ../test_tipc/configs/ch_ppocr_mobile_v2_0_det/train_infer_python.txt 'lite_train_whole_infer'
-```  
+```
 
 - 模式3:whole_infer,不训练,全量数据预测,走通开源模型评估、动转静,检查inference model预测时间和精度;
 ```shell
@@ -78,13 +78,13 @@ bash test_tipc/prepare.sh ./test_tipc/configs/ch_ppocr_mobile_v2_0_det/train_inf
 bash test_tipc/test_train_inference_python.sh ../test_tipc/configs/ch_ppocr_mobile_v2_0_det/train_infer_python.txt 'whole_infer'
 # 用法2: 指定GPU卡预测,第三个传入参数为GPU卡号
 bash test_tipc/test_train_inference_python.sh ./test_tipc/configs/ch_ppocr_mobile_v2_0_det/train_infer_python.txt 'whole_infer' '1'
-```  
+```
 
 - 模式4:whole_train_whole_infer,CE: 全量数据训练,全量数据预测,验证模型训练精度,预测精度,预测速度;
 ```shell
 bash test_tipc/prepare.sh ./test_tipc/configs/ch_ppocr_mobile_v2_0_det/train_infer_python.txt 'whole_train_whole_infer'
 bash test_tipc/test_train_inference_python.sh ./test_tipc/configs/ch_ppocr_mobile_v2_0_det/train_infer_python.txt 'whole_train_whole_infer'
-```  
+```
 
 运行相应指令后,在`test_tipc/output`文件夹下自动会保存运行日志。如'lite_train_lite_infer'模式下,会运行训练+inference的链条,因此,在`test_tipc/output`文件夹有以下文件:
 ```
@@ -121,7 +121,7 @@ Run failed with command - python3.7 tools/export_model.py -c tests/configs/det_m
 ```shell
 bash test_tipc/prepare.sh ./test_tipc/configs/ch_PP-OCRv2_det/train_pact_infer_python.txt 'lite_train_lite_infer'
 bash test_tipc/test_train_inference_python.sh ./test_tipc/configs/ch_PP-OCRv2_det/train_pact_infer_python.txt 'lite_train_lite_infer'
-```  
+```
 #### 2.2.3 混合精度训练链条
 此外,`test_train_inference_python.sh`还包含混合精度训练模式,命令如下:
 以ch_PP-OCRv2_det为例,如需测试其他模型更换配置即可。
@@ -129,7 +129,7 @@ bash test_tipc/test_train_inference_python.sh ./test_tipc/configs/ch_PP-OCRv2_de
 ```shell
 bash test_tipc/prepare.sh ./test_tipc/configs/ch_PP-OCRv2_det/train_linux_gpu_normal_amp_infer_python_linux_gpu_cpu.txt 'lite_train_lite_infer'
 bash test_tipc/test_train_inference_python.sh ./test_tipc/configs/ch_PP-OCRv2_det/train_linux_gpu_normal_amp_infer_python_linux_gpu_cpu.txt 'lite_train_lite_infer'
-```  
+```
 
 ### 2.3 精度测试
 
@@ -144,7 +144,7 @@ bash test_tipc/test_train_inference_python.sh ./test_tipc/configs/ch_PP-OCRv2_de
 python3.7 test_tipc/compare_results.py --gt_file=./test_tipc/results/python_*.txt  --log_file=./test_tipc/output/python_*.log --atol=1e-3 --rtol=1e-3
 ```
 
-参数介绍:  
+参数介绍:
 - gt_file: 指向事先保存好的预测结果路径,支持*.txt 结尾,会自动索引*.txt格式的文件,文件默认保存在test_tipc/result/ 文件夹下
 - log_file: 指向运行test_tipc/test_train_inference_python.sh 脚本的infer模式保存的预测日志,预测日志中打印的有预测结果,比如:文本框,预测文本,类别等等,同样支持python_infer_*.log格式传入
 - atol: 设置的绝对误差
@@ -160,6 +160,6 @@ python3.7 test_tipc/compare_results.py --gt_file=./test_tipc/results/python_*.tx
 
 
 ## 3. 更多教程
-本文档为功能测试用,更丰富的训练预测使用教程请参考:  
-[模型训练](https://github.com/PaddlePaddle/PaddleOCR/blob/dygraph/doc/doc_ch/training.md)  
+本文档为功能测试用,更丰富的训练预测使用教程请参考:
+[模型训练](https://github.com/PaddlePaddle/PaddleOCR/blob/dygraph/doc/doc_ch/training.md)
 [基于Python预测引擎推理](https://github.com/PaddlePaddle/PaddleOCR/blob/dygraph/doc/doc_ch/inference_ppocr.md)
diff --git a/test_tipc/docs/win_test_train_inference_python.md b/test_tipc/docs/win_test_train_inference_python.md
index d631c3887..5f80df134 100644
--- a/test_tipc/docs/win_test_train_inference_python.md
+++ b/test_tipc/docs/win_test_train_inference_python.md
@@ -55,13 +55,13 @@ Windows端基础训练预测功能测试的主程序为`test_train_inference_pyt
 ```shell
 bash test_tipc/prepare.sh ./test_tipc/configs/ch_ppocr_mobile_v2_0_det/train_windows_gpu_normal_normal_infer_python_windows_cpu_gpu.txt  'lite_train_lite_infer'
 bash test_tipc/test_train_inference_python.sh ./test_tipc/configs/ch_ppocr_mobile_v2_0_det/train_windows_gpu_normal_normal_infer_python_windows_cpu_gpu.txt  'lite_train_lite_infer'
-```  
+```
 
 - 模式2:lite_train_whole_infer,使用少量数据训练,一定量数据预测,用于验证训练后的模型执行预测,预测速度是否合理;
 ```shell
 bash test_tipc/prepare.sh ./test_tipc/configs/ch_ppocr_mobile_v2_0_det/train_windows_gpu_normal_normal_infer_python_windows_cpu_gpu.txt  'lite_train_whole_infer'
 bash test_tipc/test_train_inference_python.sh ./test_tipc/configs/ch_ppocr_mobile_v2_0_det/train_windows_gpu_normal_normal_infer_python_windows_cpu_gpu.txt  'lite_train_whole_infer'
-```  
+```
 
 - 模式3:whole_infer,不训练,全量数据预测,走通开源模型评估、动转静,检查inference model预测时间和精度;
 ```shell
@@ -70,13 +70,13 @@ bash test_tipc/prepare.sh ./test_tipc/configs/ch_ppocr_mobile_v2_0_det/train_win
 bash test_tipc/test_train_inference_python.sh ./test_tipc/configs/ch_ppocr_mobile_v2_0_det/train_windows_gpu_normal_normal_infer_python_windows_cpu_gpu.txt  'whole_infer'
 # 用法2: 指定GPU卡预测,第三个传入参数为GPU卡号
 bash test_tipc/test_train_inference_python.sh ./test_tipc/configs/ch_ppocr_mobile_v2_0_det/train_windows_gpu_normal_normal_infer_python_windows_cpu_gpu.txt  'whole_infer' '1'
-```  
+```
 
 - 模式4:whole_train_whole_infer,CE: 全量数据训练,全量数据预测,验证模型训练精度,预测精度,预测速度;
 ```shell
 bash test_tipc/prepare.sh ./test_tipc/configs/ch_ppocr_mobile_v2_0_det/train_windows_gpu_normal_normal_infer_python_windows_cpu_gpu.txt  'whole_train_whole_infer'
 bash test_tipc/test_train_inference_python.sh ./test_tipc/configs/ch_ppocr_mobile_v2_0_det/train_windows_gpu_normal_normal_infer_python_windows_cpu_gpu.txt  'whole_train_whole_infer'
-```  
+```
 
 运行相应指令后,在`test_tipc/output`文件夹下自动会保存运行日志。如'lite_train_lite_infer'模式下,会运行训练+inference的链条,因此,在`test_tipc/output`文件夹有以下文件:
 ```
@@ -117,7 +117,7 @@ Run failed with command - python3.7 tools/export_model.py -c tests/configs/det_m
 python test_tipc/compare_results.py --gt_file=./test_tipc/results/python_*.txt  --log_file=./test_tipc/output/python_*.log --atol=1e-3 --rtol=1e-3
 ```
 
-参数介绍:  
+参数介绍:
 - gt_file: 指向事先保存好的预测结果路径,支持*.txt 结尾,会自动索引*.txt格式的文件,文件默认保存在test_tipc/result/ 文件夹下
 - log_file: 指向运行test_tipc/test_train_inference_python.sh 脚本的infer模式保存的预测日志,预测日志中打印的有预测结果,比如:文本框,预测文本,类别等等,同样支持python_infer_*.log格式传入
 - atol: 设置的绝对误差
@@ -141,6 +141,6 @@ ValueError: The results of python_infer_cpu_usemkldnn_False_threads_1_batchsize_
 
 
 ## 3. 更多教程
-本文档为功能测试用,更丰富的训练预测使用教程请参考:  
-[模型训练](https://github.com/PaddlePaddle/PaddleOCR/blob/dygraph/doc/doc_ch/training.md)  
+本文档为功能测试用,更丰富的训练预测使用教程请参考:
+[模型训练](https://github.com/PaddlePaddle/PaddleOCR/blob/dygraph/doc/doc_ch/training.md)
 [基于Python预测引擎推理](https://github.com/PaddlePaddle/PaddleOCR/blob/dygraph/doc/doc_ch/inference_ppocr.md)
diff --git a/test_tipc/supplementary/__init__.py b/test_tipc/supplementary/__init__.py
index 8b1378917..e69de29bb 100644
--- a/test_tipc/supplementary/__init__.py
+++ b/test_tipc/supplementary/__init__.py
@@ -1 +0,0 @@
-
diff --git a/test_tipc/supplementary/custom_op/custom_relu_op.cc b/test_tipc/supplementary/custom_op/custom_relu_op.cc
index 86d8380c2..6c48da7a4 100644
--- a/test_tipc/supplementary/custom_op/custom_relu_op.cc
+++ b/test_tipc/supplementary/custom_op/custom_relu_op.cc
@@ -103,4 +103,4 @@ PD_BUILD_OP(custom_relu)
 PD_BUILD_GRAD_OP(custom_relu)
     .Inputs({"X", "Out", paddle::Grad("Out")})
     .Outputs({paddle::Grad("X")})
-    .SetKernelFn(PD_KERNEL(ReluBackward));
\ No newline at end of file
+    .SetKernelFn(PD_KERNEL(ReluBackward));
diff --git a/test_tipc/supplementary/mv3_distill.yml b/test_tipc/supplementary/mv3_distill.yml
index 887b1eb17..57b813029 100644
--- a/test_tipc/supplementary/mv3_distill.yml
+++ b/test_tipc/supplementary/mv3_distill.yml
@@ -28,4 +28,3 @@ TRAIN:
 VALID:
     batch_size: 64
     num_workers: 4
-
diff --git a/test_tipc/supplementary/mv3_large_x0_5.yml b/test_tipc/supplementary/mv3_large_x0_5.yml
index 531c2f0f5..fd7d3084a 100644
--- a/test_tipc/supplementary/mv3_large_x0_5.yml
+++ b/test_tipc/supplementary/mv3_large_x0_5.yml
@@ -46,4 +46,3 @@ TRAIN:
 VALID:
     batch_size: 64
     num_workers: 4
-
diff --git a/test_tipc/supplementary/readme.md b/test_tipc/supplementary/readme.md
index a378fc5f3..8e822faeb 100644
--- a/test_tipc/supplementary/readme.md
+++ b/test_tipc/supplementary/readme.md
@@ -64,11 +64,11 @@ test_tipc/output/
 其中results_python.log中包含了每条指令的运行状态,如果运行成功会输出:
 
 ```
-Run successfully with command - python3.7 train.py -c mv3_large_x0_5.yml -o  use_gpu=True     epoch=20       AMP.use_amp=True TRAIN.batch_size=1280  use_custom_relu=False model_type=cls MODEL.siamese=False  !  
-Run successfully with command - python3.7 train.py -c mv3_large_x0_5.yml -o  use_gpu=True     epoch=2       AMP.use_amp=True TRAIN.batch_size=1280  use_custom_relu=False model_type=cls MODEL.siamese=False  !  
-Run successfully with command - python3.7 train.py -c mv3_large_x0_5.yml -o  use_gpu=True     epoch=2       AMP.use_amp=True TRAIN.batch_size=1280  use_custom_relu=False model_type=cls MODEL.siamese=True  !  
-Run successfully with command - python3.7 train.py -c mv3_large_x0_5.yml -o  use_gpu=True     epoch=2       AMP.use_amp=True TRAIN.batch_size=1280  use_custom_relu=False model_type=cls_distill MODEL.siamese=False  !  
-Run successfully with command - python3.7 train.py -c mv3_large_x0_5.yml -o  use_gpu=True     epoch=2       AMP.use_amp=True TRAIN.batch_size=1280  use_custom_relu=False model_type=cls_distill MODEL.siamese=True  !  
+Run successfully with command - python3.7 train.py -c mv3_large_x0_5.yml -o  use_gpu=True     epoch=20       AMP.use_amp=True TRAIN.batch_size=1280  use_custom_relu=False model_type=cls MODEL.siamese=False  !
+Run successfully with command - python3.7 train.py -c mv3_large_x0_5.yml -o  use_gpu=True     epoch=2       AMP.use_amp=True TRAIN.batch_size=1280  use_custom_relu=False model_type=cls MODEL.siamese=False  !
+Run successfully with command - python3.7 train.py -c mv3_large_x0_5.yml -o  use_gpu=True     epoch=2       AMP.use_amp=True TRAIN.batch_size=1280  use_custom_relu=False model_type=cls MODEL.siamese=True  !
+Run successfully with command - python3.7 train.py -c mv3_large_x0_5.yml -o  use_gpu=True     epoch=2       AMP.use_amp=True TRAIN.batch_size=1280  use_custom_relu=False model_type=cls_distill MODEL.siamese=False  !
+Run successfully with command - python3.7 train.py -c mv3_large_x0_5.yml -o  use_gpu=True     epoch=2       AMP.use_amp=True TRAIN.batch_size=1280  use_custom_relu=False model_type=cls_distill MODEL.siamese=True  !
 Run successfully with command - python3.7 train.py -c mv3_large_x0_5.yml -o  use_gpu=True     epoch=2       AMP.use_amp=True TRAIN.batch_size=1280  use_custom_relu=False model_type=cls_distill_multiopt MODEL.siamese=False  !
 
 ```
diff --git a/test_tipc/supplementary/test_tipc/common_func.sh b/test_tipc/supplementary/test_tipc/common_func.sh
index e2ff5c4d7..844536ad7 100644
--- a/test_tipc/supplementary/test_tipc/common_func.sh
+++ b/test_tipc/supplementary/test_tipc/common_func.sh
@@ -62,4 +62,4 @@ function status_check(){
     else
         echo -e "\033[33m Run failed with command - ${run_command}!  \033[0m" | tee -a ${run_log}
     fi
-}
\ No newline at end of file
+}
diff --git a/test_tipc/supplementary/test_tipc/test_train_python.sh b/test_tipc/supplementary/test_tipc/test_train_python.sh
index ed709c1c4..53e1cc913 100644
--- a/test_tipc/supplementary/test_tipc/test_train_python.sh
+++ b/test_tipc/supplementary/test_tipc/test_train_python.sh
@@ -112,7 +112,3 @@ if [ ${MODE} = "lite_train_lite_infer" ] || [ ${MODE} = "whole_train_whole_infer
         done
     done
 fi
-
-
-
-
diff --git a/test_tipc/test_inference_python.sh b/test_tipc/test_inference_python.sh
index e9908df1f..a1273584f 100644
--- a/test_tipc/test_inference_python.sh
+++ b/test_tipc/test_inference_python.sh
@@ -167,5 +167,3 @@ if [ ${MODE} = "whole_infer" ]; then
         Count=$(($Count + 1))
     done
 fi
-
-
diff --git a/test_tipc/test_ptq_inference_python.sh b/test_tipc/test_ptq_inference_python.sh
index caf3d5060..f9cd829f2 100644
--- a/test_tipc/test_ptq_inference_python.sh
+++ b/test_tipc/test_ptq_inference_python.sh
@@ -155,4 +155,3 @@ if [ ${MODE} = "whole_infer" ]; then
         Count=$(($Count + 1))
     done
 fi
-
diff --git a/test_tipc/test_train_inference_python.sh b/test_tipc/test_train_inference_python.sh
index 9a94db858..95a6bd864 100644
--- a/test_tipc/test_train_inference_python.sh
+++ b/test_tipc/test_train_inference_python.sh
@@ -340,4 +340,4 @@ else
             done  # done with:    for trainer in ${trainer_list[*]}; do 
         done      # done with:    for autocast in ${autocast_list[*]}; do 
     done          # done with:    for gpu in ${gpu_list[*]}; do
-fi  # end if [ ${MODE} = "infer" ]; then
\ No newline at end of file
+fi  # end if [ ${MODE} = "infer" ]; then
diff --git a/test_tipc/web/index.html b/test_tipc/web/index.html
index 39921fbf0..211115502 100644
--- a/test_tipc/web/index.html
+++ b/test_tipc/web/index.html
@@ -10,4 +10,4 @@
     <img id="ocr" src="./test.jpg" />
 </body>
 <script src="./node_modules/@paddlejs-models/ocr/lib/index.js"></script>
-</html>
\ No newline at end of file
+</html>
diff --git a/train.sh b/train.sh
index 6fa04ea3f..4225470cb 100644
--- a/train.sh
+++ b/train.sh
@@ -1,2 +1,2 @@
 # recommended paddle.__version__ == 2.0.0
-python3 -m paddle.distributed.launch --log_dir=./debug/ --gpus '0,1,2,3,4,5,6,7'  tools/train.py -c configs/rec/rec_mv3_none_bilstm_ctc.yml
\ No newline at end of file
+python3 -m paddle.distributed.launch --log_dir=./debug/ --gpus '0,1,2,3,4,5,6,7'  tools/train.py -c configs/rec/rec_mv3_none_bilstm_ctc.yml