From 37ff38e7aa153a82b045a41a7ae46df9d2de48fc Mon Sep 17 00:00:00 2001
From: liukuikun <24622904+Harold-lkk@users.noreply.github.com>
Date: Thu, 4 Aug 2022 15:25:54 +0800
Subject: [PATCH] [Fix] fix base name and dist_xx (#1253)

* fix base name and dist_xx

* update registry
---
 .../models/textrecog/recognizers/__init__.py  |  2 +-
 .../{base.py => base_recognizer.py}           |  0
 .../recognizers/encoder_decoder_recognizer.py |  2 +-
 mmocr/registry.py                             | 25 +++++++++----
 tools/dist_test.sh                            |  6 ----
 tools/dist_train.sh                           | 35 ++++++-------------------
 6 files changed, 31 insertions(+), 39 deletions(-)
 rename mmocr/models/textrecog/recognizers/{base.py => base_recognizer.py} (100%)

diff --git a/mmocr/models/textrecog/recognizers/__init__.py b/mmocr/models/textrecog/recognizers/__init__.py
index dec944e2..9bc7115a 100644
--- a/mmocr/models/textrecog/recognizers/__init__.py
+++ b/mmocr/models/textrecog/recognizers/__init__.py
@@ -1,6 +1,6 @@
 # Copyright (c) OpenMMLab. All rights reserved.
 from .abinet import ABINet
-from .base import BaseRecognizer
+from .base_recognizer import BaseRecognizer
 from .crnn import CRNN
 from .encoder_decoder_recognizer import EncoderDecoderRecognizer
 from .master import MASTER
diff --git a/mmocr/models/textrecog/recognizers/base.py b/mmocr/models/textrecog/recognizers/base_recognizer.py
similarity index 100%
rename from mmocr/models/textrecog/recognizers/base.py
rename to mmocr/models/textrecog/recognizers/base_recognizer.py
diff --git a/mmocr/models/textrecog/recognizers/encoder_decoder_recognizer.py b/mmocr/models/textrecog/recognizers/encoder_decoder_recognizer.py
index c6aa295b..d49644a3 100644
--- a/mmocr/models/textrecog/recognizers/encoder_decoder_recognizer.py
+++ b/mmocr/models/textrecog/recognizers/encoder_decoder_recognizer.py
@@ -8,7 +8,7 @@ from mmocr.registry import MODELS
 from mmocr.utils.typing import (ConfigType, InitConfigType, OptConfigType,
                                 OptRecSampleList, RecForwardResults,
                                 RecSampleList)
-from .base import BaseRecognizer
+from .base_recognizer import BaseRecognizer
 
 
 @MODELS.register_module()
diff --git a/mmocr/registry.py b/mmocr/registry.py
index e2e68a1d..0f3b6616 100644
--- a/mmocr/registry.py
+++ b/mmocr/registry.py
@@ -1,6 +1,6 @@
 # Copyright (c) OpenMMLab. All rights reserved.
-"""MMOCR provides 17 registry nodes to support using modules across projects.
-Each node is a child of the root registry in MMEngine.
+"""MMOCR provides 17 registry nodes to support using modules across
+projects. Each node is a child of the root registry in MMEngine.
 
 More details can be found at
 https://mmengine.readthedocs.io/en/latest/tutorials/registry.html.
@@ -8,13 +8,16 @@ https://mmengine.readthedocs.io/en/latest/tutorials/registry.html.
 
from mmengine.registry import DATA_SAMPLERS as MMENGINE_DATA_SAMPLERS from mmengine.registry import DATASETS as MMENGINE_DATASETS +from mmengine.registry import EVALUATOR as MMENGINE_EVALUATOR from mmengine.registry import HOOKS as MMENGINE_HOOKS +from mmengine.registry import LOG_PROCESSORS as MMENGINE_LOG_PROCESSORS from mmengine.registry import LOOPS as MMENGINE_LOOPS from mmengine.registry import METRICS as MMENGINE_METRICS from mmengine.registry import MODEL_WRAPPERS as MMENGINE_MODEL_WRAPPERS from mmengine.registry import MODELS as MMENGINE_MODELS from mmengine.registry import \ OPTIM_WRAPPER_CONSTRUCTORS as MMENGINE_OPTIM_WRAPPER_CONSTRUCTORS +from mmengine.registry import OPTIM_WRAPPERS as MMENGINE_OPTIM_WRAPPERS from mmengine.registry import OPTIMIZERS as MMENGINE_OPTIMIZERS from mmengine.registry import PARAM_SCHEDULERS as MMENGINE_PARAM_SCHEDULERS from mmengine.registry import \ @@ -43,24 +46,29 @@ DATASETS = Registry('dataset', parent=MMENGINE_DATASETS) DATA_SAMPLERS = Registry('data sampler', parent=MMENGINE_DATA_SAMPLERS) TRANSFORMS = Registry('transform', parent=MMENGINE_TRANSFORMS) -# mangage all kinds of modules inheriting `nn.Module` +# manage all kinds of modules inheriting `nn.Module` MODELS = Registry('model', parent=MMENGINE_MODELS) -# mangage all kinds of model wrappers like 'MMDistributedDataParallel' +# manage all kinds of model wrappers like 'MMDistributedDataParallel' MODEL_WRAPPERS = Registry('model_wrapper', parent=MMENGINE_MODEL_WRAPPERS) -# mangage all kinds of weight initialization modules like `Uniform` +# manage all kinds of weight initialization modules like `Uniform` WEIGHT_INITIALIZERS = Registry( 'weight initializer', parent=MMENGINE_WEIGHT_INITIALIZERS) -# mangage all kinds of optimizers like `SGD` and `Adam` +# manage all kinds of optimizers like `SGD` and `Adam` OPTIMIZERS = Registry('optimizer', parent=MMENGINE_OPTIMIZERS) +# manage optimizer wrapper +OPTIM_WRAPPERS = Registry('optim wrapper', parent=MMENGINE_OPTIM_WRAPPERS) # manage constructors that customize the optimization hyperparameters. 
 OPTIM_WRAPPER_CONSTRUCTORS = Registry(
     'optimizer constructor', parent=MMENGINE_OPTIM_WRAPPER_CONSTRUCTORS)
-# mangage all kinds of parameter schedulers like `MultiStepLR`
+# manage all kinds of parameter schedulers like `MultiStepLR`
 PARAM_SCHEDULERS = Registry(
     'parameter scheduler', parent=MMENGINE_PARAM_SCHEDULERS)
+
 # manage all kinds of metrics
 METRICS = Registry('metric', parent=MMENGINE_METRICS)
+# manage evaluator
+EVALUATOR = Registry('evaluator', parent=MMENGINE_EVALUATOR)
 
 # manage task-specific modules like anchor generators and box coders
 TASK_UTILS = Registry('task util', parent=MMENGINE_TASK_UTILS)
@@ -69,3 +77,6 @@ TASK_UTILS = Registry('task util', parent=MMENGINE_TASK_UTILS)
 VISUALIZERS = Registry('visualizer', parent=MMENGINE_VISUALIZERS)
 # manage visualizer backend
 VISBACKENDS = Registry('vis_backend', parent=MMENGINE_VISBACKENDS)
+
+# manage log processor
+LOG_PROCESSORS = Registry('log_processor', parent=MMENGINE_LOG_PROCESSORS)
diff --git a/tools/dist_test.sh b/tools/dist_test.sh
index b4246946..dea131b4 100755
--- a/tools/dist_test.sh
+++ b/tools/dist_test.sh
@@ -1,11 +1,5 @@
 #!/usr/bin/env bash
 
-if [ $# -lt 3 ]
-then
-    echo "Usage: bash $0 CONFIG CHECKPOINT GPUS"
-    exit
-fi
-
 CONFIG=$1
 CHECKPOINT=$2
 GPUS=$3
diff --git a/tools/dist_train.sh b/tools/dist_train.sh
index b939dd93..3f5b40b2 100755
--- a/tools/dist_train.sh
+++ b/tools/dist_train.sh
@@ -1,33 +1,20 @@
+
 #!/usr/bin/env bash
 
-if [ $# -lt 3 ]
-then
-    echo "Usage: bash $0 CONFIG WORK_DIR GPUS"
-    exit
-fi
-
 CONFIG=$1
-WORK_DIR=$2
-GPUS=$3
+GPUS=$2
 NNODES=${NNODES:-1}
 NODE_RANK=${NODE_RANK:-0}
 PORT=${PORT:-29500}
 MASTER_ADDR=${MASTER_ADDR:-"127.0.0.1"}
 
 PYTHONPATH="$(dirname $0)/..":$PYTHONPATH \
-
-if [ ${GPUS} == 1 ]; then
-    python $(dirname "$0")/train.py $CONFIG --work-dir=${WORK_DIR} ${@:4}
-else
-    python -m torch.distributed.launch \
-        --nnodes=$NNODES \
-        --node_rank=$NODE_RANK \
-        --master_addr=$MASTER_ADDR \
-        --nproc_per_node=$GPUS \
-        --master_port=$PORT \
-        $(dirname "$0")/train.py \
-        $CONFIG \
-        --seed 0 \
-        --work-dir=${WORK_DIR} \
-        --launcher pytorch ${@:4}
-fi
+python -m torch.distributed.launch \
+    --nnodes=$NNODES \
+    --node_rank=$NODE_RANK \
+    --master_addr=$MASTER_ADDR \
+    --nproc_per_node=$GPUS \
+    --master_port=$PORT \
+    $(dirname "$0")/train.py \
+    $CONFIG \
+    --launcher pytorch ${@:3}
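
Usage sketch (illustrative, not part of the patch; the config, checkpoint and
work-dir paths below are placeholders): dist_train.sh now takes only CONFIG and
GPUS as positional arguments and forwards everything after them to train.py via
"${@:3}", always launching through torch.distributed.launch even for a single
GPU, so the work directory is passed as an explicit --work-dir flag instead of
a positional WORK_DIR. dist_test.sh keeps CONFIG, CHECKPOINT and GPUS; with the
argument-count guard removed, a missing argument now surfaces as an error from
the launched script rather than a usage message.

# train on 8 GPUs; --work-dir (and any other train.py flag) rides along via ${@:3}
PORT=29500 bash tools/dist_train.sh path/to/config.py 8 --work-dir work_dirs/my_exp

# test with the unchanged positional signature; extra flags are presumably
# forwarded to test.py in the same way
bash tools/dist_test.sh path/to/config.py path/to/checkpoint.pth 8

Note that the hard-coded "--seed 0" from the old dist_train.sh is gone, so the
launcher no longer sets a seed on its own.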