diff --git a/ppcls/arch/backbone/base/theseus_layer.py b/ppcls/arch/backbone/base/theseus_layer.py
index b5afd823d..466b16318 100644
--- a/ppcls/arch/backbone/base/theseus_layer.py
+++ b/ppcls/arch/backbone/base/theseus_layer.py
@@ -44,6 +44,34 @@ class TheseusLayer(nn.Layer):
     def _save_sub_res_hook(self, layer, input, output):
         self.res_dict[self.res_name] = output
 
+    def init_res(self,
+                 stages_pattern,
+                 return_patterns=None,
+                 return_stages=None):
+        if return_patterns and return_stages:
+            msg = f"The 'return_patterns' would be ignored when 'return_stages' is set."
+            logger.warning(msg)
+            return_stages = None
+
+        if return_stages is True:
+            return_patterns = stages_pattern
+        # bool is a subclass of int, so use type() to skip True/False here
+        if type(return_stages) is int:
+            return_stages = [return_stages]
+        if isinstance(return_stages, list):
+            if max(return_stages) >= len(stages_pattern) or min(
+                    return_stages) < 0:
+                msg = f"Invalid value(s) in 'return_stages' have been ignored. The stages' pattern list is {stages_pattern}."
+                logger.warning(msg)
+                return_stages = [
+                    val for val in return_stages
+                    if val >= 0 and val < len(stages_pattern)
+                ]
+            return_patterns = [stages_pattern[i] for i in return_stages]
+
+        if return_patterns:
+            self.update_res(return_patterns)
+
     def replace_sub(self, *args, **kwargs) -> None:
         msg = "The function 'replace_sub()' is deprecated, please use 'upgrade_sublayer()' instead."
         logger.error(DeprecationWarning(msg))
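For reviewers, a minimal standalone sketch of the resolution performed by the new `init_res()`: stage indices in `return_stages` are mapped onto `stages_pattern` entries, while explicit `return_patterns` take precedence. The helper name `resolve_return_patterns` is hypothetical and only mirrors the logic above for illustration.

```python
def resolve_return_patterns(stages_pattern, return_patterns=None,
                            return_stages=None):
    # Mirrors TheseusLayer.init_res(): explicit patterns win over stage indices.
    if return_patterns and return_stages:
        return_stages = None
    if return_stages is True:
        return_patterns = stages_pattern
    # bool is a subclass of int, so type() is used to skip True/False here
    if type(return_stages) is int:
        return_stages = [return_stages]
    if isinstance(return_stages, list):
        return_stages = [
            i for i in return_stages if 0 <= i < len(stages_pattern)
        ]
        return_patterns = [stages_pattern[i] for i in return_stages]
    return return_patterns


stages = ["blocks[2]", "blocks[9]", "blocks[12]"]  # ESNet stage patterns
print(resolve_return_patterns(stages, return_stages=[0, 2]))
# ['blocks[2]', 'blocks[12]']
print(resolve_return_patterns(stages, return_stages=True))
# ['blocks[2]', 'blocks[9]', 'blocks[12]']
```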
diff --git a/ppcls/arch/backbone/legendary_models/esnet.py b/ppcls/arch/backbone/legendary_models/esnet.py
index 3a8d66903..e05e0ceb3 100644
--- a/ppcls/arch/backbone/legendary_models/esnet.py
+++ b/ppcls/arch/backbone/legendary_models/esnet.py
@@ -36,6 +36,8 @@ MODEL_URLS = {
     "https://paddle-imagenet-models-name.bj.bcebos.com/dygraph/legendary_models/ESNet_x1_0_pretrained.pdparams",
 }
 
+MODEL_STAGES_PATTERN = {"ESNet": ["blocks[2]", "blocks[9]", "blocks[12]"]}
+
 __all__ = list(MODEL_URLS.keys())
 
 
@@ -214,11 +216,13 @@ class ESBlock2(TheseusLayer):
 
 class ESNet(TheseusLayer):
     def __init__(self,
+                 stages_pattern,
                  class_num=1000,
                  scale=1.0,
                  dropout_prob=0.2,
                  class_expand=1280,
-                 return_patterns=None):
+                 return_patterns=None,
+                 return_stages=None):
         super().__init__()
         self.scale = scale
         self.class_num = class_num
@@ -269,8 +273,10 @@ class ESNet(TheseusLayer):
         self.flatten = nn.Flatten(start_axis=1, stop_axis=-1)
         self.fc = Linear(self.class_expand, self.class_num)
 
-        if return_patterns is not None:
-            self.update_res(return_patterns)
+        super().init_res(
+            stages_pattern,
+            return_patterns=return_patterns,
+            return_stages=return_stages)
 
     def forward(self, x):
         x = self.conv1(x)
@@ -309,7 +315,8 @@ def ESNet_x0_25(pretrained=False, use_ssld=False, **kwargs):
     Returns:
         model: nn.Layer. Specific `ESNet_x0_25` model depends on args.
     """
-    model = ESNet(scale=0.25, **kwargs)
+    model = ESNet(
+        scale=0.25, stages_pattern=MODEL_STAGES_PATTERN["ESNet"], **kwargs)
     _load_pretrained(pretrained, model, MODEL_URLS["ESNet_x0_25"], use_ssld)
     return model
 
@@ -324,7 +331,8 @@ def ESNet_x0_5(pretrained=False, use_ssld=False, **kwargs):
     Returns:
         model: nn.Layer. Specific `ESNet_x0_5` model depends on args.
     """
-    model = ESNet(scale=0.5, **kwargs)
+    model = ESNet(
+        scale=0.5, stages_pattern=MODEL_STAGES_PATTERN["ESNet"], **kwargs)
     _load_pretrained(pretrained, model, MODEL_URLS["ESNet_x0_5"], use_ssld)
     return model
 
@@ -339,7 +347,8 @@ def ESNet_x0_75(pretrained=False, use_ssld=False, **kwargs):
     Returns:
         model: nn.Layer. Specific `ESNet_x0_75` model depends on args.
     """
-    model = ESNet(scale=0.75, **kwargs)
+    model = ESNet(
+        scale=0.75, stages_pattern=MODEL_STAGES_PATTERN["ESNet"], **kwargs)
     _load_pretrained(pretrained, model, MODEL_URLS["ESNet_x0_75"], use_ssld)
     return model
 
@@ -354,6 +363,7 @@ def ESNet_x1_0(pretrained=False, use_ssld=False, **kwargs):
     Returns:
         model: nn.Layer. Specific `ESNet_x1_0` model depends on args.
     """
-    model = ESNet(scale=1.0, **kwargs)
+    model = ESNet(
+        scale=1.0, stages_pattern=MODEL_STAGES_PATTERN["ESNet"], **kwargs)
     _load_pretrained(pretrained, model, MODEL_URLS["ESNet_x1_0"], use_ssld)
     return model
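A usage sketch of the new `return_stages` argument on ESNet, assuming the hooks registered by `update_res()` store intermediate outputs in `model.res_dict` (as `_save_sub_res_hook` above does); the exact shape of the forward output at this revision is not asserted here.

```python
import paddle

from ppcls.arch.backbone.legendary_models.esnet import ESNet_x1_0

# return_stages indexes into MODEL_STAGES_PATTERN["ESNet"]:
# [0, 2] resolves to ["blocks[2]", "blocks[12]"].
model = ESNet_x1_0(pretrained=False, return_stages=[0, 2])

x = paddle.rand([1, 3, 224, 224])
out = model(x)  # classification output; wrapping depends on TheseusLayer

# Hooked intermediate features are expected under the pattern names.
for name, feat in model.res_dict.items():
    print(name, feat.shape)
```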
diff --git a/ppcls/arch/backbone/legendary_models/hrnet.py b/ppcls/arch/backbone/legendary_models/hrnet.py
index da6c5f676..c3f77590c 100644
--- a/ppcls/arch/backbone/legendary_models/hrnet.py
+++ b/ppcls/arch/backbone/legendary_models/hrnet.py
@@ -43,6 +43,8 @@ MODEL_URLS = {
     "https://paddle-imagenet-models-name.bj.bcebos.com/dygraph/legendary_models/HRNet_W64_C_pretrained.pdparams"
 }
 
+MODEL_STAGES_PATTERN = {"HRNet": ["st4"]}
+
 __all__ = list(MODEL_URLS.keys())
 
 
@@ -368,10 +370,12 @@ class HRNet(TheseusLayer):
     """
 
     def __init__(self,
+                 stages_pattern,
                  width=18,
                  has_se=False,
                  class_num=1000,
-                 return_patterns=None):
+                 return_patterns=None,
+                 return_stages=None):
         super().__init__()
 
         self.width = width
@@ -460,8 +464,11 @@ class HRNet(TheseusLayer):
             2048,
             class_num,
             weight_attr=ParamAttr(initializer=Uniform(-stdv, stdv)))
-        if return_patterns is not None:
-            self.update_res(return_patterns)
+
+        super().init_res(
+            stages_pattern,
+            return_patterns=return_patterns,
+            return_stages=return_stages)
 
     def forward(self, x):
         x = self.conv_layer1_1(x)
@@ -517,7 +524,8 @@ def HRNet_W18_C(pretrained=False, use_ssld=False, **kwargs):
     Returns:
         model: nn.Layer. Specific `HRNet_W18_C` model depends on args.
     """
-    model = HRNet(width=18, **kwargs)
+    model = HRNet(
+        width=18, stages_pattern=MODEL_STAGES_PATTERN["HRNet"], **kwargs)
     _load_pretrained(pretrained, model, MODEL_URLS["HRNet_W18_C"], use_ssld)
     return model
 
@@ -532,7 +540,8 @@ def HRNet_W30_C(pretrained=False, use_ssld=False, **kwargs):
     Returns:
         model: nn.Layer. Specific `HRNet_W30_C` model depends on args.
     """
-    model = HRNet(width=30, **kwargs)
+    model = HRNet(
+        width=30, stages_pattern=MODEL_STAGES_PATTERN["HRNet"], **kwargs)
     _load_pretrained(pretrained, model, MODEL_URLS["HRNet_W30_C"], use_ssld)
     return model
 
@@ -547,7 +556,8 @@ def HRNet_W32_C(pretrained=False, use_ssld=False, **kwargs):
     Returns:
         model: nn.Layer. Specific `HRNet_W32_C` model depends on args.
     """
-    model = HRNet(width=32, **kwargs)
+    model = HRNet(
+        width=32, stages_pattern=MODEL_STAGES_PATTERN["HRNet"], **kwargs)
     _load_pretrained(pretrained, model, MODEL_URLS["HRNet_W32_C"], use_ssld)
     return model
 
@@ -562,7 +572,8 @@ def HRNet_W40_C(pretrained=False, use_ssld=False, **kwargs):
     Returns:
         model: nn.Layer. Specific `HRNet_W40_C` model depends on args.
     """
-    model = HRNet(width=40, **kwargs)
+    model = HRNet(
+        width=40, stages_pattern=MODEL_STAGES_PATTERN["HRNet"], **kwargs)
     _load_pretrained(pretrained, model, MODEL_URLS["HRNet_W40_C"], use_ssld)
     return model
 
@@ -577,7 +588,8 @@ def HRNet_W44_C(pretrained=False, use_ssld=False, **kwargs):
     Returns:
         model: nn.Layer. Specific `HRNet_W44_C` model depends on args.
     """
-    model = HRNet(width=44, **kwargs)
+    model = HRNet(
+        width=44, stages_pattern=MODEL_STAGES_PATTERN["HRNet"], **kwargs)
     _load_pretrained(pretrained, model, MODEL_URLS["HRNet_W44_C"], use_ssld)
     return model
 
@@ -592,7 +604,8 @@ def HRNet_W48_C(pretrained=False, use_ssld=False, **kwargs):
     Returns:
         model: nn.Layer. Specific `HRNet_W48_C` model depends on args.
     """
-    model = HRNet(width=48, **kwargs)
+    model = HRNet(
+        width=48, stages_pattern=MODEL_STAGES_PATTERN["HRNet"], **kwargs)
     _load_pretrained(pretrained, model, MODEL_URLS["HRNet_W48_C"], use_ssld)
     return model
 
@@ -607,7 +620,8 @@ def HRNet_W60_C(pretrained=False, use_ssld=False, **kwargs):
     Returns:
         model: nn.Layer. Specific `HRNet_W60_C` model depends on args.
     """
-    model = HRNet(width=60, **kwargs)
+    model = HRNet(
+        width=60, stages_pattern=MODEL_STAGES_PATTERN["HRNet"], **kwargs)
     _load_pretrained(pretrained, model, MODEL_URLS["HRNet_W60_C"], use_ssld)
     return model
 
@@ -622,7 +636,8 @@ def HRNet_W64_C(pretrained=False, use_ssld=False, **kwargs):
     Returns:
         model: nn.Layer. Specific `HRNet_W64_C` model depends on args.
     """
-    model = HRNet(width=64, **kwargs)
+    model = HRNet(
+        width=64, stages_pattern=MODEL_STAGES_PATTERN["HRNet"], **kwargs)
     _load_pretrained(pretrained, model, MODEL_URLS["HRNet_W64_C"], use_ssld)
     return model
 
@@ -637,7 +652,11 @@ def SE_HRNet_W18_C(pretrained=False, use_ssld=False, **kwargs):
     Returns:
         model: nn.Layer. Specific `SE_HRNet_W18_C` model depends on args.
     """
-    model = HRNet(width=18, has_se=True, **kwargs)
+    model = HRNet(
+        width=18,
+        stages_pattern=MODEL_STAGES_PATTERN["HRNet"],
+        has_se=True,
+        **kwargs)
     _load_pretrained(pretrained, model, MODEL_URLS["SE_HRNet_W18_C"], use_ssld)
     return model
 
@@ -652,7 +671,11 @@ def SE_HRNet_W30_C(pretrained=False, use_ssld=False, **kwargs):
     Returns:
         model: nn.Layer. Specific `SE_HRNet_W30_C` model depends on args.
     """
-    model = HRNet(width=30, has_se=True, **kwargs)
+    model = HRNet(
+        width=30,
+        stages_pattern=MODEL_STAGES_PATTERN["HRNet"],
+        has_se=True,
+        **kwargs)
     _load_pretrained(pretrained, model, MODEL_URLS["SE_HRNet_W30_C"], use_ssld)
     return model
 
@@ -667,7 +690,11 @@ def SE_HRNet_W32_C(pretrained=False, use_ssld=False, **kwargs):
     Returns:
         model: nn.Layer. Specific `SE_HRNet_W32_C` model depends on args.
     """
-    model = HRNet(width=32, has_se=True, **kwargs)
+    model = HRNet(
+        width=32,
+        stages_pattern=MODEL_STAGES_PATTERN["HRNet"],
+        has_se=True,
+        **kwargs)
     _load_pretrained(pretrained, model, MODEL_URLS["SE_HRNet_W32_C"], use_ssld)
     return model
 
@@ -682,7 +709,11 @@ def SE_HRNet_W40_C(pretrained=False, use_ssld=False, **kwargs):
     Returns:
         model: nn.Layer. Specific `SE_HRNet_W40_C` model depends on args.
     """
-    model = HRNet(width=40, has_se=True, **kwargs)
+    model = HRNet(
+        width=40,
+        stages_pattern=MODEL_STAGES_PATTERN["HRNet"],
+        has_se=True,
+        **kwargs)
     _load_pretrained(pretrained, model, MODEL_URLS["SE_HRNet_W40_C"], use_ssld)
     return model
 
@@ -697,7 +728,11 @@ def SE_HRNet_W44_C(pretrained=False, use_ssld=False, **kwargs):
     Returns:
         model: nn.Layer. Specific `SE_HRNet_W44_C` model depends on args.
     """
-    model = HRNet(width=44, has_se=True, **kwargs)
+    model = HRNet(
+        width=44,
+        stages_pattern=MODEL_STAGES_PATTERN["HRNet"],
+        has_se=True,
+        **kwargs)
     _load_pretrained(pretrained, model, MODEL_URLS["SE_HRNet_W44_C"], use_ssld)
     return model
 
@@ -712,7 +747,11 @@ def SE_HRNet_W48_C(pretrained=False, use_ssld=False, **kwargs):
     Returns:
         model: nn.Layer. Specific `SE_HRNet_W48_C` model depends on args.
     """
-    model = HRNet(width=48, has_se=True, **kwargs)
+    model = HRNet(
+        width=48,
+        stages_pattern=MODEL_STAGES_PATTERN["HRNet"],
+        has_se=True,
+        **kwargs)
     _load_pretrained(pretrained, model, MODEL_URLS["SE_HRNet_W48_C"], use_ssld)
     return model
 
@@ -727,7 +766,11 @@ def SE_HRNet_W60_C(pretrained=False, use_ssld=False, **kwargs):
     Returns:
         model: nn.Layer. Specific `SE_HRNet_W60_C` model depends on args.
     """
-    model = HRNet(width=60, has_se=True, **kwargs)
+    model = HRNet(
+        width=60,
+        stages_pattern=MODEL_STAGES_PATTERN["HRNet"],
+        has_se=True,
+        **kwargs)
     _load_pretrained(pretrained, model, MODEL_URLS["SE_HRNet_W60_C"], use_ssld)
     return model
 
@@ -742,6 +785,10 @@ def SE_HRNet_W64_C(pretrained=False, use_ssld=False, **kwargs):
     Returns:
         model: nn.Layer. Specific `SE_HRNet_W64_C` model depends on args.
     """
-    model = HRNet(width=64, has_se=True, **kwargs)
+    model = HRNet(
+        width=64,
+        stages_pattern=MODEL_STAGES_PATTERN["HRNet"],
+        has_se=True,
+        **kwargs)
     _load_pretrained(pretrained, model, MODEL_URLS["SE_HRNet_W64_C"], use_ssld)
     return model
diff --git a/ppcls/arch/backbone/legendary_models/inception_v3.py b/ppcls/arch/backbone/legendary_models/inception_v3.py
index c5ccc3dc9..6901ba646 100644
--- a/ppcls/arch/backbone/legendary_models/inception_v3.py
+++ b/ppcls/arch/backbone/legendary_models/inception_v3.py
@@ -29,6 +29,14 @@ MODEL_URLS = {
     "https://paddle-imagenet-models-name.bj.bcebos.com/dygraph/legendary_models/InceptionV3_pretrained.pdparams"
 }
 
+MODEL_STAGES_PATTERN = {
+    "InceptionV3": [
+        "inception_block_list[2]", "inception_block_list[3]",
+        "inception_block_list[7]", "inception_block_list[8]",
+        "inception_block_list[10]"
+    ]
+}
+
 __all__ = MODEL_URLS.keys()
 '''
 InceptionV3 config: dict.
@@ -454,7 +462,12 @@ class Inception_V3(TheseusLayer):
         model: nn.Layer. Specific Inception_V3 model depends on args.
     """
 
-    def __init__(self, config, class_num=1000, return_patterns=None):
+    def __init__(self,
+                 config,
+                 stages_pattern,
+                 class_num=1000,
+                 return_patterns=None,
+                 return_stages=None):
         super().__init__()
 
         self.inception_a_list = config["inception_a"]
@@ -496,8 +509,11 @@ class Inception_V3(TheseusLayer):
             class_num,
             weight_attr=ParamAttr(initializer=Uniform(-stdv, stdv)),
             bias_attr=ParamAttr())
-        if return_patterns is not None:
-            self.update_res(return_patterns)
+
+        super().init_res(
+            stages_pattern,
+            return_patterns=return_patterns,
+            return_stages=return_stages)
 
     def forward(self, x):
         x = self.inception_stem(x)
@@ -533,6 +549,9 @@ def InceptionV3(pretrained=False, use_ssld=False, **kwargs):
     Returns:
         model: nn.Layer. Specific `InceptionV3` model 
     """
-    model = Inception_V3(NET_CONFIG, **kwargs)
+    model = Inception_V3(
+        NET_CONFIG,
+        stages_pattern=MODEL_STAGES_PATTERN["InceptionV3"],
+        **kwargs)
     _load_pretrained(pretrained, model, MODEL_URLS["InceptionV3"], use_ssld)
     return model
diff --git a/ppcls/arch/backbone/legendary_models/mobilenet_v1.py b/ppcls/arch/backbone/legendary_models/mobilenet_v1.py
index 8bda78d5c..9767d69b3 100644
--- a/ppcls/arch/backbone/legendary_models/mobilenet_v1.py
+++ b/ppcls/arch/backbone/legendary_models/mobilenet_v1.py
@@ -34,6 +34,10 @@ MODEL_URLS = {
     "https://paddle-imagenet-models-name.bj.bcebos.com/dygraph/legendary_models/MobileNetV1_pretrained.pdparams"
 }
 
+MODEL_STAGES_PATTERN = {
+    "MobileNetV1": ["blocks[0]", "blocks[2]", "blocks[4]", "blocks[10]"]
+}
+
 __all__ = MODEL_URLS.keys()
 
 
@@ -102,7 +106,12 @@ class MobileNet(TheseusLayer):
         model: nn.Layer. Specific MobileNet model depends on args.
     """
 
-    def __init__(self, scale=1.0, class_num=1000, return_patterns=None):
+    def __init__(self,
+                 stages_pattern,
+                 scale=1.0,
+                 class_num=1000,
+                 return_patterns=None,
+                 return_stages=None):
         super().__init__()
         self.scale = scale
 
@@ -145,8 +154,11 @@ class MobileNet(TheseusLayer):
             int(1024 * scale),
             class_num,
             weight_attr=ParamAttr(initializer=KaimingNormal()))
-        if return_patterns is not None:
-            self.update_res(return_patterns)
+
+        super().init_res(
+            stages_pattern,
+            return_patterns=return_patterns,
+            return_stages=return_stages)
 
     def forward(self, x):
         x = self.conv(x)
@@ -180,7 +192,10 @@ def MobileNetV1_x0_25(pretrained=False, use_ssld=False, **kwargs):
     Returns:
         model: nn.Layer. Specific `MobileNetV1_x0_25` model depends on args.
     """
-    model = MobileNet(scale=0.25, **kwargs)
+    model = MobileNet(
+        scale=0.25,
+        stages_pattern=MODEL_STAGES_PATTERN["MobileNetV1"],
+        **kwargs)
     _load_pretrained(pretrained, model, MODEL_URLS["MobileNetV1_x0_25"],
                      use_ssld)
     return model
@@ -196,7 +211,10 @@ def MobileNetV1_x0_5(pretrained=False, use_ssld=False, **kwargs):
     Returns:
         model: nn.Layer. Specific `MobileNetV1_x0_5` model depends on args.
     """
-    model = MobileNet(scale=0.5, **kwargs)
+    model = MobileNet(
+        scale=0.5,
+        stages_pattern=MODEL_STAGES_PATTERN["MobileNetV1"],
+        **kwargs)
     _load_pretrained(pretrained, model, MODEL_URLS["MobileNetV1_x0_5"],
                      use_ssld)
     return model
@@ -212,7 +230,10 @@ def MobileNetV1_x0_75(pretrained=False, use_ssld=False, **kwargs):
     Returns:
         model: nn.Layer. Specific `MobileNetV1_x0_75` model depends on args.
     """
-    model = MobileNet(scale=0.75, **kwargs)
+    model = MobileNet(
+        scale=0.75,
+        stages_pattern=MODEL_STAGES_PATTERN["MobileNetV1"],
+        **kwargs)
     _load_pretrained(pretrained, model, MODEL_URLS["MobileNetV1_x0_75"],
                      use_ssld)
     return model
@@ -228,6 +249,9 @@ def MobileNetV1(pretrained=False, use_ssld=False, **kwargs):
     Returns:
         model: nn.Layer. Specific `MobileNetV1` model depends on args.
     """
-    model = MobileNet(scale=1.0, **kwargs)
+    model = MobileNet(
+        scale=1.0,
+        stages_pattern=MODEL_STAGES_PATTERN["MobileNetV1"],
+        **kwargs)
     _load_pretrained(pretrained, model, MODEL_URLS["MobileNetV1"], use_ssld)
     return model
diff --git a/ppcls/arch/backbone/legendary_models/mobilenet_v3.py b/ppcls/arch/backbone/legendary_models/mobilenet_v3.py
index 36661abf0..836c54cd2 100644
--- a/ppcls/arch/backbone/legendary_models/mobilenet_v3.py
+++ b/ppcls/arch/backbone/legendary_models/mobilenet_v3.py
@@ -45,6 +45,13 @@ MODEL_URLS = {
     "https://paddle-imagenet-models-name.bj.bcebos.com/dygraph/legendary_models/MobileNetV3_large_x1_25_pretrained.pdparams",
 }
 
+MODEL_STAGES_PATTERN = {
+    "MobileNetV3_small":
+    ["blocks[0]", "blocks[2]", "blocks[7]", "blocks[10]"],
+    "MobileNetV3_large":
+    ["blocks[0]", "blocks[2]", "blocks[5]", "blocks[11]", "blocks[14]"]
+}
+
 __all__ = MODEL_URLS.keys()
 
 # "large", "small" is just for MobinetV3_large, MobileNetV3_small respectively.
@@ -137,13 +144,15 @@ class MobileNetV3(TheseusLayer):
 
     def __init__(self,
                  config,
+                 stages_pattern,
                  scale=1.0,
                  class_num=1000,
                  inplanes=STEM_CONV_NUMBER,
                  class_squeeze=LAST_SECOND_CONV_LARGE,
                  class_expand=LAST_CONV,
                  dropout_prob=0.2,
-                 return_patterns=None):
+                 return_patterns=None,
+                 return_stages=None):
         super().__init__()
 
         self.cfg = config
@@ -203,8 +212,11 @@ class MobileNetV3(TheseusLayer):
         self.flatten = nn.Flatten(start_axis=1, stop_axis=-1)
 
         self.fc = Linear(self.class_expand, class_num)
-        if return_patterns is not None:
-            self.update_res(return_patterns)
+
+        super().init_res(
+            stages_pattern,
+            return_patterns=return_patterns,
+            return_stages=return_stages)
 
     def forward(self, x):
         x = self.conv(x)
@@ -377,6 +389,7 @@ def MobileNetV3_small_x0_35(pretrained=False, use_ssld=False, **kwargs):
     model = MobileNetV3(
         config=NET_CONFIG["small"],
         scale=0.35,
+        stages_pattern=MODEL_STAGES_PATTERN["MobileNetV3_small"],
         class_squeeze=LAST_SECOND_CONV_SMALL,
         **kwargs)
     _load_pretrained(pretrained, model, MODEL_URLS["MobileNetV3_small_x0_35"],
@@ -397,6 +410,7 @@ def MobileNetV3_small_x0_5(pretrained=False, use_ssld=False, **kwargs):
     model = MobileNetV3(
         config=NET_CONFIG["small"],
         scale=0.5,
+        stages_pattern=MODEL_STAGES_PATTERN["MobileNetV3_small"],
         class_squeeze=LAST_SECOND_CONV_SMALL,
         **kwargs)
     _load_pretrained(pretrained, model, MODEL_URLS["MobileNetV3_small_x0_5"],
@@ -417,6 +431,7 @@ def MobileNetV3_small_x0_75(pretrained=False, use_ssld=False, **kwargs):
     model = MobileNetV3(
         config=NET_CONFIG["small"],
         scale=0.75,
+        stages_pattern=MODEL_STAGES_PATTERN["MobileNetV3_small"],
         class_squeeze=LAST_SECOND_CONV_SMALL,
         **kwargs)
     _load_pretrained(pretrained, model, MODEL_URLS["MobileNetV3_small_x0_75"],
@@ -437,6 +452,7 @@ def MobileNetV3_small_x1_0(pretrained=False, use_ssld=False, **kwargs):
     model = MobileNetV3(
         config=NET_CONFIG["small"],
         scale=1.0,
+        stages_pattern=MODEL_STAGES_PATTERN["MobileNetV3_small"],
         class_squeeze=LAST_SECOND_CONV_SMALL,
         **kwargs)
     _load_pretrained(pretrained, model, MODEL_URLS["MobileNetV3_small_x1_0"],
@@ -457,6 +473,7 @@ def MobileNetV3_small_x1_25(pretrained=False, use_ssld=False, **kwargs):
     model = MobileNetV3(
         config=NET_CONFIG["small"],
         scale=1.25,
+        stages_pattern=MODEL_STAGES_PATTERN["MobileNetV3_small"],
         class_squeeze=LAST_SECOND_CONV_SMALL,
         **kwargs)
     _load_pretrained(pretrained, model, MODEL_URLS["MobileNetV3_small_x1_25"],
@@ -477,6 +494,7 @@ def MobileNetV3_large_x0_35(pretrained=False, use_ssld=False, **kwargs):
     model = MobileNetV3(
         config=NET_CONFIG["large"],
         scale=0.35,
+        stages_pattern=MODEL_STAGES_PATTERN["MobileNetV3_small"],
         class_squeeze=LAST_SECOND_CONV_LARGE,
         **kwargs)
     _load_pretrained(pretrained, model, MODEL_URLS["MobileNetV3_large_x0_35"],
@@ -497,6 +515,7 @@ def MobileNetV3_large_x0_5(pretrained=False, use_ssld=False, **kwargs):
     model = MobileNetV3(
         config=NET_CONFIG["large"],
         scale=0.5,
+        stages_pattern=MODEL_STAGES_PATTERN["MobileNetV3_large"],
         class_squeeze=LAST_SECOND_CONV_LARGE,
         **kwargs)
     _load_pretrained(pretrained, model, MODEL_URLS["MobileNetV3_large_x0_5"],
@@ -517,6 +536,7 @@ def MobileNetV3_large_x0_75(pretrained=False, use_ssld=False, **kwargs):
     model = MobileNetV3(
         config=NET_CONFIG["large"],
         scale=0.75,
+        stages_pattern=MODEL_STAGES_PATTERN["MobileNetV3_large"],
         class_squeeze=LAST_SECOND_CONV_LARGE,
         **kwargs)
     _load_pretrained(pretrained, model, MODEL_URLS["MobileNetV3_large_x0_75"],
@@ -537,6 +557,7 @@ def MobileNetV3_large_x1_0(pretrained=False, use_ssld=False, **kwargs):
     model = MobileNetV3(
         config=NET_CONFIG["large"],
         scale=1.0,
+        stages_pattern=MODEL_STAGES_PATTERN["MobileNetV3_large"],
         class_squeeze=LAST_SECOND_CONV_LARGE,
         **kwargs)
     _load_pretrained(pretrained, model, MODEL_URLS["MobileNetV3_large_x1_0"],
@@ -557,6 +578,7 @@ def MobileNetV3_large_x1_25(pretrained=False, use_ssld=False, **kwargs):
     model = MobileNetV3(
         config=NET_CONFIG["large"],
         scale=1.25,
+        stages_pattern=MODEL_STAGES_PATTERN["MobileNetV3_large"],
         class_squeeze=LAST_SECOND_CONV_LARGE,
         **kwargs)
     _load_pretrained(pretrained, model, MODEL_URLS["MobileNetV3_large_x1_25"],
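With `return_stages=True`, the whole `stages_pattern` list of the chosen variant is used, so the small and large MobileNetV3 models expose a different number of hooked stages (4 vs. 5 in the lists above); a brief sketch:

```python
from ppcls.arch.backbone.legendary_models.mobilenet_v3 import (
    MobileNetV3_large_x1_0, MobileNetV3_small_x1_0)

# True resolves to the full pattern list of the respective variant.
small = MobileNetV3_small_x1_0(pretrained=False, return_stages=True)
large = MobileNetV3_large_x1_0(pretrained=False, return_stages=True)
```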
diff --git a/ppcls/arch/backbone/legendary_models/pp_lcnet.py b/ppcls/arch/backbone/legendary_models/pp_lcnet.py
index 327980f37..401746220 100644
--- a/ppcls/arch/backbone/legendary_models/pp_lcnet.py
+++ b/ppcls/arch/backbone/legendary_models/pp_lcnet.py
@@ -42,6 +42,10 @@ MODEL_URLS = {
     "https://paddle-imagenet-models-name.bj.bcebos.com/dygraph/legendary_models/PPLCNet_x2_5_pretrained.pdparams"
 }
 
+MODEL_STAGES_PATTERN = {
+    "PPLCNet": ["blocks2", "blocks3", "blocks4", "blocks5", "blocks6"]
+}
+
 __all__ = list(MODEL_URLS.keys())
 
 # Each element(list) represents a depthwise block, which is composed of k, in_c, out_c, s, use_se.
@@ -168,11 +172,13 @@ class SEModule(TheseusLayer):
 
 class PPLCNet(TheseusLayer):
     def __init__(self,
+                 stages_pattern,
                  scale=1.0,
                  class_num=1000,
                  dropout_prob=0.2,
                  class_expand=1280,
-                 return_patterns=None):
+                 return_patterns=None,
+                 return_stages=None):
         super().__init__()
         self.scale = scale
         self.class_expand = class_expand
@@ -249,8 +255,10 @@ class PPLCNet(TheseusLayer):
 
         self.fc = Linear(self.class_expand, class_num)
 
-        if return_patterns is not None:
-            self.update_res(return_patterns)
+        super().init_res(
+            stages_pattern,
+            return_patterns=return_patterns,
+            return_stages=return_stages)
 
     def forward(self, x):
         x = self.conv1(x)
@@ -293,7 +301,8 @@ def PPLCNet_x0_25(pretrained=False, use_ssld=False, **kwargs):
     Returns:
         model: nn.Layer. Specific `PPLCNet_x0_25` model depends on args.
     """
-    model = PPLCNet(scale=0.25, **kwargs)
+    model = PPLCNet(
+        scale=0.25, stages_pattern=MODEL_STAGES_PATTERN["PPLCNet"], **kwargs)
     _load_pretrained(pretrained, model, MODEL_URLS["PPLCNet_x0_25"], use_ssld)
     return model
 
@@ -308,7 +317,8 @@ def PPLCNet_x0_35(pretrained=False, use_ssld=False, **kwargs):
     Returns:
         model: nn.Layer. Specific `PPLCNet_x0_35` model depends on args.
     """
-    model = PPLCNet(scale=0.35, **kwargs)
+    model = PPLCNet(
+        scale=0.35, stages_pattern=MODEL_STAGES_PATTERN["PPLCNet"], **kwargs)
     _load_pretrained(pretrained, model, MODEL_URLS["PPLCNet_x0_35"], use_ssld)
     return model
 
@@ -323,7 +333,8 @@ def PPLCNet_x0_5(pretrained=False, use_ssld=False, **kwargs):
     Returns:
         model: nn.Layer. Specific `PPLCNet_x0_5` model depends on args.
     """
-    model = PPLCNet(scale=0.5, **kwargs)
+    model = PPLCNet(
+        scale=0.5, stages_pattern=MODEL_STAGES_PATTERN["PPLCNet"], **kwargs)
     _load_pretrained(pretrained, model, MODEL_URLS["PPLCNet_x0_5"], use_ssld)
     return model
 
@@ -338,7 +349,8 @@ def PPLCNet_x0_75(pretrained=False, use_ssld=False, **kwargs):
     Returns:
         model: nn.Layer. Specific `PPLCNet_x0_75` model depends on args.
     """
-    model = PPLCNet(scale=0.75, **kwargs)
+    model = PPLCNet(
+        scale=0.75, stages_pattern=MODEL_STAGES_PATTERN["PPLCNet"], **kwargs)
     _load_pretrained(pretrained, model, MODEL_URLS["PPLCNet_x0_75"], use_ssld)
     return model
 
@@ -353,7 +365,8 @@ def PPLCNet_x1_0(pretrained=False, use_ssld=False, **kwargs):
     Returns:
         model: nn.Layer. Specific `PPLCNet_x1_0` model depends on args.
     """
-    model = PPLCNet(scale=1.0, **kwargs)
+    model = PPLCNet(
+        scale=1.0, stages_pattern=MODEL_STAGES_PATTERN["PPLCNet"], **kwargs)
     _load_pretrained(pretrained, model, MODEL_URLS["PPLCNet_x1_0"], use_ssld)
     return model
 
@@ -368,7 +381,8 @@ def PPLCNet_x1_5(pretrained=False, use_ssld=False, **kwargs):
     Returns:
         model: nn.Layer. Specific `PPLCNet_x1_5` model depends on args.
     """
-    model = PPLCNet(scale=1.5, **kwargs)
+    model = PPLCNet(
+        scale=1.5, stages_pattern=MODEL_STAGES_PATTERN["PPLCNet"], **kwargs)
     _load_pretrained(pretrained, model, MODEL_URLS["PPLCNet_x1_5"], use_ssld)
     return model
 
@@ -383,7 +397,8 @@ def PPLCNet_x2_0(pretrained=False, use_ssld=False, **kwargs):
     Returns:
         model: nn.Layer. Specific `PPLCNet_x2_0` model depends on args.
     """
-    model = PPLCNet(scale=2.0, **kwargs)
+    model = PPLCNet(
+        scale=2.0, stages_pattern=MODEL_STAGES_PATTERN["PPLCNet"], **kwargs)
     _load_pretrained(pretrained, model, MODEL_URLS["PPLCNet_x2_0"], use_ssld)
     return model
 
@@ -398,6 +413,7 @@ def PPLCNet_x2_5(pretrained=False, use_ssld=False, **kwargs):
     Returns:
         model: nn.Layer. Specific `PPLCNet_x2_5` model depends on args.
     """
-    model = PPLCNet(scale=2.5, **kwargs)
+    model = PPLCNet(
+        scale=2.5, stages_pattern=MODEL_STAGES_PATTERN["PPLCNet"], **kwargs)
     _load_pretrained(pretrained, model, MODEL_URLS["PPLCNet_x2_5"], use_ssld)
     return model
diff --git a/ppcls/arch/backbone/legendary_models/resnet.py b/ppcls/arch/backbone/legendary_models/resnet.py
index f37cfef9f..74c5c5fa6 100644
--- a/ppcls/arch/backbone/legendary_models/resnet.py
+++ b/ppcls/arch/backbone/legendary_models/resnet.py
@@ -51,6 +51,15 @@ MODEL_URLS = {
     "https://paddle-imagenet-models-name.bj.bcebos.com/dygraph/legendary_models/ResNet200_vd_pretrained.pdparams",
 }
 
+MODEL_STAGES_PATTERN = {
+    "ResNet18": ["blocks[1]", "blocks[3]", "blocks[5]", "blocks[7]"],
+    "ResNet34": ["blocks[2]", "blocks[6]", "blocks[12]", "blocks[15]"],
+    "ResNet50": ["blocks[2]", "blocks[6]", "blocks[12]", "blocks[15]"],
+    "ResNet101": ["blocks[2]", "blocks[6]", "blocks[29]", "blocks[32]"],
+    "ResNet152": ["blocks[2]", "blocks[10]", "blocks[46]", "blocks[49]"],
+    "ResNet200": ["blocks[2]", "blocks[14]", "blocks[62]", "blocks[65]"]
+}
+
 __all__ = MODEL_URLS.keys()
 '''
 ResNet config: dict.
@@ -265,12 +274,14 @@ class ResNet(TheseusLayer):
 
     def __init__(self,
                  config,
+                 stages_pattern,
                  version="vb",
                  class_num=1000,
                  lr_mult_list=[1.0, 1.0, 1.0, 1.0, 1.0],
                  data_format="NCHW",
                  input_image_channel=3,
-                 return_patterns=None):
+                 return_patterns=None,
+                 return_stages=None):
         super().__init__()
 
         self.cfg = config
@@ -338,8 +349,11 @@ class ResNet(TheseusLayer):
             weight_attr=ParamAttr(initializer=Uniform(-stdv, stdv)))
 
         self.data_format = data_format
-        if return_patterns is not None:
-            self.update_res(return_patterns)
+
+        super().init_res(
+            stages_pattern,
+            return_patterns=return_patterns,
+            return_stages=return_stages)
 
     def forward(self, x):
         with paddle.static.amp.fp16_guard():
@@ -378,7 +392,11 @@ def ResNet18(pretrained=False, use_ssld=False, **kwargs):
     Returns:
         model: nn.Layer. Specific `ResNet18` model depends on args.
     """
-    model = ResNet(config=NET_CONFIG["18"], version="vb", **kwargs)
+    model = ResNet(
+        config=NET_CONFIG["18"],
+        stages_pattern=MODEL_STAGES_PATTERN["ResNet18"],
+        version="vb",
+        **kwargs)
     _load_pretrained(pretrained, model, MODEL_URLS["ResNet18"], use_ssld)
     return model
 
@@ -393,7 +411,11 @@ def ResNet18_vd(pretrained=False, use_ssld=False, **kwargs):
     Returns:
         model: nn.Layer. Specific `ResNet18_vd` model depends on args.
     """
-    model = ResNet(config=NET_CONFIG["18"], version="vd", **kwargs)
+    model = ResNet(
+        config=NET_CONFIG["18"],
+        stages_pattern=MODEL_STAGES_PATTERN["ResNet18"],
+        version="vd",
+        **kwargs)
     _load_pretrained(pretrained, model, MODEL_URLS["ResNet18_vd"], use_ssld)
     return model
 
@@ -408,7 +430,11 @@ def ResNet34(pretrained=False, use_ssld=False, **kwargs):
     Returns:
         model: nn.Layer. Specific `ResNet34` model depends on args.
     """
-    model = ResNet(config=NET_CONFIG["34"], version="vb", **kwargs)
+    model = ResNet(
+        config=NET_CONFIG["34"],
+        stages_pattern=MODEL_STAGES_PATTERN["ResNet34"],
+        version="vb",
+        **kwargs)
     _load_pretrained(pretrained, model, MODEL_URLS["ResNet34"], use_ssld)
     return model
 
@@ -423,7 +449,11 @@ def ResNet34_vd(pretrained=False, use_ssld=False, **kwargs):
     Returns:
         model: nn.Layer. Specific `ResNet34_vd` model depends on args.
     """
-    model = ResNet(config=NET_CONFIG["34"], version="vd", **kwargs)
+    model = ResNet(
+        config=NET_CONFIG["34"],
+        stages_pattern=MODEL_STAGES_PATTERN["ResNet34"],
+        version="vd",
+        **kwargs)
     _load_pretrained(pretrained, model, MODEL_URLS["ResNet34_vd"], use_ssld)
     return model
 
@@ -438,7 +468,11 @@ def ResNet50(pretrained=False, use_ssld=False, **kwargs):
     Returns:
         model: nn.Layer. Specific `ResNet50` model depends on args.
     """
-    model = ResNet(config=NET_CONFIG["50"], version="vb", **kwargs)
+    model = ResNet(
+        config=NET_CONFIG["50"],
+        stages_pattern=MODEL_STAGES_PATTERN["ResNet50"],
+        version="vb",
+        **kwargs)
     _load_pretrained(pretrained, model, MODEL_URLS["ResNet50"], use_ssld)
     return model
 
@@ -453,7 +487,11 @@ def ResNet50_vd(pretrained=False, use_ssld=False, **kwargs):
     Returns:
         model: nn.Layer. Specific `ResNet50_vd` model depends on args.
     """
-    model = ResNet(config=NET_CONFIG["50"], version="vd", **kwargs)
+    model = ResNet(
+        config=NET_CONFIG["50"],
+        stages_pattern=MODEL_STAGES_PATTERN["ResNet50"],
+        version="vd",
+        **kwargs)
     _load_pretrained(pretrained, model, MODEL_URLS["ResNet50_vd"], use_ssld)
     return model
 
@@ -468,7 +506,11 @@ def ResNet101(pretrained=False, use_ssld=False, **kwargs):
     Returns:
         model: nn.Layer. Specific `ResNet101` model depends on args.
     """
-    model = ResNet(config=NET_CONFIG["101"], version="vb", **kwargs)
+    model = ResNet(
+        config=NET_CONFIG["101"],
+        stages_pattern=MODEL_STAGES_PATTERN["ResNet101"],
+        version="vb",
+        **kwargs)
     _load_pretrained(pretrained, model, MODEL_URLS["ResNet101"], use_ssld)
     return model
 
@@ -483,7 +525,11 @@ def ResNet101_vd(pretrained=False, use_ssld=False, **kwargs):
     Returns:
         model: nn.Layer. Specific `ResNet101_vd` model depends on args.
     """
-    model = ResNet(config=NET_CONFIG["101"], version="vd", **kwargs)
+    model = ResNet(
+        config=NET_CONFIG["101"],
+        stages_pattern=MODEL_STAGES_PATTERN["ResNet101"],
+        version="vd",
+        **kwargs)
     _load_pretrained(pretrained, model, MODEL_URLS["ResNet101_vd"], use_ssld)
     return model
 
@@ -498,7 +544,11 @@ def ResNet152(pretrained=False, use_ssld=False, **kwargs):
     Returns:
         model: nn.Layer. Specific `ResNet152` model depends on args.
     """
-    model = ResNet(config=NET_CONFIG["152"], version="vb", **kwargs)
+    model = ResNet(
+        config=NET_CONFIG["152"],
+        stages_pattern=MODEL_STAGES_PATTERN["ResNet152"],
+        version="vb",
+        **kwargs)
     _load_pretrained(pretrained, model, MODEL_URLS["ResNet152"], use_ssld)
     return model
 
@@ -513,7 +563,11 @@ def ResNet152_vd(pretrained=False, use_ssld=False, **kwargs):
     Returns:
         model: nn.Layer. Specific `ResNet152_vd` model depends on args.
     """
-    model = ResNet(config=NET_CONFIG["152"], version="vd", **kwargs)
+    model = ResNet(
+        config=NET_CONFIG["152"],
+        stages_pattern=MODEL_STAGES_PATTERN["ResNet152"],
+        version="vd",
+        **kwargs)
     _load_pretrained(pretrained, model, MODEL_URLS["ResNet152_vd"], use_ssld)
     return model
 
@@ -528,6 +582,10 @@ def ResNet200_vd(pretrained=False, use_ssld=False, **kwargs):
     Returns:
         model: nn.Layer. Specific `ResNet200_vd` model depends on args.
     """
-    model = ResNet(config=NET_CONFIG["200"], version="vd", **kwargs)
+    model = ResNet(
+        config=NET_CONFIG["200"],
+        stages_pattern=MODEL_STAGES_PATTERN["ResNet200"],
+        version="vd",
+        **kwargs)
     _load_pretrained(pretrained, model, MODEL_URLS["ResNet200_vd"], use_ssld)
     return model
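The pre-existing `return_patterns` path is forwarded to `update_res()` unchanged, so explicit sublayer patterns keep working and bypass the stage-index lookup; a sketch on ResNet50:

```python
from ppcls.arch.backbone.legendary_models.resnet import ResNet50

# Any valid sublayer pattern can be passed, not only the entries
# listed in MODEL_STAGES_PATTERN["ResNet50"].
model = ResNet50(pretrained=False,
                 return_patterns=["blocks[2]", "blocks[12]"])
```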
diff --git a/ppcls/arch/backbone/legendary_models/vgg.py b/ppcls/arch/backbone/legendary_models/vgg.py
index 9316e12d3..74d5cfad6 100644
--- a/ppcls/arch/backbone/legendary_models/vgg.py
+++ b/ppcls/arch/backbone/legendary_models/vgg.py
@@ -31,6 +31,14 @@ MODEL_URLS = {
     "VGG19":
     "https://paddle-imagenet-models-name.bj.bcebos.com/dygraph/legendary_models/VGG19_pretrained.pdparams",
 }
+
+MODEL_STAGES_PATTERN = {
+    "VGG": [
+        "conv_block_1", "conv_block_2", "conv_block_3", "conv_block_4",
+        "conv_block_5"
+    ]
+}
+
 __all__ = MODEL_URLS.keys()
 
 # VGG config
@@ -113,9 +121,11 @@ class VGGNet(TheseusLayer):
 
     def __init__(self,
                  config,
+                 stages_pattern,
                  stop_grad_layers=0,
                  class_num=1000,
-                 return_patterns=None):
+                 return_patterns=None,
+                 return_stages=None):
         super().__init__()
 
         self.stop_grad_layers = stop_grad_layers
@@ -141,8 +151,11 @@ class VGGNet(TheseusLayer):
         self.fc1 = Linear(7 * 7 * 512, 4096)
         self.fc2 = Linear(4096, 4096)
         self.fc3 = Linear(4096, class_num)
-        if return_patterns is not None:
-            self.update_res(return_patterns)
+
+        super().init_res(
+            stages_pattern,
+            return_patterns=return_patterns,
+            return_stages=return_stages)
 
     def forward(self, inputs):
         x = self.conv_block_1(inputs)
@@ -184,7 +197,10 @@ def VGG11(pretrained=False, use_ssld=False, **kwargs):
     Returns:
         model: nn.Layer. Specific `VGG11` model depends on args.
     """
-    model = VGGNet(config=NET_CONFIG[11], **kwargs)
+    model = VGGNet(
+        config=NET_CONFIG[11],
+        stages_pattern=MODEL_STAGES_PATTERN["VGG"],
+        **kwargs)
     _load_pretrained(pretrained, model, MODEL_URLS["VGG11"], use_ssld)
     return model
 
@@ -199,7 +215,10 @@ def VGG13(pretrained=False, use_ssld=False, **kwargs):
     Returns:
         model: nn.Layer. Specific `VGG13` model depends on args.
     """
-    model = VGGNet(config=NET_CONFIG[13], **kwargs)
+    model = VGGNet(
+        config=NET_CONFIG[13],
+        stages_pattern=MODEL_STAGES_PATTERN["VGG"],
+        **kwargs)
     _load_pretrained(pretrained, model, MODEL_URLS["VGG13"], use_ssld)
     return model
 
@@ -214,7 +233,10 @@ def VGG16(pretrained=False, use_ssld=False, **kwargs):
     Returns:
         model: nn.Layer. Specific `VGG16` model depends on args.
     """
-    model = VGGNet(config=NET_CONFIG[16], **kwargs)
+    model = VGGNet(
+        config=NET_CONFIG[16],
+        stages_pattern=MODEL_STAGES_PATTERN["VGG"],
+        **kwargs)
     _load_pretrained(pretrained, model, MODEL_URLS["VGG16"], use_ssld)
     return model
 
@@ -229,6 +251,9 @@ def VGG19(pretrained=False, use_ssld=False, **kwargs):
     Returns:
         model: nn.Layer. Specific `VGG19` model depends on args.
     """
-    model = VGGNet(config=NET_CONFIG[19], **kwargs)
+    model = VGGNet(
+        config=NET_CONFIG[19],
+        stages_pattern=MODEL_STAGES_PATTERN["VGG"],
+        **kwargs)
     _load_pretrained(pretrained, model, MODEL_URLS["VGG19"], use_ssld)
     return model