From c5525a3049f69b14aa02c60de23c2ddffbe87807 Mon Sep 17 00:00:00 2001
From: fanqiNO1 <1848839264@qq.com>
Date: Thu, 13 Jul 2023 18:50:37 +0800
Subject: [PATCH] [Fix] Override __getattr__

---
 mmpretrain/models/peft/lora.py | 12 +++++++-----
 1 file changed, 7 insertions(+), 5 deletions(-)

diff --git a/mmpretrain/models/peft/lora.py b/mmpretrain/models/peft/lora.py
index 0e6a6596..b33c3bb6 100644
--- a/mmpretrain/models/peft/lora.py
+++ b/mmpretrain/models/peft/lora.py
@@ -1,7 +1,7 @@
 # Copyright (c) OpenMMLab. All rights reserved.
 import math
 import re
-from typing import List
+from typing import Any, List
 
 import torch
 from mmengine.logging import print_log
@@ -195,9 +195,11 @@ class LoRAModel(BaseModule):
 
         self.register_load_state_dict_post_hook(_load_state_dict_post_hook)
 
-    def get_layer_depth(self, param_name: str, prefix: str = ''):
-        """Get the layer-wise depth of a parameter for learning rate decay."""
-        return self.module.get_layer_depth(param_name, prefix)
-
     def forward(self, *args, **kwargs):
         return self.module(*args, **kwargs)
+
+    def __getattr__(self, name: str) -> Any:
+        try:
+            return super(LoRAModel, self).__getattr__(name)
+        except AttributeError:
+            return self.module.__getattribute__(name)
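
For context on why this fix works: torch.nn.Module.__getattr__ only resolves names registered in _parameters, _buffers, and _modules, and raises AttributeError for everything else, so the try/except falls through to the wrapped module. This lets the explicit get_layer_depth forwarder be removed, since any attribute of the inner module now stays reachable through the wrapper. Below is a minimal self-contained sketch of the same delegation pattern; the Wrapper and Backbone classes and the (0, 1) return value are illustrative stand-ins, not code from the patch:

from typing import Any

import torch
from torch import nn


class Backbone(nn.Module):
    """Illustrative inner module with an extra helper method."""

    def __init__(self):
        super().__init__()
        self.linear = nn.Linear(4, 4)

    def forward(self, x):
        return self.linear(x)

    def get_layer_depth(self, param_name: str, prefix: str = ''):
        # Dummy depth info; a real backbone computes this per parameter.
        return 0, 1


class Wrapper(nn.Module):
    """Stand-in for LoRAModel: wraps a module and delegates attributes."""

    def __init__(self, module: nn.Module):
        super().__init__()
        self.module = module

    def forward(self, *args, **kwargs):
        return self.module(*args, **kwargs)

    def __getattr__(self, name: str) -> Any:
        try:
            # nn.Module.__getattr__ checks _parameters, _buffers and
            # _modules; this is also how self.module itself is found.
            return super().__getattr__(name)
        except AttributeError:
            # Anything the wrapper does not own is looked up on the
            # wrapped module instead.
            return self.module.__getattribute__(name)


wrapped = Wrapper(Backbone())
print(wrapped.get_layer_depth('linear.weight'))  # delegated -> (0, 1)
print(wrapped(torch.ones(1, 4)).shape)           # torch.Size([1, 4])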