fix and enable flake8 E721 (#12258)

pull/12520/head
Wang Xin 2024-05-27 11:12:05 +08:00 committed by GitHub
parent 739400f151
commit ea29b423d5
No known key found for this signature in database
GPG Key ID: B5690EEEBB952194
3 changed files with 12 additions and 12 deletions

View File

@@ -43,7 +43,7 @@ repos:
- id: flake8
args:
- --count
- --select=E9,F63,F7,F82
- --select=E9,F63,F7,F82,E721
- --show-source
- --statistics
exclude: ^benchmark/|^test_tipc/

View File

@@ -78,9 +78,9 @@ class DistillationDMLLoss(DMLLoss):
def _check_maps_name(self, maps_name):
if maps_name is None:
return None
elif type(maps_name) == str:
elif isinstance(maps_name, str):
return [maps_name]
elif type(maps_name) == list:
elif isinstance(maps_name, list):
return [maps_name]
else:
return None
@@ -174,9 +174,9 @@ class DistillationKLDivLoss(KLDivLoss):
def _check_maps_name(self, maps_name):
if maps_name is None:
return None
elif type(maps_name) == str:
elif isinstance(maps_name, str):
return [maps_name]
elif type(maps_name) == list:
elif isinstance(maps_name, list):
return [maps_name]
else:
return None
@@ -282,9 +282,9 @@ class DistillationDKDLoss(DKDLoss):
def _check_maps_name(self, maps_name):
if maps_name is None:
return None
elif type(maps_name) == str:
elif isinstance(maps_name, str):
return [maps_name]
elif type(maps_name) == list:
elif isinstance(maps_name, list):
return [maps_name]
else:
return None
@@ -428,9 +428,9 @@ class DistillationKLDivLoss(KLDivLoss):
def _check_maps_name(self, maps_name):
if maps_name is None:
return None
elif type(maps_name) == str:
elif isinstance(maps_name, str):
return [maps_name]
elif type(maps_name) == list:
elif isinstance(maps_name, list):
return [maps_name]
else:
return None
@@ -536,9 +536,9 @@ class DistillationDKDLoss(DKDLoss):
def _check_maps_name(self, maps_name):
if maps_name is None:
return None
elif type(maps_name) == str:
elif isinstance(maps_name, str):
return [maps_name]
elif type(maps_name) == list:
elif isinstance(maps_name, list):
return [maps_name]
else:
return None

View File

@@ -209,7 +209,7 @@ def train(
if "global_step" in pre_best_model_dict:
global_step = pre_best_model_dict["global_step"]
start_eval_step = 0
if type(eval_batch_step) == list and len(eval_batch_step) >= 2:
if isinstance(eval_batch_step, list) and len(eval_batch_step) >= 2:
start_eval_step = eval_batch_step[0] if not eval_batch_epoch else 0
eval_batch_step = (
eval_batch_step[1]