From bc4ef4861b7649d2743b431f2bd68d32acf3af60 Mon Sep 17 00:00:00 2001 From: Alex Stoken Date: Wed, 24 Jun 2020 10:07:43 -0500 Subject: [PATCH] Default optimizer SGD --- train.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/train.py b/train.py index 73456a065..a3f43d6db 100644 --- a/train.py +++ b/train.py @@ -20,7 +20,7 @@ except: # Hyperparameters -hyp = {'optimizer': 'adam' #if none, default is SGD +hyp = {'optimizer': 'SGD', # ['adam', 'SGD', None] if none, default is SGD 'lr0': 0.01, # initial learning rate (SGD=1E-2, Adam=1E-3) 'momentum': 0.937, # SGD momentum/Adam beta1 'weight_decay': 5e-4, # optimizer weight decay