Every line of 'from keras.optimizers import adam' code snippets is scanned for vulnerabilities by our powerful machine learning engine that combs millions of open source libraries, ensuring your Python code is secure.
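For reference, the import named in the query comes from the standalone Keras package: in Keras 2.x, keras.optimizers exposed lowercase aliases such as adam for the Adam class, while newer releases expect the capitalized class name. A minimal sketch, assuming standalone Keras 2.x; the model layers and hyperparameter values are placeholders, not taken from the snippet below:

from keras.models import Sequential
from keras.layers import Dense
from keras.optimizers import Adam  # older Keras 2.x also accepted the lowercase alias `adam`

# Illustrative model; any Keras model is compiled the same way.
model = Sequential([
    Dense(16, activation='relu', input_shape=(4,)),
    Dense(1),
])

# Pass an optimizer instance with an explicit learning rate.
model.compile(optimizer=Adam(lr=1e-3), loss='mse')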
The indexed snippet below is a Chainer helper (it uses chainer.optimizers, not keras.optimizers) that builds and configures an optimizer selected by name:

import chainer
from chainer import optimizers


def get_optimizer(model, opt, lr=None, adam_alpha=None, adam_beta1=None,
                  adam_beta2=None, adam_eps=None, weight_decay=None):
    """Create a Chainer optimizer selected by name and set it up on `model`."""
    if opt == 'MomentumSGD':
        optimizer = optimizers.MomentumSGD(lr=lr, momentum=0.9)
    elif opt == 'Adam':
        optimizer = optimizers.Adam(
            alpha=adam_alpha, beta1=adam_beta1,
            beta2=adam_beta2, eps=adam_eps)
    elif opt == 'AdaGrad':
        optimizer = optimizers.AdaGrad(lr=lr)
    elif opt == 'RMSprop':
        optimizer = optimizers.RMSprop(lr=lr)
    else:
        raise Exception('No optimizer is selected')

    # The first model as the master model
    optimizer.setup(model)
    if opt == 'MomentumSGD':
        # Weight decay regularization is attached only for MomentumSGD.
        optimizer.add_hook(
            chainer.optimizer.WeightDecay(weight_decay))

    return optimizer
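A minimal usage sketch of the helper above, assuming Chainer is installed; the L.Linear model and the hyperparameter values are illustrative, not part of the original source:

import chainer.links as L

# Any chainer.Link or Chain can stand in for `model`.
model = L.Linear(10, 2)

# Adam: the helper forwards alpha/beta1/beta2/eps to chainer.optimizers.Adam.
adam_opt = get_optimizer(model, 'Adam',
                         adam_alpha=0.001, adam_beta1=0.9,
                         adam_beta2=0.999, adam_eps=1e-8)

# MomentumSGD: this branch also attaches the WeightDecay hook,
# so a weight_decay rate must be supplied.
sgd_opt = get_optimizer(model, 'MomentumSGD', lr=0.01, weight_decay=1e-4)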