Every line of these 'pytorch dropout example' code snippets is scanned for vulnerabilities by our powerful machine learning engine, which combs millions of open source libraries to help ensure your Python code is secure.
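Before the framework-specific snippets below, here is a minimal sketch of dropout in plain PyTorch; torch.nn.Dropout is the standard API, and the tiny model around it is purely illustrative:

import torch
import torch.nn as nn

# nn.Dropout zeroes each element with probability p during training
# and rescales the survivors by 1/(1 - p); at eval time it is a no-op.
model = nn.Sequential(
    nn.Linear(128, 64),
    nn.ReLU(),
    nn.Dropout(p=0.5),
    nn.Linear(64, 10),
)

x = torch.randn(32, 128)

model.train()   # dropout active
train_out = model(x)

model.eval()    # dropout disabled
eval_out = model(x)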
def test_dropout(self):
    input_sample_shape = (64, 1, 12)
    dropout = layer.Dropout('drop', input_sample_shape=input_sample_shape)
    out_sample_shape = dropout.get_output_sample_shape()
    # Dropout must preserve the input sample shape.
    self.check_shape(out_sample_shape, input_sample_shape)
import chainer

def dropout(x, ratio=.5, **kwargs):
    """Dropout regularization.

    Even though dropout scales its input at training time,
    we do not account for it in the Lipschitz constant.

    :param x: input triple (vector/tensor, label, lipschitz)
    :param ratio: dropout ratio
    :return: triple (output, label, lipschitz)
    """
    x, t, l = x
    x = chainer.functions.dropout(x, ratio=ratio, **kwargs)
    return x, t, l
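The docstring's remark about scaling at training time refers to inverted dropout: surviving activations are multiplied by 1/(1 - ratio) so the expected value of each unit is unchanged. A quick NumPy illustration of that convention (not from the library above):

import numpy as np

rng = np.random.default_rng(0)
x = np.ones(1_000_000)
ratio = 0.5

# Inverted dropout: zero each unit with probability `ratio`,
# then rescale the survivors by 1/(1 - ratio).
mask = rng.random(x.shape) >= ratio
y = x * mask / (1.0 - ratio)

print(y.mean())  # ~1.0: the expected activation is preserved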
# Assumes PyTorch and MinkowskiEngine (for SparseTensor) are available.
import torch.nn.functional as F
from MinkowskiEngine import SparseTensor

def dropout(input, *args, **kwargs):
    # Apply standard dropout to the features of a sparse tensor,
    # then rewrap the result with the original coordinate metadata.
    output = F.dropout(input.F, *args, **kwargs)
    return SparseTensor(output, coords_key=input.coords_key, coords_manager=input.coords_man)
def forward(self, input_tensor):
    # Embedding dropout is applied only in training mode.
    emb = self.encoder_with_dropout(input_tensor, dropout=self.dropout_embedding if self.training else 0)
    return self.dropout_input(emb)
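The encoder_with_dropout call above follows the AWD-LSTM pattern, where dropout is applied at the embedding lookup and switched off outside training. A simplified, self-contained sketch of word-level embedding dropout in that style (the function name and details here are illustrative, not the original implementation):

import torch
import torch.nn as nn
import torch.nn.functional as F

def embedded_dropout(embed: nn.Embedding, words: torch.Tensor, dropout: float) -> torch.Tensor:
    # Zero entire rows of the embedding weight matrix with probability
    # `dropout`, rescaling the surviving rows, so whole words are dropped.
    if dropout <= 0:
        return embed(words)
    mask = embed.weight.new_empty((embed.num_embeddings, 1)).bernoulli_(1 - dropout) / (1 - dropout)
    return F.embedding(words, embed.weight * mask)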
import tensorflow as tf

def dropout(x, keep_prob, train=True):
    """During training, performs dropout. Otherwise, returns the input unchanged."""
    # TF1-style API: the second argument of tf.nn.dropout is keep_prob.
    # The `train` flag is assumed here; it gates whether dropout is applied.
    output = tf.nn.dropout(x, keep_prob) if train else x
    return output
# Declares `dropout` as an optional hyperparameter with default 0.0.
@hparams(default=0.0, required=False)
def dropout(self):
    pass
import tensorflow as tf

def Dropout(x, rate, training):
    # tf.layers.dropout drops units with probability `rate` when training=True.
    return tf.layers.dropout(inputs=x, rate=rate, training=training)
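Note the two conventions in TensorFlow: tf.nn.dropout in TF1 took keep_prob (the probability of keeping a unit), while tf.layers.dropout and the current tf.keras.layers.Dropout take rate (the probability of dropping one), so rate = 1 - keep_prob. A rough TF2 equivalent of the wrapper above (assuming TensorFlow 2.x, where tf.layers was removed):

import tensorflow as tf

def dropout_layer(x, rate, training):
    # tf.keras.layers.Dropout drops units with probability `rate`
    # when called with training=True.
    return tf.keras.layers.Dropout(rate)(x, training=training)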
def test_dropout():
    """Tests whether the dropout layer reads in the probability correctly."""
    # With dropout ~1, almost every activation is zeroed, so the network
    # should fail to fit even a simple problem.
    rnn = RNN(layers_info=[["lstm", 20], ["gru", 10], ["linear", 20], ["linear", 1]],
              hidden_activations="relu", output_activation="sigmoid", dropout=0.9999,
              initialiser="xavier")
    assert rnn.dropout_layer.rate == 0.9999
    assert not solves_simple_problem(X, y, rnn)

    # With dropout ~0, dropout is effectively a no-op and learning succeeds.
    rnn = RNN(layers_info=[["lstm", 20], ["gru", 10], ["linear", 20], ["linear", 1]],
              hidden_activations="relu", output_activation=None, dropout=0.0000001,
              initialiser="xavier")
    assert rnn.dropout_layer.rate == 0.0000001
    assert solves_simple_problem(X, y, rnn)
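The test relies on dropout probabilities at the extremes: a rate near 1 zeroes essentially every activation, leaving no signal to learn from, while a rate near 0 is effectively a no-op. A small standalone PyTorch check of that behaviour (illustrative, not part of the test suite above):

import torch
import torch.nn as nn

x = torch.ones(10_000)

heavy = nn.Dropout(p=0.9999)
light = nn.Dropout(p=1e-7)
heavy.train()
light.train()

print((heavy(x) != 0).float().mean())  # ~0.0001: almost everything dropped
print((light(x) != 0).float().mean())  # ~1.0: dropout effectively off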