Every line of 'relu activation function python' code snippets is scanned for vulnerabilities by our powerful machine learning engine that combs millions of open source libraries, ensuring your Python code is secure.
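For reference, the standard ReLU is just the element-wise max(0, x), and its leaky variant scales negative inputs by a small slope. The minimal NumPy sketch below illustrates both; the names relu, leaky_relu and the sample array are illustrative only and do not come from the snippets that follow.

import numpy as np

def relu(x):
    # Standard ReLU: element-wise max(0, x).
    return np.maximum(x, 0)

def leaky_relu(x, alpha=0.01):
    # Leaky ReLU: negative values pass through scaled by alpha.
    return np.where(x < 0, alpha * x, x)

x = np.array([-2.0, -0.5, 0.0, 1.5])
print(relu(x))             # [0.  0.  0.  1.5]
print(leaky_relu(x, 0.2))  # [-0.4 -0.1  0.   1.5]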
def activation(self, z):
    # Identity / linear activation: returns the input unchanged.
    return z
import tensorflow as tf  # TensorFlow 1.x-style API (tf.variable_scope)

def activation_leaky_relu(alpha):
    with tf.variable_scope("leaky_relu"):
        def op(inputs):
            # max(alpha * x, x) is the leaky ReLU for 0 < alpha < 1.
            return tf.maximum(alpha * inputs, inputs, name='leaky_relu')
        return op
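One plausible way to use a factory like this (assuming a TensorFlow 1.x graph, where tf.placeholder and tf.maximum are available) is to build the closure once and then apply it like any other op; the placeholder shape here is purely illustrative.

import tensorflow as tf  # TensorFlow 1.x

x = tf.placeholder(tf.float32, shape=[None, 128])
leaky = activation_leaky_relu(alpha=0.2)  # build the activation closure
h = leaky(x)                              # apply it to a tensor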
import numpy as np

@staticmethod
def relu(input_matrix):
    # Passing input_matrix as the `out` argument applies the ReLU in place.
    output = np.maximum(input_matrix, 0, input_matrix)

    return output
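Note that passing input_matrix as the third (out) argument makes np.maximum write its result back into the input array, so the original negative values are overwritten. A small sketch of the difference, with an illustrative array:

import numpy as np

a = np.array([-1.0, 2.0, -3.0])
b = np.maximum(a, 0)     # out-of-place: a is left unchanged
c = np.maximum(a, 0, a)  # in-place: a itself becomes [0., 2., 0.]
print(a)                 # [0. 2. 0.]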
@layer
def lrelu(self, input, leaky_value, name):
    # Wraps the module-level lrelu helper so it can be registered as a layer.
    return lrelu(input, leaky_value, name=name)
def _activation():
    # Keras-style ReLU activation layer.
    return Activation("relu")
import theano.tensor

def ReLu(x):
    # Element-wise ReLU: 0 where x < 0, x elsewhere.
    return theano.tensor.switch(x < 0, 0, x)
def f(z):
    # Identity activation.
    return z
def activation(self, input_signal):

    """
    Linear activation applied to the input provided

    Args:
        input_signal (numpy.array): the input numpy array

    Returns:
        numpy.array: the output of the linear function applied to the input
    """

    return input_signal
def linear_activation(x):
    return x
import theano.tensor as T

def lrelu(x, alpha=0.2):
    # Theano's built-in relu takes a slope for negative inputs via `alpha`.
    return T.nnet.relu(x, alpha)
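For context, T.nnet.relu(x, alpha) computes alpha * x for negative inputs and x otherwise, so this lrelu matches the tf.maximum(alpha * inputs, inputs) formulation above for 0 < alpha < 1. A minimal usage sketch, assuming the conventional import theano.tensor as T and a compiled Theano function:

import numpy as np
import theano
import theano.tensor as T

x = T.vector('x')
f = theano.function([x], lrelu(x, alpha=0.2))
print(f(np.array([-2.0, 0.5], dtype=theano.config.floatX)))  # [-0.4  0.5]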