# 10 examples of 'binary cross entropy keras' in Python

Every line of 'binary cross entropy keras' code snippets is scanned for vulnerabilities by our powerful machine learning engine that combs millions of open source libraries, ensuring your Python code is secure.

## All examples are scanned by Snyk Code

```python
def cross_entropy(self, y):
    return T.nnet.categorical_crossentropy(self.p_y_given_x, y)
```
```python
def weighted_binary_cross_entropy(targets, predictions, class_weights):
    predictions = tf.clip_by_value(predictions, 1e-7, 1 - 1e-7)
    return tf.reduce_mean(
        class_weights * (targets * tf.log(predictions) + (1 - targets) * tf.log(1 - predictions)),
        axis=get_reduce_axis(targets),
    )
```
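On a current TensorFlow/Keras stack, a similar effect is available through the built-in `tf.nn.weighted_cross_entropy_with_logits`, which takes raw logits and a single `pos_weight` factor rather than per-element `class_weights`. A minimal sketch with made-up example tensors (not part of the snippet above):

```python
import tensorflow as tf

# Hypothetical labels and raw (pre-sigmoid) logits for 4 samples.
labels = tf.constant([0., 1., 1., 0.])
logits = tf.constant([-1.2, 0.8, 2.0, -0.3])

# pos_weight > 1 up-weights the positive class, loosely analogous to class_weights above.
loss = tf.nn.weighted_cross_entropy_with_logits(labels=labels, logits=logits, pos_weight=2.0)
print(float(tf.reduce_mean(loss)))
```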
```python
def _cross_entropy(self, x, labels):
    x = tf.reshape(x, [-1, self.num_classes])
    # Sum over the class axis (axis 1 after the reshape to [-1, num_classes]).
    cross_entropy = -tf.reduce_sum(
        (labels * tf.math.log(tf.clip_by_value(x, 1e-10, 1.0))),
        axis=1,
    )
    cross_entropy_mean = tf.reduce_mean(cross_entropy, name="cross_entropy_mean")

    return cross_entropy_mean
```
```python
def cross_entropy(self, y):
    if self.mini_batch:
        return T.mean(T.sum(T.nnet.categorical_crossentropy(self.y_t, y), axis=1))  # naive batch-normalization
    else:
        return T.sum(T.nnet.categorical_crossentropy(self.y_t, y))
```
```python
def binary_crossentropy(predictions, targets):
    """Computes the binary cross-entropy between predictions and targets.

    .. math:: L = -t \\log(p) - (1 - t) \\log(1 - p)

    Parameters
    ----------
    predictions : Theano tensor
        Predictions in (0, 1), such as sigmoidal output of a neural network.
    targets : Theano tensor
        Targets in [0, 1], such as ground truth labels.

    Returns
    -------
    Theano tensor
        An expression for the element-wise binary cross-entropy.

    Notes
    -----
    This is the loss function of choice for binary classification problems
    and sigmoid output units.
    """
    predictions, targets = align_targets(predictions, targets)
    return theano.tensor.nnet.binary_crossentropy(predictions, targets)
```
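For the Keras case named in this page's title, the same element-wise loss is exposed as a built-in. A minimal sketch using `tf.keras.losses.BinaryCrossentropy`; the labels and probabilities below are illustrative assumptions:

```python
import tensorflow as tf

# Hypothetical batch of 4 binary labels with sigmoid-style probabilities.
y_true = tf.constant([[0.], [1.], [1.], [0.]])
y_pred = tf.constant([[0.1], [0.8], [0.6], [0.3]])

# Built-in Keras loss object; from_logits=False because y_pred are probabilities.
bce = tf.keras.losses.BinaryCrossentropy(from_logits=False)
print(float(bce(y_true, y_pred)))  # scalar loss averaged over the batch

# Functional form: one loss value per sample (mean over the last axis).
print(tf.keras.losses.binary_crossentropy(y_true, y_pred))  # shape (4,)
```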
```python
def cross_entropy(self, y_true, y_pred):
    y_pred = K.maximum(K.minimum(y_pred, 1 - 1e-15), 1e-15)
    cross_entropy_loss = - K.sum(y_true * K.log(y_pred), axis=-1)
    return cross_entropy_loss
```
```python
def WeightedBinaryCrossEntropy(x_true, eps):
    def WeightedBinaryCrossEntropy_(y_true, y_pred):
        err = -((y_true * K.log(y_pred)) + ((1 - y_true) * K.log(1 - y_pred)))

        probs = K.mean(x_true, axis=(1, 2, 3), keepdims=True)
        weights_pos, weights_neg = 1. / (probs + eps), 1. / ((1 - probs) + eps)
        weights = (x_true * weights_pos) + ((1 - x_true) * weights_neg)

        return K.mean(err * weights)

    return WeightedBinaryCrossEntropy_
```
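Since the outer function returns a closure with the standard `(y_true, y_pred)` signature, it can be called like any other Keras loss. A hedged usage sketch on made-up tensors; the mask and prediction arrays are assumptions, and `WeightedBinaryCrossEntropy` refers to the snippet above:

```python
import numpy as np
import tensorflow as tf
from tensorflow.keras import backend as K

# Hypothetical batch of 4 single-channel 8x8 ground-truth masks and predictions.
x_true = tf.constant(np.random.randint(0, 2, size=(4, 8, 8, 1)).astype("float32"))
y_pred = tf.constant(np.random.uniform(0.01, 0.99, size=(4, 8, 8, 1)).astype("float32"))

# Build the loss closure; here the weighting tensor is the ground truth itself.
loss_fn = WeightedBinaryCrossEntropy(x_true, eps=1e-7)
print(float(loss_fn(x_true, y_pred)))  # scalar weighted BCE for the batch
```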
```python
def cross_entropy(logit, prob):
    return K.sum(prob * K.tf.nn.log_softmax(logit), axis=1)
```
```python
def cross_entropy_loss(y, yhat):
  """
  Compute the cross entropy loss in tensorflow.

  y is a one-hot tensor of shape (n_samples, n_classes) and yhat is a tensor
  of shape (n_samples, n_classes). y should be of dtype tf.int32, and yhat should
  be of dtype tf.float32.

  The functions tf.to_float, tf.reduce_sum, and tf.log might prove useful. (Many
  solutions are possible, so you may not need to use all of these functions).

  Note: You are NOT allowed to use the tensorflow built-in cross-entropy
        functions.

  Args:
    y:    tf.Tensor with shape (n_samples, n_classes). One-hot encoded.
    yhat: tf.Tensor with shape (n_samples, n_classes). Each row encodes a
          probability distribution and should sum to 1.
  Returns:
    out:  tf.Tensor with shape (1,) (Scalar output). You need to construct this
          tensor in the problem.
  """
  ### YOUR CODE HERE
  out = tf.reduce_sum(-tf.to_float(y) * tf.log(yhat))
  ### END YOUR CODE
  return out
```
```python
def batch_crossentropy(label, logits):
    """Calculates the cross-entropy for a batch of logits.

    Parameters
    ----------
    logits : array_like
        The logits predicted by the model for a batch of inputs.
    label : int
        The label describing the target distribution.

    Returns
    -------
    np.ndarray
        The cross-entropy between softmax(logits[i]) and onehot(label)
        for all i.

    """

    assert logits.ndim == 2

    # for numerical reasons we subtract the max logit
    # (mathematically it doesn't matter!)
    # otherwise exp(logits) might become too large or too small
    logits = logits - np.max(logits, axis=1, keepdims=True)
    e = np.exp(logits)
    s = np.sum(e, axis=1)
    ces = np.log(s) - logits[:, label]
    return ces
```
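A quick usage sketch for the NumPy helper above; the logits below are made-up values:

```python
import numpy as np

# Hypothetical batch of 3 inputs scored over 4 classes.
logits = np.array([[ 2.0, 0.5, -1.0, 0.1],
                   [ 0.2, 3.1,  0.0, 1.5],
                   [-0.5, 0.0,  0.3, 2.2]])

# Cross-entropy of softmax(logits[i]) against the one-hot target class 1.
print(batch_crossentropy(label=1, logits=logits))  # array of 3 per-sample values
```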