Binary Cross-Entropy (BCE) is a common loss function used in binary classification problems. It is also known as the log loss function.
Advantages
Disadvantages
import numpy as np
def binary_cross_entropy(y, y_hat, eps=1e-12):
    """Compute the mean binary cross-entropy (log loss).

    Parameters
    ----------
    y : array_like
        Ground-truth labels in {0, 1}.
    y_hat : array_like
        Predicted probabilities in [0, 1], same shape as ``y``.
    eps : float, optional
        Small constant used to clip ``y_hat`` away from 0 and 1 so the
        logarithms stay finite (the unclipped formula yields -inf/nan at
        the boundaries). Default 1e-12.

    Returns
    -------
    float
        Mean of ``-[y*log(y_hat) + (1-y)*log(1-y_hat)]`` over all elements.
    """
    y = np.asarray(y, dtype=float)
    # Clip predictions into (eps, 1 - eps) for numerical stability.
    y_hat = np.clip(np.asarray(y_hat, dtype=float), eps, 1.0 - eps)
    return -np.mean(y * np.log(y_hat) + (1 - y) * np.log(1 - y_hat))
# Mean binary cross-entropy (log loss).
#
# @param y     Numeric vector of ground-truth labels in {0, 1}.
# @param y_hat Numeric vector of predicted probabilities in (0, 1).
# @return A single numeric value: the averaged negative log-likelihood.
binary_cross_entropy <- function(y, y_hat) {
  # Contribution from the positive (y = 1) and negative (y = 0) classes.
  pos_term <- y * log(y_hat)
  neg_term <- (1 - y) * log(1 - y_hat)
  -mean(pos_term + neg_term)
}
"""
    binary_cross_entropy(y, y_hat)

Mean binary cross-entropy (log loss) between labels `y` (0/1) and
predicted probabilities `y_hat`, both arrays of the same shape.
"""
function binary_cross_entropy(y, y_hat)
    # `@.` broadcasts every operation elementwise over the inputs.
    losses = @. y * log(y_hat) + (1 - y) * log(1 - y_hat)
    return -mean(losses)
end
from sklearn.metrics import log_loss
def binary_cross_entropy(y, y_hat):
    """Mean binary cross-entropy via scikit-learn.

    Thin wrapper around ``sklearn.metrics.log_loss``; ``y`` holds the
    ground-truth labels and ``y_hat`` the predicted probabilities.
    """
    return log_loss(y_true=y, y_pred=y_hat)
import tensorflow as tf
def binary_cross_entropy(y, y_hat):
    """Binary cross-entropy via Keras.

    Delegates to ``tf.keras.losses.binary_crossentropy``, which takes the
    true labels ``y`` first and the predicted probabilities ``y_hat`` second.
    """
    loss = tf.keras.losses.binary_crossentropy(y, y_hat)
    return loss
import torch
def binary_cross_entropy(y, y_hat):
    """Mean binary cross-entropy via PyTorch.

    Parameters
    ----------
    y : torch.Tensor
        Ground-truth labels in {0, 1} (float tensor).
    y_hat : torch.Tensor
        Predicted probabilities in [0, 1], same shape as ``y``.

    Returns
    -------
    torch.Tensor
        Scalar tensor with the mean loss.

    Note
    ----
    ``torch.nn.functional.binary_cross_entropy(input, target)`` expects the
    *predictions* first and the *targets* second; the previous code passed
    ``(y, y_hat)``, treating the labels as predictions. The argument order
    is fixed here to ``(y_hat, y)``.
    """
    return torch.nn.functional.binary_cross_entropy(y_hat, y)