diff --git a/common/functions.py b/common/functions.py
index a1d4f712..514def2b 100644
--- a/common/functions.py
+++ b/common/functions.py
@@ -53,7 +53,7 @@ def cross_entropy_error(y, t):
         t = t.argmax(axis=1)
 
     batch_size = y.shape[0]
-    return -np.sum(np.log(y[np.arange(batch_size), t])) / batch_size
+    return -np.sum(np.log(y[np.arange(batch_size), t] + 1e-7)) / batch_size
 
 
 def softmax_loss(X, t):
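
The added `1e-7` guards against taking `np.log(0)`, which returns `-inf` and makes the averaged loss non-finite whenever the network assigns zero probability to the true class. Below is a minimal standalone sketch (not part of the patched file) illustrating the failure mode and the effect of the epsilon; the array values are made up for demonstration.

```python
import numpy as np

def cross_entropy_error(y, t):
    # Simplified mirror of the patched function: t holds integer class indices.
    batch_size = y.shape[0]
    return -np.sum(np.log(y[np.arange(batch_size), t] + 1e-7)) / batch_size

# A prediction that puts zero probability on the true class (index 0).
y = np.array([[0.0, 1.0, 0.0]])
t = np.array([0])

print(np.log(y[np.arange(1), t]))  # [-inf] without the epsilon (divide-by-zero warning)
print(cross_entropy_error(y, t))   # ~16.12: large but finite, so training can continue
```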