From 9112ba2d783edc3d44371b81403646bc9a22cab3 Mon Sep 17 00:00:00 2001
From: quadri-haider-ali
Date: Tue, 6 Oct 2020 20:09:19 +0530
Subject: [PATCH] Add SparseCategoricalCrossEntropy loss function

---
 MLlib/loss_func.py | 41 +++++++++++++++++++++++++++++++++++++++++
 1 file changed, 41 insertions(+)

diff --git a/MLlib/loss_func.py b/MLlib/loss_func.py
index 4020c9d..c4f12e3 100644
--- a/MLlib/loss_func.py
+++ b/MLlib/loss_func.py
@@ -1,5 +1,6 @@
 import numpy as np
 from activations import sigmoid
+from activations import softmax
 
 
 class MeanSquaredError():
@@ -38,3 +39,43 @@
     def derivative(X,Y,W):
         AbsError=(np.dot(X,W).T-Y)
         return np.dot(np.divide(AbsError,np.absolute(AbsError),out=np.zeros_like(AbsError), where=(np.absolute(AbsError))!=0),X).T/M
+class SparseCategoricalCrossEntropy():
+
+    @staticmethod
+    def loss(X, Y, W, n):
+        # n is the total number of classes; W has shape (X.shape[1], n).
+        # Encode the integer labels in Y as one-hot rows.
+        Yprime = []
+        for ydash in Y:
+            for y in ydash:
+                y = int(y)
+                Yprime.append([0]*y + [1] + [0]*(n-y-1))
+        Yprime = np.array(Yprime)
+        # Apply a row-wise softmax to the class scores.
+        H = np.dot(X, W)
+        for i in range(len(H)):
+            H[i] = softmax(H[i])
+        # Sum the negative log-probability of each sample's true class.
+        loss = 0
+        for i in range(len(Yprime)):
+            for j in range(len(Yprime[i])):
+                if Yprime[i][j] == 1:
+                    loss -= np.log(H[i][j])
+        return loss
+
+    @staticmethod
+    def derivative(X, Y, W):
+        # Gradient of the loss w.r.t. W:
+        # (1/M) * X.T (softmax(XW) - Yprime), which has W's shape.
+        M = X.shape[0]
+        n = W.shape[1]
+        Yprime = []
+        for ydash in Y:
+            for y in ydash:
+                y = int(y)
+                Yprime.append([0]*y + [1] + [0]*(n-y-1))
+        Yprime = np.array(Yprime)
+        H = np.dot(X, W)
+        for i in range(len(H)):
+            H[i] = softmax(H[i])
+        return (1/M)*np.dot(X.T, H - Yprime)
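
Reviewer note (not part of the commit): a minimal usage sketch of the new class, assuming softmax in activations.py accepts a 1-D score vector and that labels arrive as a column vector of integer class indices; the data values and the 0.1 learning rate below are made up for illustration.

    import numpy as np
    from MLlib.loss_func import SparseCategoricalCrossEntropy

    X = np.array([[1.0, 2.0], [0.5, 1.5], [2.0, 0.5]])  # 3 samples, 2 features
    Y = np.array([[0.], [2.], [1.]])                    # one class index per sample
    W = np.zeros((2, 3))                                # weights for n = 3 classes

    loss = SparseCategoricalCrossEntropy.loss(X, Y, W, 3)
    grad = SparseCategoricalCrossEntropy.derivative(X, Y, W)
    W = W - 0.1 * grad                                  # one gradient-descent step
    print(loss, grad.shape)                             # grad.shape == (2, 3) == W.shape

derivative infers n from W.shape[1] rather than taking it as a parameter, so its (X, Y, W) signature stays consistent with the other loss classes in loss_func.py.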