Commit
Add files via upload
AlbiCarle authored Feb 8, 2022
1 parent fb093c7 commit e30fbaf
Showing 12 changed files with 619 additions and 0 deletions.
17 changes: 17 additions & 0 deletions KernelMatrix.m
@@ -0,0 +1,17 @@
function K = KernelMatrix(X1, X2, kernel, param)

% Usage: K = KernelMatrix(X1, X2, kernel, param)
% X1 and X2 are the two collections of points on which to compute the Gram matrix

% kernel = 'linear', 'polynomial', 'gaussian'
% param: degree of the polynomial kernel, or width sigma of the gaussian kernel
if isempty(kernel)
kernel = 'linear';   % default to the linear kernel
end
if isequal(kernel, 'linear')
K = X1*X2';
elseif isequal(kernel, 'polynomial')
K = (1 + X1*X2').^param;
elseif isequal(kernel, 'gaussian')
K = exp(-1/(2*param^2)*SquareDist(X1,X2));
end
end
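A minimal usage sketch (the data below is made up for illustration; rows are points, so K is 5-by-3):

X1 = randn(5,2); X2 = randn(3,2);            % two illustrative point sets in R^2
K = KernelMatrix(X1, X2, 'gaussian', 0.5);   % 5-by-3 Gram matrix with width sigma = 0.5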
29 changes: 29 additions & 0 deletions MixGauss.m
@@ -0,0 +1,29 @@
function [X, Y] = MixGauss(means, sigmas, n)

% Usage: [X, Y] = MixGauss(means, sigmas, n)
% means: d-by-p matrix whose columns are the class centres
% sigmas: 1-by-p vector of per-class standard deviations
% n: number of points per class
% X: (n*p)-by-d data matrix; Y: (n*p)-by-1 labels in {0, ..., p-1}

% EXAMPLE: [X, Y] = MixGauss([[0;0],[1;1]], [0.5, 0.25], 1000);
% generates a 2D dataset with two classes, the first centred on (0,0)
% with standard deviation 0.5 and the second centred on (1,1) with
% standard deviation 0.25; each class contains 1000 points.

d = size(means,1);
p = size(means,2);

X = [];
Y = [];
for i = 1:p
m = means(:,i);
S = sigmas(i);
Xi = zeros(n,d);
Yi = zeros(n,1);
for j = 1:n
x = S*randn(d,1) + m;   % draw from N(m, S^2 * I)
Xi(j,:) = x;
Yi(j) = i-1;            % classes are labelled 0, 1, ..., p-1
end
X = [X; Xi];
Y = [Y; Yi];
end
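A usage sketch following the example in the header (the scatter call is illustrative):

[X, Y] = MixGauss([[0;0],[1;1]], [0.5, 0.25], 1000);
scatter(X(:,1), X(:,2), 10, Y, 'filled');   % colour the 2000 points by class label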


69 changes: 69 additions & 0 deletions OptimiseParam_NSVDD.m
@@ -0,0 +1,69 @@
function [s, Vm, Vs, Tm, Ts] = OptimiseParam_NSVDD(X, Y, kernel, perc, nrip, intKerPar, C1, C2)

%Usage

%[s, Vm, Vs, Tm, Ts] = OptimiseParam_NSVDD(X, Y, kernel, perc, nrip, intKerPar, C1, C2)
% X: training set
% Y: labels of training set
% kernel: 'linear', 'gaussian', 'polynomial'
% perc: fraction of the dataset (between 0 and 1) held out for validation
% nrip: number of random splits per parameter value
% intKerPar: list of candidate kernel parameters
% C1, C2: SVDD weights
%
% Output:
% s: kernel parameter that minimizes the median validation error
% Vm, Vs: median and standard deviation of the validation error for each parameter
% Tm, Ts: median and standard deviation of the training error for each parameter


nKerPar = numel(intKerPar);

n = size(X,1);
ntr = ceil(n*(1-perc));

tmn = zeros(nKerPar, nrip);
vmn = zeros(nKerPar, nrip);

for rip = 1:nrip
I = randperm(n);
Xtr = X(I(1:ntr),:);
Ytr = Y(I(1:ntr),:);
Xvl = X(I(ntr+1:end),:);
Yvl = Y(I(ntr+1:end),:);


is = 0;
for param=intKerPar
is = is + 1;

[alpha, Rsquared,~,~,~] = ...
SVDD_N1C_TRAINING(Xtr, Ytr, kernel, param, C1, C2, 'off');

tmn(is, rip) = ...
calcErr(SVDD_N1C_TEST(Xtr, Ytr, alpha, Xtr, kernel, param, Rsquared), Ytr);
vmn(is, rip) = ...
calcErr(SVDD_N1C_TEST(Xtr, Ytr, alpha, Xvl, kernel, param, Rsquared), Yvl);

end

disp(['Opt_iter ', num2str(rip)]);

end

Tm = median(tmn,2);
Ts = std(tmn,0,2);
Vm = median(vmn,2);
Vs = std(vmn,0,2);

[~, idx] = min(Vm);   % first parameter achieving the minimal median validation error

s = intKerPar(idx);

end

function err = calcErr(T, Y)
n=size(T,1);
err=sum(T~=Y)/n;
end
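A selection sketch under assumed settings (the grid, split fraction and weights are illustrative; X, Y are a dataset with labels in {-1,+1}, e.g. from MixGauss after remapping):

intKerPar = [0.1 0.25 0.5 1 2];   % candidate gaussian widths (illustrative)
[s, Vm, Vs, Tm, Ts] = OptimiseParam_NSVDD(X, Y, 'gaussian', 0.3, 5, intKerPar, 1, 1);
% s is the width with the lowest median validation error over 5 random 70/30 splits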
46 changes: 46 additions & 0 deletions RadiusReductionSVDD.m
@@ -0,0 +1,46 @@
function R_star = RadiusReductionSVDD(Xtr, Ytr, alpha, Xts, Yts, kernel, param, Rsquared, threshold)

% RadiusReductionSVDD

% Usage: R_star = RadiusReductionSVDD(Xtr, Ytr, alpha, Xts, Yts, kernel, param, Rsquared, threshold)

% Xtr: training set
% Ytr: labels of training set
% alpha: Lagrange multipliers of the SVDD
% Xts: test set
% Yts: labels of the test set
% kernel: 'linear', 'gaussian', 'polynomial'
% param: kernel parameter
% Rsquared: squared radius of the SVDD
% threshold: maximum false-positive rate to be achieved

maxiter = 1000;

R_star = Rsquared;   % fallback: returned unchanged if the target rate is never reached
Rsq = Rsquared;

for i = 1:maxiter

Rsq = Rsq - 1e-3;   % shrink the squared radius slightly at each step

y = SVDD_N1C_TEST(Xtr, Ytr, alpha, Xts, kernel, param, Rsq);

Y = [y Yts];   % predicted labels alongside the true test labels

TN = sum(Y(:,1)==-1 & Y(:,2)==-1);
FP = sum(Y(:,1)==+1 & Y(:,2)==-1);

FPR = FP/(FP+TN);   % false-positive rate on the negative test points

if(FPR < threshold)
R_star = Rsq;
break;
end

disp(['Iteration --> ', num2str(i)])

end
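A usage sketch with the (assumed) corrected signature above; 0.05 targets a 5% false-positive rate, and s is an illustrative kernel width:

R_star = RadiusReductionSVDD(Xtr, Ytr, alpha, Xts, Yts, 'gaussian', s, Rsquared, 0.05);
y_tight = SVDD_N1C_TEST(Xtr, Ytr, alpha, Xts, 'gaussian', s, R_star);   % predictions with the reduced radius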
17 changes: 17 additions & 0 deletions SVDD_N1C_TEST.m
@@ -0,0 +1,17 @@
function y = SVDD_N1C_TEST(Xtr, Ytr, alpha, Xts, kernel, param, Rsquared)

% SVDD_N1C_TEST
% Usage: y = SVDD_N1C_TEST(Xtr, Ytr, alpha, Xts, kernel, param, Rsquared)

% Xtr: training set
% Ytr: labels of training set
% Xts: test set
% kernel: 'linear', 'gaussian', 'polynomial'
% param: kernel parameter
% Rsquared: squared radius of the SVDD

Tts = TestObject_N(Xtr, Ytr, alpha, Xts, kernel, param);   % squared distances to the centre
y = sign(Rsquared - Tts);                                  % +1 inside the ball, -1 outside

end
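A call sketch (the width 0.5 and the variable names are illustrative):

yhat = SVDD_N1C_TEST(Xtr, Ytr, alpha, Xts, 'gaussian', 0.5, Rsquared);
testErr = mean(yhat ~= Yts);   % fraction of misclassified test points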

87 changes: 87 additions & 0 deletions SVDD_N1C_TRAINING.m
@@ -0,0 +1,87 @@
function [alpha, Rsquared,a,SV,YSV] = SVDD_N1C_TRAINING(Xtr, Ytr, kernel, param, C1, C2, qdprg_opts)

% SVDD_N1C_TRAINING
% Usage: [alpha, Rsquared,a,SV,YSV] = SVDD_N1C_TRAINING(Xtr, Ytr, kernel, param, C1, C2, qdprg_opts)

% Xtr: training set
% Ytr: labels of training set
% kernel: 'linear', 'gaussian', 'polynomial'
% param: kernel parameter
% C1, C2: SVDD weights (box constraints) for the target and negative class
% qdprg_opts: 'on'/'off', whether quadprog displays optimization information

if ~any(Ytr==-1)

error('There must be a target class and a negative class');

else
n=size(Xtr,1);

if (isequal(kernel,'linear') || isequal(kernel,'polynomial'))

% Shift and row-normalize the data for the non-translation-invariant kernels
Ztr = Xtr+10;
Ztr = normalize(Ztr, 2, 'norm', 2);   % each row scaled to unit Euclidean norm (R2018a+)

else

Ztr = Xtr;

end

K = KernelMatrix(Ztr, Ztr, kernel, param);

% Compute alpha by maximizing the SVDD dual
% L = sum_i alpha_i K(x_i,x_i) - sum_l alpha_l K(x_l,x_l)
%     - sum_(i,j) alpha_i*alpha_j K(x_i,x_j)
%     + 2 sum_(l,j) alpha_l*alpha_j K(x_l,x_j)
%     - sum_(l,m) alpha_l*alpha_m K(x_l,x_m)
% where i,j run over the target class and l,m over the negative class.
% With labels y in {-1,+1} this reads L = sum_k alpha_k y_k K_kk - alpha'*(yy'.*K)*alpha.
% quadprog minimizes (1/2)x'Hx + f'x, so maximizing L corresponds to
% H = 2*(Ytr*Ytr'.*K) and f = -Ytr.*diag(K).

H = Ytr*Ytr'.*K;
H = H+H';            % H is symmetric, so this equals 2*(Ytr*Ytr'.*K)
f = -Ytr.*diag(K);   % minus sign because quadprog minimizes while the dual is maximized

lb = zeros(n,1);
ub = ones(n,1);
ub(Ytr==-1,1)=C2;
ub(Ytr==+1,1)=C1;
Aeq = ones(1,n);
Aeq(1,Ytr==-1)=-1;
Aeq(1,Ytr==+1)=+1;
beq = 1;

if isequal(qdprg_opts, 'on')
options = optimset('Display', 'iter');   % 'on' is not a valid Display value; show per-iteration output
else
options = optimset('Display', 'off');
end

alpha = quadprog(H,f,[],[],Aeq,beq,lb,ub,[],options);

% Centre of the ball, a = sum_k alpha_k y_k z_k, in the same representation used to build K

a = alpha'*(Ytr.*Ztr);

% Support Vectors

tol = 1E-5;

% Unbounded support vectors: 0 < alpha_k < C1 (target class) or 0 < alpha_k < C2 (negative class)
idxSV = find((alpha > tol & Ytr==+1 & alpha < C1-tol) | (alpha > tol & Ytr==-1 & alpha < C2-tol));
SV = Xtr(idxSV,:);
YSV = Ytr(idxSV,:);


if ~isempty(SV)

k = randperm(size(SV,1), 1);   % pick one unbounded support vector at random

x_s = SV(k,:);

% The squared distance of a boundary point to the centre gives the squared radius.
% Note: kernels here use the raw Xtr, matching SVDD_N1C_TEST; for the gaussian kernel Ztr equals Xtr.
Rsquared = TestObject_N(Xtr, Ytr, alpha, x_s, kernel, param);
else
Rsquared = 0;                  % no unbounded support vectors found
end
end
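An end-to-end sketch tying the routines together (all numbers illustrative; MixGauss labels are remapped to {-1,+1} as the SVDD code expects):

[X, Y] = MixGauss([[0;0],[1;1]], [0.5, 0.25], 200);
Y = 2*Y - 1;                                                       % remap {0,1} -> {-1,+1}
[alpha, Rsquared] = SVDD_N1C_TRAINING(X, Y, 'gaussian', 0.5, 1, 1, 'off');
yhat = SVDD_N1C_TEST(X, Y, alpha, X, 'gaussian', 0.5, Rsquared);
trainErr = mean(yhat ~= Y);                                        % resubstitution error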




9 changes: 9 additions & 0 deletions SquareDist.m
@@ -0,0 +1,9 @@
function D = SquareDist(X1, X2)

% Usage: D = SquareDist(X1, X2)
% Returns the n-by-m matrix of pairwise squared Euclidean distances
% between the rows of X1 (n-by-d) and the rows of X2 (m-by-d).

n = size(X1,1);
m = size(X2,1);

sq1 = sum(X1.*X1,2);   % squared row norms of X1
sq2 = sum(X2.*X2,2);   % squared row norms of X2

D = sq1*ones(1,m) + ones(n,1)*sq2' - 2*(X1*X2');   % ||x-y||^2 = ||x||^2 + ||y||^2 - 2*x'*y
end
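A quick sanity-check sketch (the pdist2 comparison is optional and assumes the Statistics and Machine Learning Toolbox):

X = randn(4,3);
D = SquareDist(X, X);
max(abs(diag(D)))                     % ~0: each point is at distance 0 from itself
% max(max(abs(D - pdist2(X,X).^2)))   % ~0 if pdist2 is available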
36 changes: 36 additions & 0 deletions TestObject_N.m
@@ -0,0 +1,36 @@
function T=TestObject_N(Xtr, Ytr, alpha, Z, kernel, param)

% TestObject_N
% Usage: T=TestObject_N(Xtr, Ytr, alpha, Z, kernel, param)

% Xtr: training set
% Ytr: labels of training set
% alpha: lagrange multipliers of SVDD
% Z: test object
% kernel: 'linear', 'gaussian', 'polynomial'
% param: kernel parameter

alf_i=alpha(Ytr==+1,1);   % multipliers of the target class
alf_l=alpha(Ytr==-1,1);   % multipliers of the negative class

flag_i=find(Ytr==+1);
flag_l=find(Ytr==-1);

X_i=Xtr(flag_i,:);
X_l=Xtr(flag_l,:);

K_i=KernelMatrix(X_i, X_i, kernel, param);
K_l=KernelMatrix(X_l, X_l, kernel, param);

Kz = diag(KernelMatrix(Z, Z, kernel, param));   % only the diagonal K(z_k,z_k) is needed

KZX_i=KernelMatrix(Z,X_i,kernel,param);
KZX_l=KernelMatrix(Z,X_l,kernel,param);

KX_lX_i=KernelMatrix(X_l, X_i, kernel, param);

% Squared feature-space distance ||phi(z) - a||^2 expanded in kernel terms
T = Kz - 2*(KZX_i*alf_i - KZX_l*alf_l) ...
    + alf_i'*K_i*alf_i - 2*alf_l'*KX_lX_i*alf_i + alf_l'*K_l*alf_l;

end
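In vector form, T is the squared distance ||phi(z) - a||^2 = K(z,z) - 2 sum_k alpha_k y_k K(z,x_k) + sum_(k,m) alpha_k alpha_m y_k y_m K(x_k,x_m). A one-point sketch (variable names illustrative):

T1 = TestObject_N(Xtr, Ytr, alpha, Xtr(1,:), kernel, param);   % squared distance of the first training point to the centre
inside = (T1 <= Rsquared);                                     % true if it lies inside the ball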