Thursday, April 28, 2011

Soft Margin 2-norm SVM

function [ weights bias ] = TannSchmidVectorMachineSoftMarginDos( K, H, labels, C)
%TANNSCHMIDVECTORMACHINESOFTMARGINDOS  Train a soft-margin 2-norm SVM via quadprog.
%   [WEIGHTS BIAS] = TannSchmidVectorMachineSoftMarginDos(K, H, labels, C)
%
%   Inputs:
%     K      - xm-by-xm kernel (Gram) matrix over the training set
%     H      - quadratic objective term, same size as K
%     labels - xm-by-1 vector of class labels (presumably +1/-1 -- TODO confirm)
%     C      - soft-margin penalty; rescaled by 1/xm internally
%   Outputs:
%     weights - solution vector returned by quadprog
%     bias    - offset from GetSoftBiasDos, normalized by sqrt(weights'*H*weights)
%
%   Side effect: saves weights, bias, and K to 'recordedResults0'.

[xm xn] = size(K);
ym = size(labels, 1);

% Validate dimensions BEFORE touching C or H -- the original rescaled and
% rebuilt H first, doing O(n^2) work even for invalid inputs.
if xm ~= ym
    display('Sorry, this is an idiot proof function. Try feeding in valid parameters next time, doof!');
    return;
end

% Scale C by the sample count, then fold the 2-norm slack penalty
% (1/C on the diagonal) into the quadratic term.
C = C / xm;
H = (1/2) * (H + (1 / C) * eye(xm, xn));

% Linear term of the QP objective is zero in this formulation.
f = zeros(xm, 1);

% Inequality constraints A*x <= b:
%   rows 1-2 : labels'*x == 0  (equality encoded as a pair of inequalities)
%   rows 3-4 : sum(x)    == 1  (likewise)
%   rows 5.. : x >= 0          (encoded as -x <= 0)
% Built by concatenation instead of a per-element fill loop.
A = [  labels';
      -labels';
       ones(1, xm);
      -ones(1, xm);
      -eye(xm) ];
b = [0; 0; 1; -1; zeros(xm, 1)];

% Second output is the objective value at the solution; unused, but kept
% so the call shape matches the original.
[weights v] = quadprog(H, f, A, b);

% Recover the bias from the solution (note: uses the rescaled C), then
% normalize it by the solution's H-norm.
bias = GetSoftBiasDos(weights, K, labels, C);
bias = bias / sqrt(weights' * H * weights);

% Persist results for later inspection.
save('recordedResults0', 'weights', 'bias', 'K');

No comments:

Post a Comment