function [model,maxC] = kfoldCV(classification,trainingData,k,cExpMax,maxDataPerClass)
% kfoldCV  k-fold cross-validation of the SVM cost parameter C (linear kernel).
%
%   Grid-searches C over 2^(-cExpMax) ... 2^(cExpMax) via k-fold CV, then
%   trains a final linear LIBSVM model on the full (class-balanced) data
%   using the best C found.
%
%   Inputs:
%     classification  - column vector of class labels, one per sample
%     trainingData    - samples along dim 1 (rows); remaining dims are features
%     k               - number of cross-validation folds
%     cExpMax         - half-width of the exponent grid; if <= 0 the grid
%                       search is skipped and C = 2^(-cExpMax) is used
%     maxDataPerClass - cap on samples per class, enforced by balanceClasses
%
%   Outputs:
%     model - LIBSVM model trained on the balanced full data with the best C
%     maxC  - INDEX of the winning grid point (1..2*cExpMax+1), NOT the C
%             value itself; the value is 2^(maxC-cExpMax-1)
noClasses=size(unique(classification),1);
if cExpMax>0 %CV only if necessary
    % Fix the fold assignment ONCE, before the C loop. Previously a new
    % random permutation was drawn for every candidate C, so each C was
    % scored on different folds and the accuracies were not comparable.
    randomMapping=transpose(randperm(size(trainingData,1)));
    foldId=mod(randomMapping,k)+1; % fold label in 1..k for every sample
    cvAccuracy=zeros(2*cExpMax+1,1);
    for cExp=1:2*cExpMax+1 % try different values for c
        c=2^(cExp-cExpMax-1);
        accuracy=zeros(k,3); % per-fold [accuracy, MSE, r^2] from svmpredict
        parfor i=1:k
            % split into training and test set for fold i
            trainData=trainingData(foldId~=i,:,:);
            testData=trainingData(foldId==i,:,:);
            trainClasses=classification(foldId~=i);
            testClasses=classification(foldId==i);
            %fprintf('i=%i, k=%i, c=%i\n',i,k,c)
            % balance only the training split; the test split stays untouched
            [trainClasses,trainData]=balanceClasses(trainClasses,trainData,...
                maxDataPerClass,noClasses);
            model=svmtrain(trainClasses,trainData(:,:),sprintf('-t 0 -c %f -q',c));
            [~, accuracy(i,:), ~]=svmpredict(testClasses, testData(:,:), model,'-q');
        end
        % for each value of c take the mean fold accuracy as its score
        cvAccuracy(cExp)=mean(accuracy(:,1));
    end
    [~,maxC]=max(cvAccuracy);
else
    maxC=1; %no gridsearch since only one C possible
end
bestC=2^(maxC-cExpMax-1);
% retrain on ALL data (balanced) with the winning C
[balancedClasses, balanceData]=balanceClasses(classification,trainingData,...
    maxDataPerClass,noClasses);
model=svmtrain(balancedClasses, balanceData(:,:),sprintf('-t 0 -c %f -q',bestC));
end