%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%
% Matlab script: repeated holdout evaluation of an RBF network on
% numeric data from agriculture.
%
% purpose: study RBF network error vs. number of hidden neurons
%          (NOTE: earlier versions compared SVMs vs. an MLP; that code
%           has been removed and only the RBF part remains)
%
% requires:
%  - statistics toolbox for cvpartition (from Matlab2008a)
%  - neural network toolbox (newrb, mapstd)
%  - readColData script (see link below)
%  - data file 'sorted_all' in the current directory
%
% new:
%  - included normalization (reversing model results)
% -----
% Georg Ru{\ss}
% russ@iws.cs.uni-magdeburg.de
% 2008-10-10
%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%

%% Preparation steps, workspace

% clean workspace (clearvars instead of 'clear all': keeps breakpoints
% and compiled functions intact, clears only variables)
clearvars;

% set clock for the runtime statistic at the end of the script
clock_start = clock;

% seed random for reproducible results
%rand('seed',1);

% change paths to wherever your data is located and readable to matlab
% uses script readColData from
% http://web.cecs.pdx.edu/~gerry/MATLAB/plotting/loadingPlotData.html#colHeadings
[label,id_column,data]=readColData('sorted_all',10,10,1);

%% data-specific stuff
% generate three data sets for the nnet to play with
% one:   N1, Yield2003, EM38                            -- target: Yield2004
% two:   N1, Yield2003, EM38, N2, REIP32                -- target: Yield2004
% three: N1, Yield2003, EM38, N2, REIP32, N3, REIP49    -- target: Yield2004
% works by eliminating the respective columns from the 'data' matrix above
% (columns are deleted from highest index to lowest so the remaining
% indices stay valid)
set_1 = data;
set_1(:,7) = [];
set_1(:,5) = [];
set_1(:,4) = [];
set_1(:,3) = [];
set_1(:,2) = [];

set_2 = data;
set_2(:,7) = [];
set_2(:,5) = [];
set_2(:,3) = [];

% only set_3 is actually used here
set_3 = data;
set_3(:,7) = [];

Size_set_3 = size(set_3);
% index of the target column: by construction the target (Yield2004) is
% the LAST column of set_3; computing it here replaces the previously
% hard-coded column index 8 and keeps the un-normalization below correct
% if the input column count ever changes
target_col = Size_set_3(1,2);

%% modeling stage

% partitioning parameters: size of the holdout (test) set is 1/k
k = 10;
p = 1/k;

% normalize data to zero mean / unit variance per column
% (mapstd works row-wise, hence the double transposition of the matrix;
% PS records the per-column mean/std so predictions can be mapped back)
[set_3_norm, PS] = mapstd(transpose(set_3));
set_3 = transpose(set_3_norm);

% where to store the results (one entry per trial j)
modelresults = struct('mae_rbf',[],'rmse_rbf',[]);
j_res = [];

% generate data partition (cvpartition from matlab's statistics toolbox)
% NOTE: this is a SINGLE holdout split, created once and reused for all
% 50 trials below -- the loop varies only the network size, not the split
% (no semicolon: echo the partition summary to the console)
cvdata = cvpartition(Size_set_3(1,1),'Holdout',p)

for j = 1:50
    % structured object to store actual, predicted and error values
    rbfresults = struct('actual',[], 'prediction', [], 'abserr', [], 'squerr',[]);

    j_res = vertcat(j_res,j);

    % split the normalized data into training and test parts; the last
    % column is the target, all preceding columns are the examples
    TrainSet = struct();
    TrainSet.all = set_3(cvdata.training,:);
    TrainSet.size = size(TrainSet.all);
    TrainSet.examples = TrainSet.all(:,1:TrainSet.size(1,2)-1);
    TrainSet.targets = TrainSet.all(:,TrainSet.size(1,2));

    TestSet = struct();
    TestSet.all = set_3(cvdata.test,:);
    TestSet.size = size(TestSet.all);
    TestSet.examples = TestSet.all(:,1:TestSet.size(1,2)-1);
    TestSet.targets = TestSet.all(:,TestSet.size(1,2));

    % ----------------------------------------
    % begin RBF stuff
    % newrb grows the network up to max_neurons = 2*j neurons, so trial j
    % measures the error of a network with (at most) 2*j hidden neurons
    target_error = 0;
    spread = 1;
    max_neurons = j*2;
    df = 100;
    [rbf, tr] = newrb(transpose(TrainSet.examples),transpose(TrainSet.targets), target_error, spread, max_neurons, df);

    rbfresults.prediction = transpose(sim(rbf,transpose(TestSet.examples)));
    rbfresults.actual = TestSet.targets;

    % un-normalize rbf results to original ranges (invert mapstd using
    % the recorded mean/std of the target column)
    rbfresults.prediction = (rbfresults.prediction - PS.ymean) * PS.xstd(target_col)/PS.ystd + PS.xmean(target_col);
    rbfresults.actual = (rbfresults.actual - PS.ymean) * PS.xstd(target_col)/PS.ystd + PS.xmean(target_col);
    % end RBF stuff
    % ----------------------------------------

    % ----------------------------------------
    % collect error values: generate error measures and store them inside
    % the respective results struct
    rbfresults.abserr = abs(rbfresults.actual - rbfresults.prediction);
    rbfresults.squerr = (rbfresults.abserr).^2;

    % calculate mae and rmse (no semicolons: echo progress per trial)
    rbfmae = mean(rbfresults.abserr)
    rbfrmse = sqrt(mean(rbfresults.squerr))

    % store mae and rmse
    modelresults.mae_rbf = vertcat(modelresults.mae_rbf, rbfmae);
    modelresults.rmse_rbf = vertcat(modelresults.rmse_rbf, rbfrmse);
end

%% plot mae and rmse against the number of hidden neurons (2*j)
e = figure;
plot(j_res*2,modelresults.mae_rbf,'kx-');
legend('mae rbf');
title('mean absolute error of RBF');
xlabel('neurons in hidden layer');
ylabel('error');
hold on;

h = figure;
plot(j_res*2,modelresults.rmse_rbf,'kx-')
legend('rmse rbf');
title('root mean squared error of RBF');
xlabel('neurons in hidden layer');
ylabel('error');

% corr = figure;
% plot(modelresults.rmse_mlp,modelresults.rmse_svm,'.');
% hold on;
% title('rmse mlp vs. rmse svm');
% xlabel('rmse mlp');
% ylabel('rmse svm');

%% get script stats (no semicolon: print the total runtime in seconds)
duration = etime(clock, clock_start)