Advertisement
Not a member of Pastebin yet?
Sign Up,
it unlocks many cool features!
%%%%%% LOAD DATA %%%%%%
% Expects podaci_564.mat to contain 'data' as a 3xN matrix:
% rows 1-2 = features, row 3 = binary class label (0/1).
raw = load('podaci_564.mat');
matrix = (raw.data)';
% NOTE(review): 'input' shadows the MATLAB builtin input(); kept for
% compatibility with the rest of this script.
input = matrix(:, 1:2);   % features (Nx2)
output = matrix(:, 3);    % class labels (Nx1), 0 or 1
% Logical indexing is the idiomatic and faster form of input(find(cond),:).
negative = input(output == 0, :);
positive = input(output == 1, :);
% a - visualize the two classes in feature space
figure('Name', 'Data classes');
plot(negative(:,1), negative(:,2), 'r+');  % class 0 - red crosses
hold on;
plot(positive(:,1), positive(:,2), 'bo');  % class 1 - blue circles
hold off;
% NOTE - the first 450 samples are class 0 and the next 114 are class 1,
% so the data must be shuffled before splitting.
% b - split into training (70%), cross-validation (15%) and test (15%) sets
input_len = length(input);
perm = randperm(input_len);
input = input(perm, :);
output = output(perm);
training_offset = int32(0.7 * input_len);
cross_val_offset = training_offset + int32(0.15 * input_len);
training_input  = input(1:training_offset, :);
training_output = output(1:training_offset, :);
crossval_input  = input(training_offset + 1:cross_val_offset, :);
crossval_output = output(training_offset + 1:cross_val_offset, :);
test_input  = input(cross_val_offset + 1:end, :);
test_output = output(cross_val_offset + 1:end, :);
% c - neural network creation (trainlm is the default training function)
net = feedforwardnet([4 3]);           % two hidden layers: 4 and 3 neurons
net.layers{3}.transferFcn = 'tansig';  % output layer transfer function
% Disable the toolbox's automatic data division - the train/val/test split
% was already done manually above, so train on everything we pass in.
net.divideParam.trainRatio = 1;
net.divideParam.valRatio   = 0;
net.divideParam.testRatio  = 0;
% neural network training on the training split only
[net, tr, Y, E] = train(net, training_input', training_output');
% display the network architecture
view(net);
% d - cross-validation hyperparameter search over layer structure,
% transfer-function combination and regularization strength.
best_acc = 0;
best_structure = [3 3];
best_trainFcn = 'poslin';
best_reg = 0.2;
best_c1_weight = (1);
best_f1 = 0;
% Combined train+validation set, used by the final retraining further below.
new_training_input  = [training_input; crossval_input];
new_training_output = [training_output; crossval_output];
% Hoisted out of the loop: the original printed this on every iteration.
fprintf('Searching for the best hyperparameters....\n');
for structure = {[2 2] [2 3] [3 3] [3 4] [4 4] [4 5] [5 6] [5 5] [6 6] [8 8] [10 10] [10 12] [12 12]}
    for trainFcn = create_tf_combinations()
        for reg = {0, 0.14, 0.05, 0.2, 0.3, 0.5, 0.8}
            net = feedforwardnet(structure{1});
            % BUG FIX: network properties are case-sensitive -
            % 'net.trainparam' raises an error; it must be 'trainParam'.
            net.trainParam.showWindow = false;
            net.divideParam.trainRatio = 1;
            net.divideParam.valRatio   = 0;
            net.divideParam.testRatio  = 0;
            net.performParam.regularization = reg{1};
            net.layers{1}.transferFcn = trainFcn{1}{1};
            net.layers{2}.transferFcn = trainFcn{1}{2};
            net.layers{3}.transferFcn = trainFcn{1}{3};
            % BUG FIX: train on the training split and score F1 on the
            % held-out cross-validation split. The original trained AND
            % scored on the combined train+cv data, so the search measured
            % training fit, not generalization.
            net = train(net, training_input', training_output');
            netoutput_cv = sim(net, crossval_input');
            [precision, recall] = calculate_conf_params(crossval_output', netoutput_cv);
            if precision + recall > 0
                current_f1 = 2 * precision * recall / (precision + recall);
            else
                % Guard: 0/0 would give NaN, and NaN > best_f1 is always
                % false in a misleading way - make the score an explicit 0.
                current_f1 = 0;
            end
            if (current_f1 > best_f1)
                best_f1 = current_f1;
                best_structure = structure{1};
                best_reg = reg{1};
                best_trainFcn = trainFcn{1};
            end
        end
    end
end
% Report the winning hyperparameters, retrain with them on the combined
% train+validation data, then measure precision/recall on that set and on
% the held-out test set.
fprintf('-----------Best hyperparameters-----------\n');
fprintf('Best layers structure: layer 1 = %d, layer 2 = %d\n', best_structure(1), best_structure(2));
fprintf('Optimal regularization parameter:%2.2f\n', best_reg);
fprintf('Optimal transfer function(1) = %s, transfer function(2) = %s, transfer function(3) = %s\n'...
    , best_trainFcn{1}, best_trainFcn{2}, best_trainFcn{3});
fprintf('-----------Best hyperparameters-----------\n');
% Rebuild the network with the winning configuration.
net = feedforwardnet(best_structure);
net.divideParam.trainRatio = 1;
net.divideParam.valRatio   = 0;
net.divideParam.testRatio  = 0;
net.performParam.regularization = best_reg;
net.layers{1}.transferFcn = best_trainFcn{1};
net.layers{2}.transferFcn = best_trainFcn{2};
net.layers{3}.transferFcn = best_trainFcn{3};
[net, tr, Y, E] = train(net, new_training_input', new_training_output');
% Evaluate on the data the final model was trained on...
netoutput_training = sim(net, new_training_input');
[precision, recall] = calculate_conf_params(new_training_output', netoutput_training);
fprintf('Training + validation dataset results: precision = %4.4f, recall = %4.4f\n',...
    precision, recall);
% ...and on the held-out test set.
netoutput_test = sim(net, test_input');
[precision, recall] = calculate_conf_params(test_output', netoutput_test);
fprintf('Test dataset results: precision = %4.4f, recall = %4.4f\n',...
    precision, recall);
plotconfusion(new_training_output', netoutput_training, ...
    'Training + validation', test_output', netoutput_test, 'Test');
Advertisement
Add Comment
Please, Sign In to add comment
Advertisement