Advertisement
Not a member of Pastebin yet?
Sign Up,
it unlocks many cool features!
% --- Data setup: index raw .wav recordings and prepare scalogram output folders ---
rng(1)  % fix the random seed for reproducibility

% Root folder of the raw audio; subfolder names supply the class labels.
datafolder = "C:\Users\leere\Documents\MATLAB\Centrifugal Pump";
ads = audioDatastore(datafolder, ...
    'IncludeSubfolders',true, ...
    'FileExtensions','.wav', ...
    'LabelSource','foldernames');

% Destination for the RGB scalogram images, one subfolder per class.
% BUG FIX: the original assigned the literal string
% "datafolder/structure_Scallogram", which creates a folder literally named
% "datafolder" in the current directory instead of nesting under the data
% root held in the datafolder variable.
datafolder_scallogram = fullfile(datafolder,"structure_Scallogram");

% Create one output subfolder per class label. Guard with exist() so reruns
% do not trigger "directory already exists" warnings from mkdir.
catego = categories(ads.Labels);
for i = 1:numel(catego)
    imgLoc = fullfile(datafolder_scallogram,char(catego{i}));
    if ~exist(imgLoc,'dir')
        mkdir(imgLoc)
    end
end

% Convert each audio file into an RGB scalogram image saved under the
% per-class folders created above.
% NOTE(review): helperCreateRGBfromTF is a project helper defined elsewhere.
helperCreateRGBfromTF(ads,datafolder_scallogram)
% --- Build an image datastore over the scalograms and split 60/20/20 ---
allImages = imageDatastore(datafolder_scallogram, ...
    'IncludeSubfolders',true, ...
    'LabelSource','foldernames');
rng(1)  % reseed so the randomized per-label split is reproducible
[imgsTrain,imgsValidation,imgsTest] = splitEachLabel(allImages,0.6,0.2,'randomized');

% Report the size and per-label distribution of each split.
disp(['Number of training images: ',num2str(numel(imgsTrain.Files))]);
countEachLabel(imgsTrain)
disp(['Number of validation images: ',num2str(numel(imgsValidation.Files))]);
countEachLabel(imgsValidation)
% BUG FIX: the original message said "validation" here, but this line
% reports the TEST split.
disp(['Number of test images: ',num2str(numel(imgsTest.Files))]);
countEachLabel(imgsTest)
% --- Load pretrained GoogLeNet and adapt its final layers to our classes ---
net = googlenet;
net.Layers(1)                        % display the image input layer
inputSize = net.Layers(1).InputSize;

% Obtain a layer graph whether the network is a SeriesNetwork or a DAG.
if isa(net,'SeriesNetwork')
    lgraph = layerGraph(net.Layers);
else
    lgraph = layerGraph(net);
end

% Find the last learnable layer and the classification output layer.
% NOTE(review): findLayersToReplace is a project helper defined elsewhere.
[learnableLayer,classLayer] = findLayersToReplace(lgraph);
[learnableLayer,classLayer]          % display the layers being replaced

numClasses = numel(categories(imgsTrain.Labels));

% Replacement learnable layer sized to our label set; learn-rate factors of
% 10 make the fresh weights adapt faster than the pretrained ones.
if isa(learnableLayer,'nnet.cnn.layer.FullyConnectedLayer')
    freshLayer = fullyConnectedLayer(numClasses, ...
        'Name','new_fc', ...
        'WeightLearnRateFactor',10, ...
        'BiasLearnRateFactor',10);
elseif isa(learnableLayer,'nnet.cnn.layer.Convolution2DLayer')
    freshLayer = convolution2dLayer(1,numClasses, ...
        'Name','new_conv', ...
        'WeightLearnRateFactor',10, ...
        'BiasLearnRateFactor',10);
end

% Splice the new learnable and classification layers into the graph.
lgraph = replaceLayer(lgraph,learnableLayer.Name,freshLayer);
lgraph = replaceLayer(lgraph,classLayer.Name, ...
    classificationLayer('Name','new_classoutput'));

% Visualize the modified layer graph.
figure('Units','normalized','Position',[0.3 0.3 0.4 0.4]);
plot(lgraph)
ylim([0,10])
% --- Training configuration and fine-tuning ---
batchSz = 16;
% Validate once per epoch: iterations per epoch = floor(#train / batch size).
valFreq = floor(numel(imgsTrain.Files)/batchSz);

trainOpts = trainingOptions('sgdm', ...
    'MiniBatchSize',batchSz, ...
    'MaxEpochs',6, ...
    'InitialLearnRate',3e-4, ...
    'Shuffle','every-epoch', ...
    'ValidationData',imgsValidation, ...
    'ValidationFrequency',valFreq, ...
    'Verbose',false, ...
    'Plots','training-progress');

% Free GPU memory before training, then fine-tune the modified network.
reset(gpuDevice())
net = trainNetwork(imgsTrain,lgraph,trainOpts);
% --- Evaluate on the held-out test set and show a few sample predictions ---
[predLabels,predScores] = classify(net,imgsTest);
% No semicolon: the accuracy value is intentionally echoed to the console.
accuracy = mean(predLabels == imgsTest.Labels)

% Display four randomly chosen test images with predicted label and confidence.
sampleIdx = randperm(numel(imgsTest.Files),4);
figure
for k = 1:4
    subplot(2,2,k)
    imshow(readimage(imgsTest,sampleIdx(k)))
    label = predLabels(sampleIdx(k));
    title(string(label) + ", " + num2str(100*max(predScores(sampleIdx(k),:)),3) + "%");
end
Advertisement
Add Comment
Please, Sign In to add comment
Advertisement