Training a neural network model

4 views (last 30 days)
if reduceDataset
numUniqueLabels = numel(unique(adsTrain.Labels));
% Reduce the dataset by a factor of 20
adsTrain = splitEachLabel(adsTrain,round(numel(adsTrain.Files) / numUniqueLabels / 20));
adsValidation = splitEachLabel(adsValidation,round(numel(adsValidation.Files)/ numUniqueLabels / 20));
end
On line 2 I get a Code Analyzer message saying the statement will never execute.

Accepted Answer

Rik 2021-3-12
This sounds like a Code Analyzer (mlint) warning. The cause would be reduceDataset being false, which makes the entire if block be skipped.
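For example, this minimal sketch (assuming reduceDataset is assigned a literal false earlier in the same script) reproduces the message:

reduceDataset = false;
if reduceDataset
    % Code Analyzer underlines this line: "This statement (and possibly following ones) cannot be reached"
    adsTrain = splitEachLabel(adsTrain,20); % never runs while reduceDataset stays false
end

Setting reduceDataset to true, or computing it at run time, makes the message go away.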
1 Comment
Prashant Saini 2021-3-12
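% Build the training datastore: keep the command words plus about 20% of the
% other words, relabeled as "unknown"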
ads = audioDatastore(fullfile('datafiles','train'), ...
'IncludeSubfolders',true, ...
'FileExtensions','.wav', ...
'LabelSource','foldernames');
commands = categorical(["backward","forward","left","right","stop"]);
isCommand = ismember(ads.Labels,commands);
isUnknown = ~ismember(ads.Labels,[commands,"_background_noise_"]);
includeFraction = 0.2;
mask = rand(numel(ads.Labels),1) < includeFraction;
isUnknown = isUnknown & mask;
ads.Labels(isUnknown) = categorical("unknown");
adsTrain = subset(ads,isCommand|isUnknown);
countEachLabel(ads)
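% Build the validation datastore the same way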
ads = audioDatastore(fullfile('datafiles', 'validation'), ...
'IncludeSubfolders',true, ...
'FileExtensions','.wav', ...
'LabelSource','foldernames');
isCommand = ismember(ads.Labels,commands);
isUnknown = ~isCommand;
includeFraction = 0.2;
mask = rand(numel(ads.Labels),1) < includeFraction;
isUnknown = isUnknown & mask;
ads.Labels(isUnknown) = categorical("unknown");
adsValidation = subset(ads,isCommand|isUnknown);
countEachLabel(adsValidation)
reduceceDataset=false;
if reduceDataset
numUniqueLabels = numel(unique(adsTrain.Labels));
% Reduce the dataset by a factor of 20
adsTrain = splitEachLabel(adsTrain,round(numel(adsTrain.Files) / numUniqueLabels / 20));
adsValidation = splitEachLabel(adsValidation,round(numel(adsValidation.Files)/ numUniqueLabels / 20));
end
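% Feature extraction parameters: 1 s segments, 25 ms frames, 10 ms hop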
fs = 16e3;
segmentDuration = 1;
frameDuration = 0.025;
hopDuration = 0.010;
segmentSamples = round(segmentDuration*fs);
frameSamples = round(frameDuration*fs);
hopSamples = round(hopDuration*fs);
overlapSamples = frameSamples - hopSamples;
FFTLength = 512;
numBands = 50;
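% Configure an audioFeatureExtractor that returns a 50-band Bark spectrum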
afe = audioFeatureExtractor( ...
'SampleRate',fs, ...
'FFTLength',FFTLength, ...
'Window',hann(frameSamples,'periodic'), ...
'OverlapLength',overlapSamples, ...
'barkSpectrum',true);
setExtractorParams(afe,"barkSpectrum","NumBands",50);
x = read(adsTrain);
numSamples = size(x,1);
numToPadFront = floor( (segmentSamples - numSamples)/2 );
numToPadBack = ceil( (segmentSamples - numSamples)/2 );
xPadded = [zeros(numToPadFront,1,'like',x);x;zeros(numToPadBack,1,'like',x)];
features = extract(afe,xPadded);
[numHops,numFeatures] = size(features);
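% Use a parallel pool for feature extraction if Parallel Computing Toolbox is available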
if ~isempty(ver('parallel')) && ~reduceDataset
pool = gcp;
numPar = numpartitions(adsTrain,pool);
else
numPar = 1;
end
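% Extract features for the training set, one datastore partition per worker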
parfor ii = 1:numPar
subds = partition(adsTrain,numPar,ii);
XTrain = zeros(numHops,numBands,1,numel(subds.Files));
for idx = 1:numel(subds.Files)
x = read(subds);
xPadded = [zeros(floor((segmentSamplessize(x,1))/2),1);x;zeros(ceil((segmentSamples-size(x,1))/2),1)];
XTrain(:,:,:,idx) = extract(afe,xPadded);
end
XTrainC{ii} = XTrain;
end
XTrain = cat(4,XTrainC{:});
[numHops,numBands,numChannels,numSpec] = size(XTrain);
epsil = 1e-6;
XTrain = log10(XTrain + epsil);
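% Repeat feature extraction for the validation set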
if ~isempty(ver('parallel'))
pool = gcp;
numPar = numpartitions(adsValidation,pool);
else
numPar = 1;
end
parfor ii = 1:numPar
subds = partition(adsValidation,numPar,ii);
XValidation = zeros(numHops,numBands,1,numel(subds.Files));
for idx = 1:numel(subds.Files)
x = read(subds);
xPadded = [zeros(floor((segmentSamplessize(x,1))/2),1);x;zeros(ceil((segmentSamples-size(x,1))/2),1)];
XValidation(:,:,:,idx) = extract(afe,xPadded);
end
XValidationC{ii} = XValidation;
end
XValidation = cat(4,XValidationC{:});
XValidation = log10(XValidation + epsil);
YTrain = removecats(adsTrain.Labels);
YValidation = removecats(adsValidation.Labels);
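% Plot waveforms and auditory spectrograms of a few training examples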
specMin = min(XTrain,[],'all');
specMax = max(XTrain,[],'all');
idx = randperm(numel(adsTrain.Files),3);
figure('Units','normalized','Position',[0.2 0.2 0.6 0.6]);
for i = 1:3
[x,fs] = audioread(adsTrain.Files{idx(i)});
subplot(2,3,i)
plot(x)
axis tight
title(string(adsTrain.Labels(idx(i))))
subplot(2,3,i+3)
spect = (XTrain(:,:,1,idx(i))');
pcolor(spect)
caxis([specMin specMax])
shading flat
sound(x,fs)
pause(2)
end
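% Create spectrograms of background-noise clips and add them as a "background" class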
adsBkg = audioDatastore(fullfile('datafiles', 'background'));
numBkgClips = 4000;
if reduceDataset
numBkgClips =numBkgClips/20;
end
volumeRange = log10([1e-4,1]);
numBkgFiles = numel(adsBkg.Files);
numClipsPerFile =histcounts(1:numBkgClips,linspace(1,numBkgClips,numBkgFiles+1));
Xbkg = zeros(size(XTrain,1),size(XTrain,2),1,numBkgClips,'single');
bkgAll = readall(adsBkg);
ind = 1;
for count = 1:numBkgFiles
bkg = bkgAll{count};
idxStart = randi(numel(bkg)-fs,numClipsPerFile(count),1);
idxEnd = idxStart+fs-1;
gain = 10.^((volumeRange(2)-volumeRange(1))*rand(numClipsPerFile(count),1) +volumeRange(1));
for j = 1:numClipsPerFile(count)
x = bkg(idxStart(j):idxEnd(j))*gain(j);
x = max(min(x,1),-1);
Xbkg(:,:,:,ind) = extract(afe,x);
if mod(ind,1000)==0
disp("Processed " + string(ind) + " background clips out of " +string(numBkgClips))
end
ind = ind + 1;
end
end
Xbkg = log10(Xbkg + epsil);
numTrainBkg = floor(0.85*numBkgClips);
numValidationBkg = floor(0.15*numBkgClips);
XTrain(:,:,:,end+1:end+numTrainBkg) = Xbkg(:,:,:,1:numTrainBkg);
YTrain(end+1:end+numTrainBkg) = "background";
XValidation(:,:,:,end+1:end+numValidationBkg) = Xbkg(:,:,:,numTrainBkg+1:end);
YValidation(end+1:end+numValidationBkg) = "background";
figure('Units','normalized','Position',[0.2 0.2 0.5 0.5])
subplot(2,1,1)
histogram(YTrain)
title("Training Label Distribution")
subplot(2,1,2)
histogram(YValidation)
title("Validation Label Distribution")
classWeights = 1./countcats(YTrain);
classWeights = classWeights/mean(classWeights);
numClasses = numel(categories(YTrain));
timePoolSize = ceil(numHops/8);
dropoutProb = 0.2;
numF = 12;
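% Define the convolutional network architecture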
layers = [
imageInputLayer([numHops numBands])
convolution2dLayer(3,numF,'Padding','same')
batchNormalizationLayer
reluLayer
maxPooling2dLayer(3,'Stride',2,'Padding','same')
convolution2dLayer(3,2*numF,'Padding','same')
batchNormalizationLayer
reluLayer
maxPooling2dLayer(3,'Stride',2,'Padding','same')
convolution2dLayer(3,4*numF,'Padding','same')
batchNormalizationLayer
reluLayer
maxPooling2dLayer(3,'Stride',2,'Padding','same')
convolution2dLayer(3,4*numF,'Padding','same')
batchNormalizationLayer
reluLayer
convolution2dLayer(3,4*numF,'Padding','same')
batchNormalizationLayer
reluLayer
maxPooling2dLayer([timePoolSize,1])
dropoutLayer(dropoutProb)
fullyConnectedLayer(numClasses)
softmaxLayer
classificationLayer];
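% Specify training options and train the network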
miniBatchSize = 128;
validationFrequency = floor(numel(YTrain)/miniBatchSize);
options = trainingOptions('adam', ...
'InitialLearnRate',3e-4, ...
'MaxEpochs',25, ...
'MiniBatchSize',miniBatchSize, ...
'Shuffle','every-epoch', ...
'Plots','training-progress', ...
'Verbose',false, ...
'ValidationData',{XValidation,YValidation}, ...
'ValidationFrequency',validationFrequency, ...
'LearnRateSchedule','piecewise', ...
'LearnRateDropFactor',0.1, ...
'LearnRateDropPeriod',20);
trainedNet = trainNetwork(XTrain,YTrain,layers,options);
if reduceDataset
load('commandNet.mat','trainedNet');
end
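% Evaluate the trained network on the training and validation sets and plot the confusion matrix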
YValPred = classify(trainedNet,XValidation);
validationError = mean(YValPred ~= YValidation);
YTrainPred = classify(trainedNet,XTrain);
trainError = mean(YTrainPred ~= YTrain);
disp("Training error: " + trainError*100 + "%")
disp("Validation error: " + validationError*100 + "%")
figure('Units','normalized','Position',[0.2 0.2 0.5 0.5]);
cm = confusionchart(YValidation,YValPred);
cm.Title = 'Confusion Matrix for Validation Data';
cm.ColumnSummary = 'column-normalized';
cm.RowSummary = 'row-normalized';
This is the complete code. Could you help me find where I am making a mistake?
The current error is:
Error using wheel (line 87)
An UndefinedFunction error was thrown on the workers for 'segmentSamplessize'. This might be because the file containing
'segmentSamplessize' is not accessible on the workers. Use addAttachedFiles(pool, files) to specify the required files to
be attached. For more information, see the documentation for 'parallel.Pool/addAttachedFiles'.
Caused by:
Undefined function 'segmentSamplessize' for input arguments of type 'double'.
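The undefined name in that message suggests a dropped minus sign: segmentSamplessize is presumably meant to be segmentSamples - size(x,1), the same padding computed earlier with numToPadFront and numToPadBack. A corrected padding line (a sketch, assuming the intent is to zero-pad each clip to segmentSamples samples) would be:

xPadded = [zeros(floor((segmentSamples-size(x,1))/2),1); x; zeros(ceil((segmentSamples-size(x,1))/2),1)];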


More Answers (0)
