Check which category the given data belongs to.

1 view (last 30 days)
I have trained the given dataset to classify into 4 different target categories using the artificial neural network tool (nprtool). Now I have to check new data against the trained network and display the category that the data belongs to. Please, someone help me solve this problem. Here is the code. I didn't understand the first line of the code, i.e. (X,~,~): what does it refer to, and where do I have to give a new sample as input to this network?
function [Y,Xf,Af] = myNeuralNetworkFunction(X,~,~)
%MYNEURALNETWORKFUNCTION neural network simulation function.
%
% Generated by Neural Network Toolbox function genFunction, 18-Jul-2018 10:37:01.
%
% [Y] = myNeuralNetworkFunction(X,~,~) takes these arguments:
%
% X = 1xTS cell, 1 inputs over TS timesteps
% Each X{1,ts} = 2988xQ matrix, input #1 at timestep ts.
%
% and returns:
% Y = 1xTS cell of 1 outputs over TS timesteps.
% Each Y{1,ts} = 4xQ matrix, output #1 at timestep ts.
%
% where Q is number of samples (or series) and TS is the number of timesteps.
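%
% For a static pattern-recognition network such as one created with nprtool
% there is only a single timestep, so TS = 1 and X can simply be passed as one
% 2988xQ matrix; the simulation code below wraps a plain matrix into a cell
% array automatically. The two "~" placeholders in the argument list stand for
% inputs that are accepted but ignored (they would carry initial delay states
% for a time-series network), so they can be omitted when calling the function.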
%#ok<*RPMT0>
% ===== NEURAL NETWORK CONSTANTS =====
% Input 1 x1_step1.xoffset = [36.6032980307133;36.597744090219;0.158385245033053;1.31643417503494;36.6049269523476;-1.37584123238539;11.0299405848326;12.504831642294;0.288194444444444;0.332638888888889;28.8194444444444;33.2638888888889;0;0;0;0;0;0;0;0;0;0;0;0;0;0;0;0;0;0;0;0;0;0;0;0;0;0;0;0.770633759029644;0.768310906303693;0.766707167608389;0.764830097334775;0.762377934568729;0.762331857496514;0.763674710343093]; x1_step1.ymin = -1;
% Layer 1 b1 = [1.3994768477631403;-1.0903884727432014;-0.78011654543485587;0.46705708446426597;0.16258381065370511;0.15440406609565069;-0.46920117774766312;-0.77810609521143459;-1.0906776883213438;-1.4018315002684629]; IW1_1 = [-0.033467156180729256 -0.028666674896511351 -0.015836935383914983 -0.022354599237305886 0.034889064440252859 -0.0064257682289154327 -0.035010735784467298 -0.026398464854472514 -0.014592920844527422 0.0048257790054789321 0.018154687124502519 -0.03867327363872404 -0.023292923677313667 0.025188775694068535 0.045099731617422686 -0.030543785712876966 0.03771919987654563 0.027258279939602156 0.015255478168750499 0.037858970851422521 0.027504725538651929 0.037368094581401889];
% Layer 2 b2 = [0.70447808985535643;-0.84986964370713036;-0.22125627190808969;0.38507031312787227]; LW2_1 = [0.32190735802132342 -0.21033440896068742 0.37105847152081861 -0.34423404255830287 -0.60160268808835204 0.53680797032043304 0.084849606402260497 -0.01577021545359597 0.97526564139699645 -0.93082198599126553;-0.09009035442617104 -0.77884663695955614 0.071553227629483948 0.1605495822863553 -0.75867661703141376 0.84203715319603523 -0.1514908834679804 -0.45805124784681861 -0.48046860963249433 0.24476893884595838];
% ===== SIMULATION ========
% Format Input Arguments
isCellX = iscell(X);
if ~isCellX
    X = {X};
end

% Dimensions
TS = size(X,2); % timesteps
if ~isempty(X)
    Q = size(X{1},2); % samples/series
else
    Q = 0;
end

% Allocate Outputs
Y = cell(1,TS);

% Time loop
for ts=1:TS
% Input 1
Xp1 = mapminmax_apply(X{1,ts},x1_step1);
% Layer 1
a1 = tansig_apply(repmat(b1,1,Q) + IW1_1*Xp1);
% Layer 2
a2 = softmax_apply(repmat(b2,1,Q) + LW2_1*a1);
% Output 1
Y{1,ts} = a2;
end
% Final Delay States
Xf = cell(1,0);
Af = cell(2,0);

% Format Output Arguments
if ~isCellX
    Y = cell2mat(Y);
end
end
% ===== MODULE FUNCTIONS ========
% Map Minimum and Maximum Input Processing Function
function y = mapminmax_apply(x,settings)
y = bsxfun(@minus,x,settings.xoffset);
y = bsxfun(@times,y,settings.gain);
y = bsxfun(@plus,y,settings.ymin);
end

% Competitive Soft Transfer Function
function a = softmax_apply(n,~)
if isa(n,'gpuArray')
    a = iSoftmaxApplyGPU(n);
else
    a = iSoftmaxApplyCPU(n);
end
end
function a = iSoftmaxApplyCPU(n)
nmax = max(n,[],1);
n = bsxfun(@minus,n,nmax);
numerator = exp(n);
denominator = sum(numerator,1);
denominator(denominator == 0) = 1;
a = bsxfun(@rdivide,numerator,denominator);
end
function a = iSoftmaxApplyGPU(n)
nmax = max(n,[],1);
numerator = arrayfun(@iSoftmaxApplyGPUHelper1,n,nmax);
denominator = sum(numerator,1);
a = arrayfun(@iSoftmaxApplyGPUHelper2,numerator,denominator);
end
function numerator = iSoftmaxApplyGPUHelper1(n,nmax)
numerator = exp(n - nmax);
end
function a = iSoftmaxApplyGPUHelper2(numerator,denominator)
if (denominator == 0)
    a = numerator;
else
    a = numerator ./ denominator;
end
end
% Sigmoid Symmetric Transfer Function
function a = tansig_apply(n,~)
a = 2 ./ (1 + exp(-2*n)) - 1;
end
Also, please help me understand what this code means, because I don't understand what some parts of it are actually doing. Thank you in advance.
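Based on the interface documented in the generated function above (each input is a 2988xQ matrix and each output is a 4xQ matrix of category scores), a minimal usage sketch could look like the following; newData is an assumed variable name for the new samples, one column per sample with the same 2988 features used during training:

% newData: 2988xQ matrix of new samples (assumed name), one column per sample
scores = myNeuralNetworkFunction(newData);  % 4xQ matrix of category scores
[~, category] = max(scores, [], 1);         % index (1..4) of the winning category for each sample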

Answers (0)
