NeuralNetworkModel.m
function [ net ] = NeuralNetworkModel( Learners_values, annotation )
    % One-hot encode the class annotations: Target is (classes x samples)
    Class_list = unique(annotation);
    Target = zeros(length(Class_list), length(annotation));
    for i = 1:length(annotation)
        Target(strcmp(Class_list, annotation{i}), i) = 1;
    end
    % train() expects inputs as (features x samples), so transpose
    Input = Learners_values';
    performFcn = 'crossentropy';    % Cross-entropy performance function
    trainFcn = 'trainscg';          % Scaled conjugate gradient backpropagation

    % Create a pattern recognition network with one hidden layer
    hiddenLayerSize = 10;
    net = patternnet(hiddenLayerSize, trainFcn, performFcn);
    net.input.processFcns = {'removeconstantrows','mapminmax'};
    net.output.processFcns = {'removeconstantrows','mapminmax'};

    % Divide the samples randomly into training, validation and test sets
    net.divideFcn = 'dividerand';   % Divide data randomly
    net.divideMode = 'sample';      % Divide up every sample
    net.divideParam.trainRatio = 75/100;
    net.divideParam.valRatio = 5/100;
    net.divideParam.testRatio = 20/100;

    % Plots generated during and after training
    net.plotFcns = {'plotperform','plottrainstate','ploterrhist', ...
        'plotconfusion','plotroc'};
    % Train the network
    [net, tr] = train(net, Input, Target);

    % Test the network on all of the data (trailing semicolons are omitted
    % on the performance lines so the values are printed to the console)
    Y = net(Input);
    errors = gsubtract(Target, Y);
    performance = perform(net, Target, Y)
    tind = vec2ind(Target);
    yind = vec2ind(Y);
    percentErrors = sum(tind ~= yind) / numel(tind)

    % Recalculate training, validation and test performance using the
    % division masks returned by train()
    trainTargets = Target .* tr.trainMask{1};
    valTargets = Target .* tr.valMask{1};
    testTargets = Target .* tr.testMask{1};
    trainPerformance = perform(net, trainTargets, Y)
    valPerformance = perform(net, valTargets, Y)
    testPerformance = perform(net, testTargets, Y)

    % View the network diagram
    view(net)
end
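
A minimal usage sketch, assuming Learners_values is a samples-by-features numeric matrix and annotation is a cell array of class-label strings (which is what the transpose and the per-sample annotation{i} indexing above imply). The variable names and example data below are hypothetical illustrations, not part of the original file:

% Hypothetical example data: 200 samples, 12 features, two classes
Learners_values = rand(200, 12);
annotation = repmat({'classA'}, 1, 200);
annotation(2:2:end) = {'classB'};

% Train the pattern recognition network and inspect its predictions
net = NeuralNetworkModel(Learners_values, annotation);
scores = net(Learners_values');          % class scores, (classes x samples)
[~, predictedIdx] = max(scores, [], 1);  % row indices into unique(annotation)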