-
Notifications
You must be signed in to change notification settings - Fork 0
/
Copy pathfc_learning_api_mnist_dream.m
60 lines (38 loc) · 1.84 KB
/
fc_learning_api_mnist_dream.m
1
2
3
4
5
6
7
8
9
10
11
12
13
14
15
16
17
18
19
20
21
22
23
24
25
26
27
28
29
30
31
32
33
34
35
36
37
38
39
40
41
42
43
44
45
46
47
48
49
50
51
52
53
54
55
56
57
58
59
60
function [ output_args ] = fc_learning_api( input_args )
%FC_LEARNING_API Train a small fully-connected network on MNIST, then "dream".
%   Trains a 784 -> 50 -> 10 sigmoid network with softmax training, then runs
%   gradient descent on the INPUT pixels so the network's output is pushed
%   toward a fixed target class, displaying the evolving image each step.
%   input_args and output_args are unused; the signature is kept unchanged
%   for compatibility with existing callers.
startup();

% Load MNIST; rows are samples, columns are the 28x28 = 784 pixel values.
[train_input, train_classes, test_input, test_classes] = GenerateDatasetMNIST();

hidden_neurons_count = 50;
output_neurons_count = 10;          % one output unit per digit class
input_dim = size(train_input, 2);   % 784 for MNIST

rng(0, 'v5uniform');                % fixed seed for reproducible weights

learningRate = 0.5;
momentum     = 0.9;
weightDecay  = 0.0005;

% Build the network: input -> FC -> sigmoid -> FC -> sigmoid.
nn = network();
nn.addLayer(LayerInput(input_dim), {});
nn.addLayer(LayerFC(input_dim, hidden_neurons_count, WeightFillerGaussian(0.1)), ...
    GradientUpdaterUsingMomentumAndWeightDecay(learningRate, momentum, weightDecay));
nn.addLayer(LayerActivationSigmoid, {});
nn.addLayer(LayerFC(hidden_neurons_count, output_neurons_count, WeightFillerGaussian(0.1)), ...
    GradientUpdaterUsingMomentumAndWeightDecay(learningRate, momentum, weightDecay));
nn.addLayer(LayerActivationSigmoid, {});

epochs = 5;
minibatchSize = 64;
trainSoftmaxNetwork(nn, epochs, minibatchSize, train_input, train_classes);

% Seed the dream with a real training image (a random image also works):
%rand_input = rand(1,784);
rand_input = train_input(2,:);

% FIX: the loss compares the 10-dim network output against the 10-dim target
% vector below, so the loss layer must be sized to the OUTPUT dimension --
% the original passed size(train_input,2) (784, the input dimension).
lossLayer = LossEuclidean(output_neurons_count);
answers = [0 0 0 0 0 0 0 0 0 1];    % target one-hot vector: last class active

for i = 1:100
    % Show the current dreamed image, upscaled 4x for visibility.
    img = reshape(rand_input, [28 28]);
    img = imresize(img, 4, 'nearest');
    imshow(img);
    pause(0.2);

    % Forward pass, then backprop the loss derivative to obtain the gradient
    % of the loss with respect to the input pixels (grads{1}).
    outputs = nn.forwardPropogate(rand_input);  % NOTE: "Propogate" is the project API's spelling
    output_last = outputs{end};
    loss = lossLayer.computeLoss(output_last, answers);  %#ok<NASGU> kept for inspection/debugging
    grads = nn.backPropagate1(outputs, lossLayer.computeDerivative(output_last, answers));

    % Gradient-descent step on the input image (unit step size, as original).
    rand_input = rand_input - grads{1};

    disp(output_last);
end
end