% DEMTOYPROBLEM5 Generate an artificial data set and solve with GPSIM.
% SHEFFIELDML
load demToyProblem5.mat
bw = true;
% The protein (TF) function is a sum of Gaussian bumps of the form
% \sum_i \alpha_i \exp(-(t - \mu_i)^2/\sigma_i^2)
alpha = [1.5 1.5 .5 .5];
mu = [4 6 8.5 10.5];
sigma = [1.5 1.5 1.5 1.5];
% Properties of genes: basal rates (B), sensitivities (S) and decay rates (D).
B = [eps 0.075 0.0025];
S = [1.0 0.400 0.4000];
D = [1.0 0.050 0.0010];
t = linspace(0, 18, 100)';
numGenes = length(D);
truef = gpsimArtificialProtein(t, alpha, mu, sigma);
truey = gpsimArtificialGenes(t, alpha, mu, sigma, B, S, D);
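
% The artificial genes presumably follow the linear activation model that
% GPSIM is built on, with f(t) the protein profile above:
%   dy_j(t)/dt = B_j + S_j*f(t) - D_j*y_j(t)
% truef and truey therefore give noise-free ground truth on the dense grid t,
% used to evaluate the predictions below.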
% Get the default options structure.
options = gpsimOptions;
% Learn noise level
options.includeNoise = true;
options.singleNoise = true;
% Prior information for the TF
options.proteinPrior = [0]'; % Assuming that f=0 at t=0
options.proteinPriorTimes = [0]';
options.optimiser = 'scg';
% Fix RBF variance.
options.fix(1).index = 2; % RBF variance
options.fix(1).value = expTransform(1, 'xtoa');
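
% Fixing the RBF variance at 1 removes a scale that would otherwise trade off
% against the gene sensitivities S; expTransform(1, 'xtoa') maps the value
% into the optimiser's unconstrained (log) parameter space.
%
% The loops below sweep the repeat index, the number of observations per gene
% and the observation noise variance; results are written back to
% demToyProblem5.mat after every setting.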
for repNum = 10
  counter1 = 0;
  rand('seed', repNum*1e6);
  randn('seed', repNum*1e6);
  for numObs = [2 5 10 15 20]
    counter1 = counter1 + 1;
    counter2 = 0;
    for noiseLevel = [0.0001 0.001 0.01 0.1 1]
      counter2 = counter2 + 1;
      datat = rand(numObs, 1)*16;
      dataY = gpsimArtificialGenes(datat, alpha, mu, sigma, ...
                                   B, S, D);
      model{counter1, counter2, repNum} = gpsimCreate(numGenes, 1, datat, dataY, zeros(size(dataY)), options);
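      % gpsimCreate is given a shared set of random times (datat) for all
      % genes; the loop below then replaces each gene's times with its own
      % draw on [0, 16] and overwrites the stacked targets with the
      % corresponding artificial expression values plus Gaussian noise of
      % variance noiseLevel. Index 1 of timesCell is left alone, as it
      % presumably holds the protein prior times from options.proteinPriorTimes.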
      startInd = 1;
      endInd = 0;
      for i = 2:numGenes+1
        endInd = endInd + numObs;
        model{counter1, counter2, repNum}.timesCell{i} = rand(numObs, 1)*16;
        temp = gpsimArtificialGenes(model{counter1, counter2, repNum}.timesCell{i}, alpha, mu, sigma, ...
                                    B, S, D);
        model{counter1, counter2, repNum}.y(startInd:endInd) = temp(:, i-1) + randn(numObs, 1)*sqrt(noiseLevel);
        startInd = endInd + 1;
      end
      model{counter1, counter2, repNum} = gpsimOptimise(model{counter1, counter2, repNum}, 1, 5000);
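      % With the parameters optimised, form the GP posterior over the protein
      % and the genes on the dense grid t by conditioning on the observations
      % held in model.m.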
      K = kernCompute(model{counter1, counter2, repNum}.kern, model{counter1, counter2, repNum}.timesCell);
      invK = pdinv(K);
      predt = cell(3, 1);
      for i = 1:length(model{counter1, counter2, repNum}.timesCell)
        predt{i} = t;
      end
      Kstar = kernCompute(model{counter1, counter2, repNum}.kern.comp{1}, predt, model{counter1, counter2, repNum}.timesCell);
      obsY = model{counter1, counter2, repNum}.m;
      startInd = 1;
      endInd = 0;
      for i = 1:length(model{counter1, counter2, repNum}.timesCell)
        endInd = endInd + length(model{counter1, counter2, repNum}.timesCell{i});
        startInd = endInd + 1;
      end
      invK = pdinv(K);
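      % Standard GP conditional, with K the training covariance, Kstar the
      % test/training cross-covariance and KstarStar the test covariance:
      %   mean  = Kstar*inv(K)*y
      %   covar = KstarStar - Kstar*inv(K)*Kstar'
      % The reshape to 100 x 4 splits the stacked posterior mean into the
      % protein (column 1) and the three genes on the 100-point grid.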
      ypred = reshape(real(Kstar*invK*obsY), 100, 4);
      yvar = reshape(real(kernDiagCompute(model{counter1, counter2, repNum}.kern.comp{1}, predt) - sum(Kstar'.* ...
                          (invK*Kstar'), 1)'), ...
                     100, 4);
      KstarStar = kernCompute(model{counter1, counter2, repNum}.kern.comp{1}, predt);
      ycovar = KstarStar - Kstar*invK*Kstar';
      numSamps = 10;
      ycovar = 0.5*(ycovar + ycovar');
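      % Score the protein reconstruction: mse is the mean squared error of the
      % posterior mean against truef, and tll is the Gaussian log density of
      % the true protein under the posterior,
      %   -n/2*log(2*pi) - 1/2*log|C| - 1/2*e'*inv(C)*e,
      % using only the protein block ycovar(1:length(t), 1:length(t)).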
      fhat = ypred(:, 1) - truef;
      mse(counter1, counter2, repNum) = sum(fhat.*fhat)/length(fhat);
      [invCov, U] = pdinv(ycovar(1:length(t), 1:length(t)));
      tll(counter1, counter2, repNum) = -length(truef)*.5*log(2*pi) - 0.5*logdet(ycovar(1:length(t), 1:length(t)), U) ...
          - 0.5*(fhat'*invCov*fhat);
      noiseLevels(counter1, counter2, repNum) = noiseLevel;
      numObservations(counter1, counter2, repNum) = numObs;
      save demToyProblem5.mat model tll mse noiseLevels numObservations
    end
  end
end
mVal = mean(mse(:, :, 1:8), 3)';
stdVal = std(mse(:, :, 1:8), [], 3)';
errorbar(mVal, 2*stdVal)
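
% The error bar plot summarises MSE over the repeats included (here
% mse(:, :, 1:8)): each column of mVal traces MSE against the noise-level
% index for one setting of the number of observations, with bars of two
% standard deviations.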