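----------------------------------------------------------------------
-- train.lua
--
-- Training script: a frozen feature extractor (fixedCNN) feeds a
-- trainable network (featureCNN) with several output heads, each
-- scored by its own loss layer. Judging by the error printout in
-- feval below, odd-numbered heads carry confidence losses and
-- even-numbered heads localization losses. Optimized with Adam;
-- model snapshots are saved after epoch 30.
----------------------------------------------------------------------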
require('paths') -- used below via paths.dofile; made explicit
require('cunn')
require('optim')
require('xlua')
-- init
torch.setdefaulttensortype('torch.FloatTensor')
local DataLoader = paths.dofile('data_loader.lua')
-- global state shared by doTrain() and main()
g = {}
local model = paths.dofile('model.lua')
g.fixedCNN = model.fixedCNN     -- frozen feature extractor
g.featureCNN = model.featureCNN -- trainable network
g.lossLayers = model.lossLayers -- one criterion per output head
g.modelInfo = model.info
g.dataLoader = DataLoader.new(4, 'data/allSamples.t7', g.modelInfo)
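-- As used in doTrain below, getBatch() returns:
--   batch[1] = input images (moved to GPU),
--   batch[2] = per-head target tensors,
--   batch[3] = per-head masks applied to outputs/gradients.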
local doTrain = function(itnum)
  print(">>>>>>>>>>>>> TRAINING >>>>>>>>>>>>>")
  g.featureCNN:training()
  -- flattened view of all trainable weights and their gradients;
  -- safe to call repeatedly here since the network layout never changes
  local parameters, gradParameters = g.featureCNN:getParameters()
  local xbatch, ybatch, masks = nil, nil, nil
  local echo = false
  local feval = function(x)
    if x ~= parameters then
      parameters:copy(x)
    end
    -- reset gradients
    gradParameters:zero()
    -- the fixed CNN is forward-only: no backward pass reaches it
    local xinput = g.fixedCNN:forward(xbatch)
    local f = g.featureCNN:forward(xinput)
    local losses = {}
    local dfs = {}
    for i = 1, #f do
      -- even-numbered (localization) heads are masked before the loss;
      -- odd-numbered (confidence) heads have their gradients masked instead
      if i % 2 == 0 then
        f[i]:cmul(masks[i])
      end
      local loss = g.lossLayers[i]:forward(f[i], ybatch[i])
      local df = g.lossLayers[i]:backward(f[i], ybatch[i])
      if i % 2 == 1 then
        df:cmul(masks[i])
      end
      table.insert(losses, loss)
      table.insert(dfs, df)
    end
    g.featureCNN:backward(xinput, dfs)
    -- optim expects a scalar loss; sum the per-head losses
    local totalLoss = 0
    for i = 1, #losses do
      totalLoss = totalLoss + losses[i]
    end
    if echo then
      local locErr = 0
      local confErr = 0
      for i = 1, #losses do
        if i % 2 == 1 then
          confErr = confErr + losses[i] -- odd heads: confidence
        else
          locErr = locErr + losses[i]   -- even heads: localization
        end
      end
      print("")
      print(confErr, locErr)
    end
    return totalLoss, gradParameters
  end
  for i = 1, itnum do
    local batch = g.dataLoader:getBatch()
    xbatch = batch[1]:cuda()
    ybatch = batch[2]
    masks = batch[3]
    for j = 1, #ybatch do
      ybatch[j] = ybatch[j]:cuda()
      masks[j] = masks[j]:cuda()
    end
    -- print the per-head errors every 10th iteration
    echo = (i % 10 == 0)
    g.optim(feval, parameters, g.optimState)
    collectgarbage()
    xlua.progress(i, itnum)
  end
end
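-- main: set up Adam, move the model and criteria to the GPU,
-- then run 50 epochs of 1000 iterations each.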
local main = function()
  g.optim = optim.adam
  g.optimState = {
    learningRate = 0.0001
  }
  -- cuda
  g.fixedCNN:cuda()
  g.featureCNN:cuda()
  for i = 1, #g.lossLayers do
    g.lossLayers[i]:cuda()
  end
  for e = 1, 50 do
    doTrain(1000)
    if e > 30 then
      torch.save('models/model_' .. e .. '.t7', g.featureCNN)
    end
  end
end
main()
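-- Run with the Torch launcher, e.g. `th train.lua` (assumes a
-- CUDA-capable GPU, since all networks are moved to :cuda()).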