dagnn_3dmmasstn_init.m
function net = dagnn_3dmmasstn_init(model,idx,opts)
% DAGNN_3DMMASSTN_INIT Initialise the 3DMM-as-STN network as a DagNN.
%   net = dagnn_3dmmasstn_init(model, idx, opts) builds the network on top
%   of a pretrained VGG-Face model. 'model' is the 3D morphable model
%   struct (model.faces is used by the visibility mask layer), 'idx' is
%   passed to the selection layer to pick the landmark vertices, and
%   'opts' supplies dataDir, theta_learningRate, thetab_weightDecay,
%   learningRate, batchSize and numEpochs.
% Start from the pretrained VGG-Face network.
net = load([opts.dataDir, '/vgg-face.mat']);
net.layers = net.layers(1:end-2); % remove the final fully-connected and softmax layers so the network now ends at the last ReLU
% Add a 1x1 convolution that regresses the 16-dimensional theta vector.
weightsandbias = xavier(1,1,4096,16);
weightsandbias{1} = weightsandbias{1}.*0.001; % small initial weights so theta starts close to its bias
weightsandbias{2}(4:5) = 112;                 % bias elements 4:5 (the 2D translation) start at 112, the centre of the 224x224 input
net.layers{end+1} = struct('name', 'theta', ...
                           'type', 'conv', ...
                           'weights', {weightsandbias}, ...
                           'stride', 1, ...
                           'pad', 0, ...
                           'learningRate', opts.theta_learningRate) ;
net = vl_simplenn_tidy(net) ;
net = dagnn.DagNN.fromSimpleNN(net, 'canonicalNames', true) ;
% Set the weight decay on the bias of the theta layer.
thetab_idx = net.getParamIndex('thetab');
net.params(thetab_idx).weightDecay = opts.thetab_weightDecay;
% Split layer: separate the theta vector into shape parameters (alpha), rotation (r), 2D translation (t) and log-scale (logs).
splitlayer = split();
net.addLayer('spl', splitlayer,{'x36'},{'alpha','r','t','logs'});
% 3D morphable model layer: generate 3D vertices from the shape parameters alpha.
model3Dlayer = model3D('model',model);
net.addLayer('mod', model3Dlayer,{'alpha'},{'x38'});
% r2R layer: convert the rotation vector r into a rotation matrix R.
r2RLayer = r2R();
net.addLayer('r2R', r2RLayer,{'r'},{'R'});
% Rotation layer: apply R to the 3D vertices.
rotate3DLayer = rotate3D();
net.addLayer('rot', rotate3DLayer,{'x38','R'},{'x39'});
% Projection layer: project the rotated 3D vertices to 2D.
projectionLayer = projection();
net.addLayer('proj', projectionLayer,{'x39'},{'x40'});
% logScale2Scale layer: convert the log-scale logs into the scale s.
logScale2ScaleLayer = logScale2Scale();
net.addLayer('logscal', logScale2ScaleLayer,{'logs'},{'s'});
% Scale layer: scale the projected 2D points by s.
scale2DLayer = scale2D();
net.addLayer('scal', scale2DLayer,{'x40','s'},{'x41'});
% Translation layer: translate the scaled 2D points by t.
translate2DLayer = translate2D();
net.addLayer('tran', translate2DLayer,{'x41','t'},{'x42'});
% Selection layer: select the landmark vertices given by idx.
selectionLayer = selection('idx',idx);
net.addLayer('sel', selectionLayer,{'x42'},{'pred'});
% Euclidean loss between the selected landmarks and the ground-truth labels.
euclideanLayer = euclidean();
net.addLayer('euc', euclideanLayer,{'pred','label'},{'objective1'});
% Alpha prior loss: sum of squared shape parameters.
sseLayer = sse();
net.addLayer('sse', sseLayer,{'alpha'},{'objective2'});
%%% Image resampling branch
% Resampling grid layer: build a sampling grid from the 2D model points.
gridLayer = resamplegrid();
net.addLayer('grid', gridLayer,{'x42'},{'x43'});
% Bilinear sampler: sample the input image at the grid locations.
BilinearSamplerLayer = dagnn.BilinearSampler();
net.addLayer('samp', BilinearSamplerLayer,{'input','x43'},{'x44'});
% Visibility mask layer: compute per-vertex visibility from the rotated 3D shape and the model faces.
visibilityMaskLayer = visibilitymask('faces',model.faces);
net.addLayer('mas', visibilityMaskLayer,{'x39'},{'mask'});
% Visibility layer: mask the resampled image with the visibility mask.
visibilityLayer = visibility();
net.addLayer('visib', visibilityLayer,{'x44','mask'},{'predgrid'});
% Siamese loss on the visibility-masked, resampled output.
siameseLayer = siamese();
net.addLayer('siam', siameseLayer,{'predgrid'},{'objective3'});
% Symmetry loss: penalises asymmetry in the resampled output.
symmetryLayer = symmetry();
net.addLayer('sym', symmetryLayer,{'predgrid'},{'objective4'});
net.rebuild();
% Network meta information and training options.
net.meta.inputSize = [224 224 3];
net.conserveMemory = true;
net.meta.trainOpts.learningRate = opts.learningRate;
net.meta.trainOpts.batchSize = opts.batchSize;
net.meta.trainOpts.numEpochs = opts.numEpochs;
end
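
A minimal usage sketch, not part of the original file: the file names, option values, and the source of model and idx are assumptions made for illustration; it only sets the opts fields that dagnn_3dmmasstn_init actually reads.

% Illustrative call to dagnn_3dmmasstn_init; values and file names are placeholders.
load('model.mat', 'model');        % hypothetical: 3D morphable model struct (must contain model.faces)
load('idx.mat', 'idx');            % hypothetical: indices of the landmark vertices

opts = struct();
opts.dataDir = 'data';             % directory containing vgg-face.mat
opts.theta_learningRate = [1 1];   % learning rates for the theta layer weights and bias (illustrative)
opts.thetab_weightDecay = 1;       % weight decay on the theta bias (illustrative)
opts.learningRate = 1e-4;          % training options (illustrative)
opts.batchSize = 32;
opts.numEpochs = 30;

net = dagnn_3dmmasstn_init(model, idx, opts);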