// test-trainers.cc — forked from clab/dynet (127 lines, 115 loc, 3.54 KB)
#include <dynet/dynet.h>
#include <dynet/expr.h>
#include <dynet/training.h>
#include <dynet/grad-check.h>
#include <boost/test/unit_test.hpp>
#include <stdexcept>
using namespace dynet;
using namespace dynet::expr;
using namespace std;
// Shared fixture for the trainer tests: initializes dynet once (with a tiny
// 10 MB memory pool) and provides the input/parameter values every test uses.
struct TrainerTest {
  TrainerTest() {
    // Initialize dynet only if no device exists yet; the guard makes the
    // fixture safe to construct once per test case within one process.
    if (default_device == nullptr) {
      // dynet::initialize wants a mutable argv, so build one from strdup'd
      // copies; ownership of the copies stays with `av` (freed in the dtor).
      for (auto x : {"TrainerTest", "--dynet-mem", "10"}) {
        av.push_back(strdup(x));
      }
      char **argv = &av[0];
      int argc = av.size();
      dynet::initialize(argc, argv);
    }
    ones_vals = {1.f, 1.f, 1.f};
    param_vals = {1.1f, -2.2f, 3.3f};
  }
  ~TrainerTest() {
    // Release the strdup'd argv copies created in the constructor.
    for (auto x : av) free(x);
  }
  // Render a vector as space-separated values, e.g. "1.1 -2.2 3.3".
  // Takes the vector by const reference — the original signature took it
  // by value, copying the whole container on every call for no benefit.
  template <class T>
  std::string print_vec(const std::vector<T>& vec) {
    ostringstream oss;
    if (vec.size()) oss << vec[0];
    for (size_t i = 1; i < vec.size(); i++)
      oss << ' ' << vec[i];
    return oss.str();
  }
  std::vector<float> ones_vals, param_vals;  // test inputs / initial weights
  std::vector<char*> av;                     // owned argv strings for dynet::initialize
};
// define the test suite
BOOST_FIXTURE_TEST_SUITE(trainer_test, TrainerTest);
BOOST_AUTO_TEST_CASE( simple_sgd_direction ) {
  // One SimpleSGD step along the gradient must lower the scalar objective.
  dynet::Model model;
  dynet::Parameter weights = model.add_parameters({3});
  TensorTools::SetElements(weights.get()->values, param_vals);
  SimpleSGDTrainer sgd(model);
  dynet::ComputationGraph graph;
  Expression w = parameter(graph, weights);
  Expression coeffs = input(graph, {1,3}, ones_vals);
  Expression objective = coeffs * w;
  const float value_before = as_scalar(graph.forward(objective));
  graph.backward(objective);
  sgd.update(0.1);
  const float value_after = as_scalar(graph.forward(objective));
  BOOST_CHECK_LT(value_after, value_before);
}
BOOST_AUTO_TEST_CASE( momentum_sgd_direction ) {
  // One MomentumSGD step along the gradient must lower the scalar objective.
  dynet::Model model;
  dynet::Parameter weights = model.add_parameters({3});
  TensorTools::SetElements(weights.get()->values, param_vals);
  MomentumSGDTrainer momentum(model);
  dynet::ComputationGraph graph;
  Expression w = parameter(graph, weights);
  Expression coeffs = input(graph, {1,3}, ones_vals);
  Expression objective = coeffs * w;
  const float value_before = as_scalar(graph.forward(objective));
  graph.backward(objective);
  momentum.update(0.1);
  const float value_after = as_scalar(graph.forward(objective));
  BOOST_CHECK_LT(value_after, value_before);
}
BOOST_AUTO_TEST_CASE( adagrad_direction ) {
  // One Adagrad step along the gradient must lower the scalar objective.
  dynet::Model model;
  dynet::Parameter weights = model.add_parameters({3});
  TensorTools::SetElements(weights.get()->values, param_vals);
  AdagradTrainer adagrad(model);
  dynet::ComputationGraph graph;
  Expression w = parameter(graph, weights);
  Expression coeffs = input(graph, {1,3}, ones_vals);
  Expression objective = coeffs * w;
  const float value_before = as_scalar(graph.forward(objective));
  graph.backward(objective);
  adagrad.update(0.1);
  const float value_after = as_scalar(graph.forward(objective));
  BOOST_CHECK_LT(value_after, value_before);
}
BOOST_AUTO_TEST_CASE( adadelta_direction ) {
  // One Adadelta step along the gradient must lower the scalar objective.
  dynet::Model model;
  dynet::Parameter weights = model.add_parameters({3});
  TensorTools::SetElements(weights.get()->values, param_vals);
  AdadeltaTrainer adadelta(model);
  dynet::ComputationGraph graph;
  Expression w = parameter(graph, weights);
  Expression coeffs = input(graph, {1,3}, ones_vals);
  Expression objective = coeffs * w;
  const float value_before = as_scalar(graph.forward(objective));
  graph.backward(objective);
  adadelta.update(0.1);
  const float value_after = as_scalar(graph.forward(objective));
  BOOST_CHECK_LT(value_after, value_before);
}
BOOST_AUTO_TEST_CASE( adam_direction ) {
  // One Adam step along the gradient must lower the scalar objective.
  dynet::Model model;
  dynet::Parameter weights = model.add_parameters({3});
  TensorTools::SetElements(weights.get()->values, param_vals);
  AdamTrainer adam(model);
  dynet::ComputationGraph graph;
  Expression w = parameter(graph, weights);
  Expression coeffs = input(graph, {1,3}, ones_vals);
  Expression objective = coeffs * w;
  const float value_before = as_scalar(graph.forward(objective));
  graph.backward(objective);
  adam.update(0.1);
  const float value_after = as_scalar(graph.forward(objective));
  BOOST_CHECK_LT(value_after, value_before);
}
BOOST_AUTO_TEST_SUITE_END()