Commit d83615b9 (limbo)
Authored Oct 05, 2016 by Konstantinos Chatzilygeroudis; committed by GitHub on Oct 05, 2016

Merge pull request #175 from resibots/check_grad

Gradient check test via finite differences

Parents: ca437ae9, 0bae270f
Changes: 4 files
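The test introduced by this PR is the standard central-difference gradient check: perturb each coordinate of the input by ±e and compare (f(x + e·e_j) − f(x − e·e_j)) / (2e) against the analytic gradient returned by the objective. As orientation before the diff itself, here is a minimal, self-contained sketch of that recipe using plain Eigen and a toy quadratic objective; the names (quadratic, finite_diff_error) are illustrative only and are not limbo code. The actual helper added by this PR is check_grad() in src/tests/test_gp.cpp below.

#include <Eigen/Dense>
#include <iostream>

// Toy objective with a known analytic gradient: f(x) = 0.5 * ||x||^2, grad f(x) = x.
double quadratic(const Eigen::VectorXd& x, Eigen::VectorXd& grad)
{
    grad = x;
    return 0.5 * x.squaredNorm();
}

// Norm of the difference between the analytic gradient and a central
// finite-difference estimate computed coordinate by coordinate.
double finite_diff_error(const Eigen::VectorXd& x, double e = 1e-4)
{
    Eigen::VectorXd analytic;
    quadratic(x, analytic);

    Eigen::VectorXd numeric = Eigen::VectorXd::Zero(x.size());
    Eigen::VectorXd unused;
    for (int j = 0; j < x.size(); j++) {
        Eigen::VectorXd xm = x, xp = x;
        xm[j] -= e;
        xp[j] += e;
        numeric[j] = (quadratic(xp, unused) - quadratic(xm, unused)) / (2.0 * e);
    }
    return (analytic - numeric).norm();
}

int main()
{
    // For a smooth objective the discrepancy should be far below e.
    std::cout << finite_diff_error(Eigen::VectorXd::Random(5)) << std::endl;
    return 0;
}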
src/limbo/model/gp/kernel_lf_opt.hpp
@@ -62,8 +62,8 @@ namespace limbo {
             this->_called = true;
             KernelLFOptimization<GP> optimization(gp);
             Optimizer optimizer;
-            auto params = optimizer(optimization, (gp.kernel_function().h_params().array() + 6.0) / 7.0, true);
-            gp.kernel_function().set_h_params(-6.0 + params.array() * 7.0);
+            auto params = optimizer(optimization, gp.kernel_function().h_params(), false);
+            gp.kernel_function().set_h_params(params);
             gp.set_lik(opt::eval(optimization, params));
             gp.recompute(false);
         }
...
@@ -77,7 +77,7 @@ namespace limbo {
             opt::eval_t operator()(const Eigen::VectorXd& params, bool compute_grad) const
             {
                 GP gp(this->_original_gp);
-                gp.kernel_function().set_h_params(-6.0 + params.array() * 7.0);
+                gp.kernel_function().set_h_params(params);
                 gp.recompute(false);
...
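The substantive change in this file, mirrored in kernel_mean_lf_opt.hpp and mean_lf_opt.hpp below, is that the likelihood optimization now works directly on the raw hyperparameters returned by h_params(): the old code first mapped them through (h + 6) / 7 (an affine map sending [-6, 1] onto [0, 1]), ran the optimizer with its boolean third argument set to true, and mapped the result back with -6 + 7·p; the new code passes h_params() unchanged and flips that argument to false, so the functor's operator() in the second hunk also receives untransformed values, the same coordinates the gradient-check test probes. A tiny illustrative sketch of the two affine maps that were removed (the function names here are made up for illustration, not limbo API):

#include <Eigen/Dense>

// Old-code convention: h is assumed to lie roughly in [-6, 1],
// p = (h + 6) / 7 is its image in [0, 1]; the second map inverts the first.
Eigen::VectorXd to_unit_interval(const Eigen::VectorXd& h)
{
    return ((h.array() + 6.0) / 7.0).matrix();
}

Eigen::VectorXd from_unit_interval(const Eigen::VectorXd& p)
{
    return (-6.0 + p.array() * 7.0).matrix();
}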
src/limbo/model/gp/kernel_mean_lf_opt.hpp
@@ -64,11 +64,11 @@ namespace limbo {
             Optimizer optimizer;
             int dim = gp.kernel_function().h_params_size() + gp.mean_function().h_params_size();
             Eigen::VectorXd init(dim);
-            init.head(gp.kernel_function().h_params_size()) = (gp.kernel_function().h_params().array() + 6.0) / 7.0;
-            init.tail(gp.mean_function().h_params_size()) = (gp.mean_function().h_params().array() + 6.0) / 7.0;
-            auto params = optimizer(optimization, init, true);
-            gp.kernel_function().set_h_params(-6.0 + params.head(gp.kernel_function().h_params_size()).array() * 7.0);
-            gp.mean_function().set_h_params(-6.0 + params.tail(gp.mean_function().h_params_size()).array() * 7.0);
+            init.head(gp.kernel_function().h_params_size()) = gp.kernel_function().h_params();
+            init.tail(gp.mean_function().h_params_size()) = gp.mean_function().h_params();
+            auto params = optimizer(optimization, init, false);
+            gp.kernel_function().set_h_params(params.head(gp.kernel_function().h_params_size()));
+            gp.mean_function().set_h_params(params.tail(gp.mean_function().h_params_size()));
             gp.set_lik(opt::eval(optimization, params));
             gp.recompute(true);
         }
...
@@ -82,8 +82,8 @@ namespace limbo {
             opt::eval_t operator()(const Eigen::VectorXd& params, bool compute_grad) const
             {
                 GP gp(this->_original_gp);
-                gp.kernel_function().set_h_params(-6.0 + params.head(gp.kernel_function().h_params_size()).array() * 7.0);
-                gp.mean_function().set_h_params(-6.0 + params.tail(gp.mean_function().h_params_size()).array() * 7.0);
+                gp.kernel_function().set_h_params(params.head(gp.kernel_function().h_params_size()));
+                gp.mean_function().set_h_params(params.tail(gp.mean_function().h_params_size()));
                 gp.recompute(true);
...
src/limbo/model/gp/mean_lf_opt.hpp
@@ -62,8 +62,8 @@ namespace limbo {
             this->_called = true;
             MeanLFOptimization<GP> optimization(gp);
             Optimizer optimizer;
-            auto params = optimizer(optimization, (gp.mean_function().h_params().array() + 6.0) / 7.0, true);
-            gp.mean_function().set_h_params(-6.0 + params.array() * 7.0);
+            auto params = optimizer(optimization, gp.mean_function().h_params(), false);
+            gp.mean_function().set_h_params(params);
             gp.set_lik(opt::eval(optimization, params));
             gp.recompute(true);
         }
...
@@ -77,7 +77,7 @@ namespace limbo {
             opt::eval_t operator()(const Eigen::VectorXd& params, bool compute_grad) const
             {
                 GP gp(this->_original_gp);
-                gp.mean_function().set_h_params(-6.0 + params.array() * 7.0);
+                gp.mean_function().set_h_params(params);
                 gp.recompute(true);
...
src/tests/test_gp.cpp
@@ -44,6 +44,7 @@
 //|
 #define BOOST_TEST_DYN_LINK
 #define BOOST_TEST_MODULE test_gp
+#define protected public
 #include <boost/test/unit_test.hpp>
...
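The added #define protected public line is a common unit-testing shortcut: because the macro is defined before the Boost and limbo headers are included, members that are normally protected, presumably the nested KernelLFOptimization / KernelMeanLFOptimization / MeanLFOptimization functors instantiated further down in this test, become directly constructible from the test body. A stand-alone illustration of the effect with hypothetical types (not limbo code); note that redefining a keyword is formally disallowed once standard headers are involved, which is why this trick is usually confined to test files:

// Illustrative only, not limbo code.
#define protected public // must appear before the classes below are parsed

struct KernelOpt {
protected:
    // Stand-in for a nested helper that a test wants to poke at directly.
    struct Optimization {
        int dummy = 0;
    };
};

#undef protected

int main()
{
    KernelOpt::Optimization opt; // compiles only because of the define above
    return opt.dummy;
}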
@@ -53,13 +54,38 @@
 #include <limbo/kernel/exp.hpp>
 #include <limbo/kernel/squared_exp_ard.hpp>
 #include <limbo/mean/constant.hpp>
+#include <limbo/mean/function_ard.hpp>
 #include <limbo/model/gp.hpp>
 #include <limbo/model/gp/kernel_lf_opt.hpp>
+#include <limbo/model/gp/kernel_mean_lf_opt.hpp>
+#include <limbo/model/gp/mean_lf_opt.hpp>
 #include <limbo/opt/grid_search.hpp>
 #include <limbo/tools/macros.hpp>

 using namespace limbo;

+// Check gradient via finite differences method
+template <typename F>
+std::tuple<double, Eigen::VectorXd, Eigen::VectorXd> check_grad(const F& f, const Eigen::VectorXd& x, double e = 1e-4)
+{
+    Eigen::VectorXd analytic_result, finite_diff_result;
+
+    opt::eval_t res = f(x, true);
+    analytic_result = opt::grad(res);
+
+    finite_diff_result = Eigen::VectorXd::Zero(x.size());
+    for (int j = 0; j < x.size(); j++) {
+        Eigen::VectorXd test1 = x, test2 = x;
+        test1[j] -= e;
+        test2[j] += e;
+        double res1 = opt::fun(f(test1, false));
+        double res2 = opt::fun(f(test2, false));
+        finite_diff_result[j] = (res2 - res1) / (2.0 * e);
+    }
+
+    return std::make_tuple((analytic_result - finite_diff_result).norm(), analytic_result, finite_diff_result);
+}
+
 Eigen::VectorXd make_v1(double x) { return tools::make_vector(x);
...
@@ -97,6 +123,68 @@ struct Params {
     };
 };

+BOOST_AUTO_TEST_CASE(test_gp_check_lf_grad)
+{
+    using namespace limbo;
+    typedef kernel::SquaredExpARD<Params> KF_t;
+    typedef mean::FunctionARD<Params, mean::Constant<Params>> Mean_t;
+    typedef model::GP<Params, KF_t, Mean_t> GP_t;
+
+    GP_t gp(4, 2);
+
+    std::vector<Eigen::VectorXd> observations, samples, test_samples, test_samples_mean, test_samples_kernel_mean;
+    double e = 1e-4;
+    // Random samples and test samples
+    int N = 40, M = 10;
+
+    for (size_t i = 0; i < N; i++) {
+        samples.push_back(tools::random_vector(4));
+        observations.push_back(tools::random_vector(2));
+    }
+
+    for (size_t i = 0; i < M; i++) {
+        test_samples.push_back(tools::random_vector(4));
+        test_samples_mean.push_back(tools::random_vector(6));
+        test_samples_kernel_mean.push_back(tools::random_vector(6 + 4));
+    }
+
+    gp.compute(samples, observations, Eigen::VectorXd::Ones(samples.size()) * 0.01);
+
+    model::gp::KernelLFOpt<Params>::KernelLFOptimization<GP_t> kernel_optimization(gp);
+
+    Eigen::VectorXd results(M);
+    for (size_t i = 0; i < M; i++) {
+        auto res = check_grad(kernel_optimization, test_samples[i], 1e-4);
+        results(i) = std::get<0>(res);
+        // std::cout << std::get<1>(res).transpose() << " vs " << std::get<2>(res).transpose() << " --> " << results(i) << std::endl;
+    }
+
+    BOOST_CHECK(results.array().sum() < M * e);
+
+    model::gp::KernelMeanLFOpt<Params>::KernelMeanLFOptimization<GP_t> kernel_mean_optimization(gp);
+
+    for (size_t i = 0; i < M; i++) {
+        auto res = check_grad(kernel_mean_optimization, test_samples_kernel_mean[i], 1e-4);
+        results(i) = std::get<0>(res);
+        // std::cout << std::get<1>(res).transpose() << " vs " << std::get<2>(res).transpose() << " --> " << results(i) << std::endl;
+    }
+
+    BOOST_CHECK(results.array().sum() < M * e);
+
+    model::gp::MeanLFOpt<Params>::MeanLFOptimization<GP_t> mean_optimization(gp);
+
+    for (size_t i = 0; i < M; i++) {
+        auto res = check_grad(mean_optimization, test_samples_mean[i], 1e-4);
+        results(i) = std::get<0>(res);
+        // std::cout << std::get<1>(res).transpose() << " vs " << std::get<2>(res).transpose() << " --> " << results(i) << std::endl;
+    }
+
+    BOOST_CHECK(results.array().sum() < M * e);
+}
+
 BOOST_AUTO_TEST_CASE(test_gp_dim)
 {
     using namespace limbo;
...