Commit 1850c5cc authored by Konstantinos Chatzilygeroudis's avatar Konstantinos Chatzilygeroudis
Browse files

Added test for multi-gp recompute

parent 6c826994
...@@ -54,8 +54,8 @@ namespace limbo { ...@@ -54,8 +54,8 @@ namespace limbo {
/// A wrapper for N-output Gaussian processes. /// A wrapper for N-output Gaussian processes.
/// It is parametrized by: /// It is parametrized by:
/// - GP class /// - GP class
/// - a kernel function /// - a kernel function (the same type for all GPs, but can have different parameters)
/// - a mean function /// - a mean function (the same type and parameters for all GPs)
/// - [optional] an optimizer for the hyper-parameters /// - [optional] an optimizer for the hyper-parameters
template <typename Params, template <typename, typename, typename, typename> class GPClass, typename KernelFunction, typename MeanFunction, class HyperParamsOptimizer = limbo::model::gp::NoLFOpt<Params>> template <typename Params, template <typename, typename, typename, typename> class GPClass, typename KernelFunction, typename MeanFunction, class HyperParamsOptimizer = limbo::model::gp::NoLFOpt<Params>>
class MultiGP { class MultiGP {
...@@ -69,7 +69,6 @@ namespace limbo { ...@@ -69,7 +69,6 @@ namespace limbo {
MultiGP(int dim_in, int dim_out) MultiGP(int dim_in, int dim_out)
: _dim_in(dim_in), _dim_out(dim_out), _mean_function(dim_out) : _dim_in(dim_in), _dim_out(dim_out), _mean_function(dim_out)
{ {
// initialize dim_in models with 1 output // initialize dim_in models with 1 output
_gp_models.resize(_dim_out); _gp_models.resize(_dim_out);
for (int i = 0; i < _dim_out; i++) { for (int i = 0; i < _dim_out; i++) {
......
...@@ -1112,3 +1112,45 @@ BOOST_AUTO_TEST_CASE(test_multi_gp_auto) ...@@ -1112,3 +1112,45 @@ BOOST_AUTO_TEST_CASE(test_multi_gp_auto)
BOOST_CHECK(sigma(0) <= 2. * (gp.gp_models()[0].kernel_function().noise() + 1e-8)); BOOST_CHECK(sigma(0) <= 2. * (gp.gp_models()[0].kernel_function().noise() + 1e-8));
BOOST_CHECK(sigma(1) <= 2. * (gp.gp_models()[1].kernel_function().noise() + 1e-8)); BOOST_CHECK(sigma(1) <= 2. * (gp.gp_models()[1].kernel_function().noise() + 1e-8));
} }
BOOST_AUTO_TEST_CASE(test_multi_gp_recompute)
{
    // Checks that the MultiGP wrapper keeps its raw observations intact,
    // and that the child GPs only pick up a changed mean function after
    // an explicit recompute().
    using Kernel_t = kernel::SquaredExpARD<Params>;
    using Mean_t = mean::Constant<Params>;
    using GP_t = model::MultiGP<Params, model::GP, Kernel_t, Mean_t>;

    GP_t multi_gp;
    multi_gp.add_sample(make_v2(1, 1), make_v1(2));
    multi_gp.add_sample(make_v2(2, 2), make_v1(10));

    // the wrapper stores the raw observations untouched
    BOOST_CHECK(multi_gp._observations[0](0) == 2.);
    BOOST_CHECK(multi_gp._observations[1](0) == 10.);

    // the children store the observations shifted by the mean value
    BOOST_CHECK(multi_gp.gp_models()[0]._observations.row(0)[0] == (2. - multi_gp.mean_function().h_params()[0]));
    BOOST_CHECK(multi_gp.gp_models()[0]._observations.row(1)[0] == (10. - multi_gp.mean_function().h_params()[0]));

    // changing the mean parameters alone must not touch anything yet...
    multi_gp.mean_function().set_h_params(make_v1(2));

    // ...the raw observations are still intact...
    BOOST_CHECK(multi_gp._observations[0](0) == 2.);
    BOOST_CHECK(multi_gp._observations[1](0) == 10.);

    // ...and the children are now stale w.r.t. the new mean parameters
    BOOST_CHECK(!(multi_gp.gp_models()[0]._observations.row(0)[0] == (2. - multi_gp.mean_function().h_params()[0])));
    BOOST_CHECK(!(multi_gp.gp_models()[0]._observations.row(1)[0] == (10. - multi_gp.mean_function().h_params()[0])));

    // recompute() pushes the updated mean down to every child GP
    multi_gp.recompute();

    // raw observations are unchanged, children are consistent again
    BOOST_CHECK(multi_gp._observations[0](0) == 2.);
    BOOST_CHECK(multi_gp._observations[1](0) == 10.);
    BOOST_CHECK(multi_gp.gp_models()[0]._observations.row(0)[0] == (2. - multi_gp.mean_function().h_params()[0]));
    BOOST_CHECK(multi_gp.gp_models()[0]._observations.row(1)[0] == (10. - multi_gp.mean_function().h_params()[0]));
}
Markdown is supported
0% or .
You are about to add 0 people to the discussion. Proceed with caution.
Finish editing this message first!
Please register or to comment