Commit a16b8974 authored by Konstantinos Chatzilygeroudis's avatar Konstantinos Chatzilygeroudis
Browse files

Fix for gp nodim bug

parent 128667f9
......@@ -64,11 +64,13 @@ namespace limbo {
template <typename StateFunction, typename AggregatorFunction = FirstElem>
void optimize(const StateFunction& sfun, const AggregatorFunction& afun = AggregatorFunction(), bool reset = true)
{
this->_init(sfun, afun, reset);
if (!this->_observations.empty())
_model.compute(this->_samples, this->_observations, Params::bayes_opt_boptimizer::noise(), this->_bl_samples);
else
_model = model_t(StateFunction::dim_in, StateFunction::dim_out);
acqui_optimizer_t acqui_optimizer;
......
......@@ -19,28 +19,30 @@ namespace limbo {
class GP {
public:
GP() : _dim_in(-1), _dim_out(-1) {}
// useful because the model might be created before having samples
GP(int dim_in, int dim_out)
: _dim_in(dim_in), _dim_out(dim_out), _kernel_function(dim_in), _mean_function(dim_out) {}
void compute(const std::vector<Eigen::VectorXd>& samples,
const std::vector<Eigen::VectorXd>& observations, double noise,
const std::vector<Eigen::VectorXd>& bl_samples = std::vector<Eigen::VectorXd>())
{
//should be checked each time! not only the first time
assert(samples.size() != 0);
assert(observations.size() != 0);
assert(samples.size() == observations.size());
if(_dim_in != samples[0].size())
{
_dim_in = samples[0].size();
_kernel_function=KernelFunction(_dim_in); // the cost of building a functor should be relatively low
}
if(_dim_out != observations[0].size())
{
_dim_out = observations[0].size();
_mean_function = MeanFunction(_dim_out); // the cost of building a functor should be relatively low
}
//should be checked each time! not only the first time
assert(samples.size() != 0);
assert(observations.size() != 0);
assert(samples.size() == observations.size());
if (_dim_in != samples[0].size()) {
_dim_in = samples[0].size();
_kernel_function = KernelFunction(_dim_in); // the cost of building a functor should be relatively low
}
if (_dim_out != observations[0].size()) {
_dim_out = observations[0].size();
_mean_function = MeanFunction(_dim_out); // the cost of building a functor should be relatively low
}
_samples = samples;
_observations.resize(observations.size(), _dim_out);
for (int i = 0; i < _observations.rows(); ++i)
......
......@@ -9,6 +9,8 @@
#include <limbo/mean/constant.hpp>
#include <limbo/model/gp.hpp>
#include <limbo/model/gp/kernel_lf_opt.hpp>
#include <limbo/acqui/ucb.hpp>
#include <limbo/opt/grid_search.hpp>
using namespace limbo;
......@@ -21,9 +23,9 @@ Eigen::VectorXd make_v1(double x)
// Test helper: pack two scalars into a 2-D Eigen vector.
// (The scraped diff had the old and new bodies interleaved, which
// redeclared `v2` and left statements after `return`; this is the
// deduplicated, post-commit version.)
Eigen::VectorXd make_v2(double x1, double x2)
{
    Eigen::VectorXd v2(2);
    v2 << x1, x2; // Eigen comma-initializer fills both coefficients
    return v2;
}
struct Params {
......@@ -40,33 +42,37 @@ struct Params {
struct opt_parallelrepeater : public defaults::opt_parallelrepeater {
};
struct acqui_ucb : public defaults::acqui_ucb {
};
struct opt_gridsearch : public defaults::opt_gridsearch {
};
};
// Regression test for the "no-dim" bug: a GP default-constructed without
// input/output dimensions must infer them from the first call to compute().
// (The scraped diff had the old and new test bodies interleaved, which
// redeclared every local; this is the deduplicated, post-commit version.)
BOOST_AUTO_TEST_CASE(test_gp_dim)
{
    using namespace limbo;

    typedef kernel::MaternFiveHalfs<Params> KF_t;
    typedef mean::Constant<Params> Mean_t;
    typedef model::GP<Params, KF_t, Mean_t> GP_t;

    GP_t gp; // no init with dim — dimensions must be deduced in compute()

    std::vector<Eigen::VectorXd> observations = {make_v2(5, 5), make_v2(10, 10),
        make_v2(5, 5)};
    std::vector<Eigen::VectorXd> samples = {make_v2(1, 1), make_v2(2, 2), make_v2(3, 3)};

    gp.compute(samples, observations, 0.0);

    Eigen::VectorXd mu;
    double sigma;
    std::tie(mu, sigma) = gp.query(make_v2(1, 1));
    // With zero noise the GP interpolates: the mean at a training sample
    // should be close to its observation, with near-zero predictive variance.
    BOOST_CHECK(std::abs((mu(0) - 5)) < 1);
    BOOST_CHECK(std::abs((mu(1) - 5)) < 1);
    BOOST_CHECK(sigma < 1e-5);
}
BOOST_AUTO_TEST_CASE(test_gp)
......@@ -109,6 +115,39 @@ BOOST_AUTO_TEST_CASE(test_gp)
}
}
// Checks that the acquisition-function optimization pipeline runs on a GP
// that was constructed with explicit dimensions but given no samples yet.
BOOST_AUTO_TEST_CASE(test_gp_no_samples_acqui_opt)
{
    using namespace limbo;

    // Minimal aggregator: collapse a vector to its first coordinate.
    struct FirstElem {
        typedef double result_type;
        double operator()(const Eigen::VectorXd& v) const
        {
            return v(0);
        }
    };

    typedef opt::GridSearch<Params> acquiopt_t;
    typedef kernel::SquaredExpARD<Params> KF_t;
    typedef mean::Constant<Params> Mean_t;
    typedef model::GP<Params, KF_t, Mean_t> GP_t;
    typedef acqui::UCB<Params, GP_t> acquisition_function_t;

    GP_t gp(2, 2);

    acquisition_function_t acqui(gp, 0);
    acquiopt_t acqui_optimizer;

    // we do not have gradient in our current acquisition function
    auto acqui_fn = [&](const Eigen::VectorXd& point, bool gradient) {
        return opt::no_grad(acqui(point, FirstElem()));
    };

    Eigen::VectorXd init = tools::random_vector(2);
    Eigen::VectorXd best = acqui_optimizer(acqui_fn, init, true);

    // Grid search over the unit box should land at the origin here.
    BOOST_CHECK(best(0) < 1e-5);
    BOOST_CHECK(best(1) < 1e-5);
}
BOOST_AUTO_TEST_CASE(test_gp_blacklist)
{
using namespace limbo;
......
Supports Markdown
0% or .
You are about to add 0 people to the discussion. Proceed with caution.
Finish editing this message first!
Please register or sign in to comment