Commit 19a1d274 authored by Konstantinos Chatzilygeroudis

BOptimizer bug fixes + tests

parent c83d4bfc
@@ -91,7 +91,7 @@ namespace limbo {
             this->_init(sfun, afun, reset);

             if (!this->_observations.empty())
-                _model.compute(this->_samples, this->_observations, Eigen::VectorXd::Constant(this->_observations.size(), Params::bayes_opt_boptimizer::noise()), this->_bl_samples);
+                _model.compute(this->_samples, this->_observations, Eigen::VectorXd::Constant(this->_observations.size(), Params::bayes_opt_boptimizer::noise()), this->_bl_samples, Eigen::VectorXd::Constant(this->_bl_samples.size(), Params::bayes_opt_boptimizer::noise()));
             else
                 _model = model_t(StateFunction::dim_in, StateFunction::dim_out);
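For context, the one-line change above passes an explicit noise vector for the blacklisted samples, sized to `_bl_samples`, alongside the existing observation-noise vector; the old four-argument call left the blacklist noise unspecified. A minimal sketch of the fixed call pattern, assuming a model type exposing the five-argument `compute()` shown above (`recompute` and its parameter names are illustrative, not part of the commit):

```cpp
#include <Eigen/Core>
#include <vector>

// Sketch only: Model stands in for the optimizer's model_t, and `noise`
// for the scalar Params::bayes_opt_boptimizer::noise() used in the diff.
template <typename Model>
void recompute(Model& model,
               const std::vector<Eigen::VectorXd>& samples,
               const std::vector<Eigen::VectorXd>& observations,
               const std::vector<Eigen::VectorXd>& bl_samples,
               double noise)
{
    // One noise entry per observation, as before.
    Eigen::VectorXd obs_noise = Eigen::VectorXd::Constant(observations.size(), noise);
    // The fix: one noise entry per blacklisted sample as well.
    Eigen::VectorXd bl_noise = Eigen::VectorXd::Constant(bl_samples.size(), noise);
    model.compute(samples, observations, obs_noise, bl_samples, bl_noise);
}
```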
@@ -55,27 +55,43 @@ namespace limbo {
         Eigen::VectorXd grad(const Eigen::VectorXd& x1, const Eigen::VectorXd& x2) const
         {
-            Eigen::VectorXd grad = Eigen::VectorXd::Zero(this->h_params_size());
-            Eigen::MatrixXd K = (_A * _A.transpose());
-            K.diagonal() += (Eigen::MatrixXd)(_ell.array().inverse().square());
-            double z = ((x1 - x2).transpose() * K * (x1 - x2)).norm();
-            double k = _sf2 * std::exp(-0.5 * z);
-            grad.head(_input_dim) = (x1 - x2).cwiseQuotient(_ell).array().square() * k;
-            Eigen::MatrixXd G = -k * (x1 - x2) * (x1 - x2).transpose() * _A;
-            for (size_t j = 0; j < Params::kernel_squared_exp_ard::k(); ++j)
-                grad.segment((1 + j) * _input_dim, _input_dim) = G.col(j);
-            grad(this->h_params_size() - 1) = 0; // 2.0 * k;
-            return grad;
+            if (Params::kernel_squared_exp_ard::k() > 0) {
+                Eigen::VectorXd grad = Eigen::VectorXd::Zero(this->h_params_size());
+                Eigen::MatrixXd K = (_A * _A.transpose());
+                K.diagonal() += (Eigen::MatrixXd)(_ell.array().inverse().square());
+                double z = ((x1 - x2).transpose() * K * (x1 - x2)).norm();
+                double k = _sf2 * std::exp(-0.5 * z);
+                grad.head(_input_dim) = (x1 - x2).cwiseQuotient(_ell).array().square() * k;
+                Eigen::MatrixXd G = -k * (x1 - x2) * (x1 - x2).transpose() * _A;
+                for (size_t j = 0; j < Params::kernel_squared_exp_ard::k(); ++j)
+                    grad.segment((1 + j) * _input_dim, _input_dim) = G.col(j);
+                grad(this->h_params_size() - 1) = 0; // 2.0 * k;
+                return grad;
+            }
+            else {
+                Eigen::VectorXd grad(_input_dim + 1);
+                Eigen::VectorXd z = (x1 - x2).cwiseQuotient(_ell).array().square();
+                double k = _sf2 * std::exp(-0.5 * z.sum());
+                grad.head(_input_dim) = z * k;
+                grad(_input_dim) = 0; // 2.0 * k;
+                return grad;
+            }
         }

         double operator()(const Eigen::VectorXd& x1, const Eigen::VectorXd& x2) const
         {
             assert(x1.size() == _ell.size());
-            Eigen::MatrixXd K = (_A * _A.transpose());
-            K.diagonal() += (Eigen::MatrixXd)(_ell.array().inverse().square());
-            double z = ((x1 - x2).transpose() * K * (x1 - x2)).norm();
+            double z;
+            if (Params::kernel_squared_exp_ard::k() > 0) {
+                Eigen::MatrixXd K = (_A * _A.transpose());
+                K.diagonal() += (Eigen::MatrixXd)(_ell.array().inverse().square());
+                z = ((x1 - x2).transpose() * K * (x1 - x2)).norm();
+            }
+            else {
+                z = (x1 - x2).cwiseQuotient(_ell).squaredNorm();
+            }
             return _sf2 * std::exp(-0.5 * z);
         }
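Both branches evaluate the same squared-exponential ARD kernel; the new `k() == 0` path simply avoids building the D×D metric when no low-rank factor is configured. With `_sf2` = σ_f², `_ell` = ℓ, and `_A` = Λ (a D×k matrix), this is the factor-analysis form of the kernel (Rasmussen & Williams, GPML, Sec. 5.1):

```math
k(x_1, x_2) = \sigma_f^2 \exp\!\left(-\tfrac{1}{2}(x_1 - x_2)^\top M (x_1 - x_2)\right),
\quad
M = \begin{cases} \operatorname{diag}(\ell)^{-2}, & k = 0 \\ \Lambda\Lambda^\top + \operatorname{diag}(\ell)^{-2}, & k > 0 \end{cases}
```

In both branches the first `_input_dim` entries of `grad` are the derivatives with respect to the log length-scales, ∂k/∂log ℓ_d = k · ((x_{1,d} − x_{2,d})/ℓ_d)², which is exactly the `z * k` expression in the new else branch.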
@@ -153,7 +153,7 @@ BOOST_AUTO_TEST_CASE(test_bo_gp)
     // typedef mean_functions::MeanFunctionARD<Params, mean_functions::MeanData<Params>> Mean_t;
     typedef mean::Data<Params> Mean_t;
     typedef boost::fusion::vector<stat::Samples<Params>, stat::Observations<Params>> Stat_t;
-    typedef init::NoInit<Params> Init_t;
+    typedef init::RandomSampling<Params> Init_t;
     typedef model::GP<Params, Kernel_t, Mean_t> GP_t;
     typedef acqui::UCB<Params, GP_t> Acqui_t;
@@ -180,7 +180,7 @@ BOOST_AUTO_TEST_CASE(test_bo_blacklist)
     // typedef mean_functions::MeanFunctionARD<Params, mean_functions::MeanData<Params>> Mean_t;
     typedef mean::Data<Params> Mean_t;
     typedef boost::fusion::vector<stat::Samples<Params>, stat::Observations<Params>> Stat_t;
-    typedef init::NoInit<Params> Init_t;
+    typedef init::RandomSampling<Params> Init_t;
    typedef model::GP<Params, Kernel_t, Mean_t> GP_t;
     typedef acqui::UCB<Params, GP_t> Acqui_t;
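Both tests switch from `init::NoInit` to `init::RandomSampling`, so the optimizer starts with a populated dataset and the GP recomputation in the first hunk runs on real samples. A hedged sketch of how this initializer is typically parameterized in limbo (the struct and macro names follow limbo's usual conventions; the sample count is illustrative, not taken from this commit):

```cpp
#include <limbo/limbo.hpp> // pulls in BO_PARAM and init::RandomSampling

struct Params {
    struct init_randomsampling {
        // Number of random points evaluated before the BO loop starts
        // (illustrative value).
        BO_PARAM(int, samples, 10);
    };
    // ... kernel, acquisition, and stopping parameter structs as before ...
};
```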