Commit 74ab2f05 authored by Konstantinos Chatzilygeroudis, committed by GitHub

Merge pull request #180 from resibots/experimental_fixes

Experimental fixes
parents 3414eab0 e5cc758c
@@ -6,7 +6,7 @@
 using namespace limbo;
 struct Params {
-    struct cbayes_opt_boptimizer : public defaults::cbayes_opt_boptimizer {
+    struct bayes_opt_cboptimizer : public defaults::bayes_opt_cboptimizer {
         BO_PARAM(double, noise, 0.01);
     };
......
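Note (not part of the diff): any downstream Params struct written against the old name must be renamed the same way. A minimal sketch of a migrated user-side configuration, assuming the limbo headers that define BO_PARAM and defaults::bayes_opt_cboptimizer are already included and that the remaining parameter structs live elsewhere:

    using namespace limbo;

    struct Params {
        // renamed from defaults::cbayes_opt_boptimizer by this pull request
        struct bayes_opt_cboptimizer : public defaults::bayes_opt_cboptimizer {
            BO_PARAM(double, noise, 0.01); // observation noise passed to the GP models
        };
        // ... acquisition, inner-optimizer and stopping-criteria structs omitted
    };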
@@ -72,8 +72,10 @@ namespace limbo {
            size_t dim_out() const { return _model.dim_out(); }
            template <typename AggregatorFunction>
-            double operator()(const Eigen::VectorXd& v, const AggregatorFunction& afun)
+            opt::eval_t operator()(const Eigen::VectorXd& v, const AggregatorFunction& afun, bool gradient)
            {
+                assert(!gradient);
                Eigen::VectorXd mu;
                double sigma_sq;
                std::tie(mu, sigma_sq) = _model.query(v);
@@ -81,7 +83,7 @@ namespace limbo {
                // If \sigma(x) = 0 or we do not have any observation yet we return 0
                if (sigma < 1e-10 || _model.samples().size() < 1)
-                    return 0.0;
+                    return opt::no_grad(0.0);
                // Compute expected constrained improvement
                // First find the best (predicted) observation so far -- if needed
@@ -100,7 +102,7 @@ namespace limbo {
                double phi = std::exp(-0.5 * std::pow(Z, 2.0)) / std::sqrt(2.0 * M_PI);
                double Phi = 0.5 * std::erfc(-Z / std::sqrt(2));
-                return _pf(v, afun) * (X * Phi + sigma * phi);
+                return opt::no_grad(_pf(v, afun) * (X * Phi + sigma * phi));
            }
        protected:
......
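Note (not part of the diff): the operator() above is the expected constrained improvement acquisition. Reading the surrounding code, it returns the probability of feasibility _pf(v, afun) times the usual expected improvement (f_best - mu(v)) * Phi(Z) + sigma(v) * phi(Z), with Z = (f_best - mu(v)) / sigma(v). The signature change makes it match limbo's inner-optimizer interface, where opt::eval_t pairs a value with an optional gradient and opt::no_grad(v) builds one that carries no gradient; hence the assert(!gradient) for this gradient-free acquisition. A minimal standalone sketch of a callable with the same shape (the header path and the toy objective are assumptions, not part of the patch):

    #include <cassert>
    #include <Eigen/Core>
    #include <limbo/opt/optimizer.hpp> // assumed location of opt::eval_t / opt::no_grad

    // Gradient-free objective in the (value, optional gradient) form used by
    // limbo's inner optimizers; mirrors the new acquisition operator().
    limbo::opt::eval_t toy_acquisition(const Eigen::VectorXd& x, bool gradient)
    {
        assert(!gradient);               // no analytical gradient is provided
        double value = -x.squaredNorm(); // hypothetical score, peaked at the origin
        return limbo::opt::no_grad(value);
    }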
@@ -66,9 +66,10 @@
 namespace limbo {
     namespace defaults {
-        struct cbayes_opt_boptimizer {
+        struct bayes_opt_cboptimizer {
             BO_PARAM(double, noise, 1e-6);
             BO_PARAM(int, hp_period, -1);
+            BO_PARAM(bool, bounded, true);
         };
     }
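Note (not part of the diff): the new bounded parameter (default true) is what the rewritten loop below feeds to the starting-point generator and to the inner acquisition optimizer, presumably restricting the search to the usual [0,1]^d normalized input space. A hedged sketch of a user Params block that opts out of the bounded search:

    struct Params {
        struct bayes_opt_cboptimizer : public limbo::defaults::bayes_opt_cboptimizer {
            BO_PARAM(bool, bounded, false); // let the inner optimizer search outside [0,1]^d
        };
        // ... other parameter structs omitted
    };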
@@ -155,9 +156,9 @@ namespace limbo {
            if (!this->_observations.empty()) {
                _split_observations();
-                _model.compute(this->_samples, _obs[0], Eigen::VectorXd::Constant(_obs[0].size(), Params::cbayes_opt_boptimizer::noise()));
+                _model.compute(this->_samples, _obs[0], Eigen::VectorXd::Constant(_obs[0].size(), Params::bayes_opt_cboptimizer::noise()));
                if (_nb_constraints > 0)
-                    _constraint_model.compute(this->_samples, _obs[1], Eigen::VectorXd::Constant(_obs[1].size(), Params::cbayes_opt_boptimizer::noise()));
+                    _constraint_model.compute(this->_samples, _obs[1], Eigen::VectorXd::Constant(_obs[1].size(), Params::bayes_opt_cboptimizer::noise()));
            }
            else {
                _model = model_t(StateFunction::dim_in, StateFunction::dim_out);
@@ -170,21 +171,20 @@ namespace limbo {
            while (!this->_stop(*this, afun)) {
                acquisition_function_t acqui(_model, _constraint_model, this->_current_iteration);
-                // we do not have gradient in our current acquisition function
                auto acqui_optimization =
-                    [&](const Eigen::VectorXd& x, bool g) { return opt::no_grad(acqui(x, afun)); };
-                Eigen::VectorXd starting_point = tools::random_vector(StateFunction::dim_in);
-                Eigen::VectorXd new_sample = acqui_optimizer(acqui_optimization, starting_point, true);
+                    [&](const Eigen::VectorXd& x, bool g) { return acqui(x, afun, g); };
+                Eigen::VectorXd starting_point = tools::random_vector(StateFunction::dim_in, Params::bayes_opt_cboptimizer::bounded());
+                Eigen::VectorXd new_sample = acqui_optimizer(acqui_optimization, starting_point, Params::bayes_opt_cboptimizer::bounded());
                this->eval_and_add(sfun, new_sample);
                this->_update_stats(*this, afun);
-                _model.add_sample(this->_samples.back(), _obs[0].back(), Params::cbayes_opt_boptimizer::noise());
+                _model.add_sample(this->_samples.back(), _obs[0].back(), Params::bayes_opt_cboptimizer::noise());
                if (_nb_constraints > 0)
-                    _constraint_model.add_sample(this->_samples.back(), _obs[1].back(), Params::cbayes_opt_boptimizer::noise());
+                    _constraint_model.add_sample(this->_samples.back(), _obs[1].back(), Params::bayes_opt_cboptimizer::noise());
-                if (Params::cbayes_opt_boptimizer::hp_period() > 0
-                    && (this->_current_iteration + 1) % Params::cbayes_opt_boptimizer::hp_period() == 0) {
+                if (Params::bayes_opt_cboptimizer::hp_period() > 0
+                    && (this->_current_iteration + 1) % Params::bayes_opt_cboptimizer::hp_period() == 0) {
                    _model.optimize_hyperparams();
                    if (_nb_constraints > 0)
                        _constraint_model.optimize_hyperparams();
......
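Note (not part of the diff): the loop now forwards the gradient flag straight to the acquisition function instead of always wrapping it in opt::no_grad, and it passes the bounded parameter both when drawing the starting point and when running the inner optimizer. A hedged, self-contained sketch of that call pattern (the header paths, the two-argument random_vector overload and the toy lambda are assumptions used only for illustration):

    #include <cassert>
    #include <Eigen/Core>
    #include <limbo/opt/optimizer.hpp>          // assumed: opt::eval_t, opt::no_grad, opt::fun
    #include <limbo/tools/random_generator.hpp> // assumed: tools::random_vector

    void call_pattern_demo()
    {
        bool bounded = true; // stands in for Params::bayes_opt_cboptimizer::bounded()

        // Starting point drawn in [0,1]^2 when bounded, as in the new loop.
        Eigen::VectorXd starting_point = limbo::tools::random_vector(2, bounded);

        // Callable with the (VectorXd, bool) -> opt::eval_t shape that the inner
        // optimizer expects; the gradient flag is now forwarded, not discarded.
        auto acqui_optimization = [](const Eigen::VectorXd& x, bool g) {
            assert(!g);
            return limbo::opt::no_grad(-x.squaredNorm()); // toy acquisition value
        };

        double value = limbo::opt::fun(acqui_optimization(starting_point, false));
        (void)value; // the real loop hands acqui_optimization to acqui_optimizer instead
    }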