Commit 56133399 authored by Jean-Baptiste Mouret

Explicitly call the HP optimization, and add a parameter in bayesian_optimizer to control the frequency of the HP optimization.
parent 4aac7164
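
For context, a hedged sketch of what this change looks like on the user side: a Params block that inherits the new default and optionally overrides hp_period to tune how often the hyper-parameters are re-optimized. The BO_PARAM macro is assumed here to expand to a static accessor (static T name() { return value; }), which is how the diff below reads it (Params::bayes_opt_boptimizer::hp_period()); the stand-in structs are illustrative, not taken from the repository.

#include <iostream>

// Illustrative stand-ins for limbo's parameter structs; BO_PARAM(T, name, v)
// is assumed to generate "static T name() { return v; }", matching how the
// diff calls Params::bayes_opt_boptimizer::hp_period() and ::noise().
namespace defaults {
    struct bayes_opt_boptimizer {
        static double noise() { return 1e-6; }
        static int hp_period() { return 5; } // new default added by this commit
    };
}

struct Params {
    // A user Params block inheriting the defaults and overriding hp_period,
    // mirroring the example changes in the hunks below.
    struct bayes_opt_boptimizer : public defaults::bayes_opt_boptimizer {
        static double noise() { return 0.001; }
        static int hp_period() { return 10; } // re-optimize hyper-parameters every 10 iterations
    };
};

int main()
{
    std::cout << "noise = " << Params::bayes_opt_boptimizer::noise()
              << ", hp_period = " << Params::bayes_opt_boptimizer::hp_period() << std::endl;
}
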
@@ -35,8 +35,8 @@ BO_PARAMS(std::cout,
         BO_PARAM(bool, stats_enabled, true);
     };
-    struct bayes_opt_boptimizer {
+    struct bayes_opt_boptimizer : public defaults::bayes_opt_boptimizer {
         BO_PARAM(double, noise, 0.001);
     };
...
@@ -30,7 +30,7 @@ struct Params {
         BO_PARAM(bool, stats_enabled, true);
     };
-    struct bayes_opt_boptimizer {
+    struct bayes_opt_boptimizer : public defaults::bayes_opt_boptimizer {
         BO_PARAM(double, noise, 0.001);
     };
...
@@ -30,7 +30,7 @@ struct Params {
         BO_PARAM(bool, stats_enabled, true);
     };
-    struct bayes_opt_boptimizer {
+    struct bayes_opt_boptimizer : public defaults::bayes_opt_boptimizer {
         BO_PARAM(double, noise, 0.001);
         BO_PARAM(bool, stats_enabled, true);
     };
...
@@ -17,6 +17,7 @@ namespace limbo {
     namespace defaults {
         struct bayes_opt_boptimizer {
             BO_PARAM(double, noise, 1e-6);
+            BO_PARAM(int, hp_period, 5);
         };
     }
@@ -113,6 +114,9 @@ namespace limbo {
                 } else {
                     _model.add_sample(this->_samples.back(), this->_observations.back(), Params::bayes_opt_boptimizer::noise());
                 }
+                if (this->_current_iteration % Params::bayes_opt_boptimizer::hp_period() == 0
+                    || Params::bayes_opt_boptimizer::hp_period() == -1)
+                    _model.optimize_hyperparams();
                 this->_current_iteration++;
                 this->_total_iterations++;
...
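
A minimal, standalone sketch of the frequency logic added above: with the check iteration % hp_period == 0 || hp_period == -1, the hyper-parameters are re-optimized on iterations 0, hp_period, 2*hp_period, and so on, and (as the condition reads) on every iteration when hp_period is -1. The helper name below is illustrative, not part of limbo.

#include <iostream>

// Illustrative helper (not limbo code): replicates the condition used in the
// boptimizer hunk above to decide when to call optimize_hyperparams().
bool hp_optimization_due(int current_iteration, int hp_period)
{
    return current_iteration % hp_period == 0 || hp_period == -1;
}

int main()
{
    const int hp_period = 5; // the new default introduced by this commit
    for (int it = 0; it < 12; ++it)
        std::cout << "iteration " << it << ": "
                  << (hp_optimization_due(it, hp_period) ? "optimize hyper-parameters" : "skip")
                  << std::endl;
}
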
@@ -330,6 +330,7 @@ namespace limbo {
                     XI = XI + 4.0;
                     LB_old = LB;
                 }
+                _model.optimize_hyperparams();
             }

             template <typename AggregatorFunction = FirstElem>
...
@@ -62,9 +62,13 @@ namespace limbo {
                 if (!_bl_samples.empty())
                     this->_compute_bl_kernel();
             }

+            /// Do not forget to call this if you use hyper-parameter optimization!
+            void optimize_hyperparams() {
+                HyperParamsOptimizer()(*this);
+            }
+
             /// add sample and update the GP. This code uses an incremental implementation of the Cholesky
             /// decomposition. It is therefore much faster than a call to compute()
             void add_sample(const Eigen::VectorXd& sample, const Eigen::VectorXd& observation, double noise)
@@ -98,8 +102,6 @@ namespace limbo {
                 if (!_bl_samples.empty())
                     this->_compute_bl_kernel();
-
-                HyperParamsOptimizer()(*this);
             }

             /// add blacklisted sample and update the GP
@@ -117,7 +119,6 @@ namespace limbo {
                 if (!_samples.empty()) {
                     this->_compute_bl_kernel();
-                    HyperParamsOptimizer()(*this);
                 }
             }
...
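
Since the GP no longer runs HyperParamsOptimizer inside compute() or the blacklist update, callers now have to trigger the optimization themselves, as the new doc comment warns. A hedged mock of the resulting calling pattern (not limbo's GP, whose kernel, mean, and optimizer template parameters are omitted here):

#include <iostream>
#include <vector>

// Mock GP, for illustrating the new calling convention only.
class MockGP {
public:
    void add_sample(double sample, double observation)
    {
        _samples.push_back(sample);
        _observations.push_back(observation);
        // Note: no hyper-parameter optimization is triggered here any more.
    }

    // After this commit, this has to be called explicitly whenever
    // hyper-parameter optimization is wanted.
    void optimize_hyperparams()
    {
        std::cout << "optimizing hyper-parameters on " << _samples.size() << " samples" << std::endl;
    }

private:
    std::vector<double> _samples;
    std::vector<double> _observations;
};

int main()
{
    MockGP gp;
    for (int i = 0; i < 3; ++i)
        gp.add_sample(i * 0.1, i * 0.2);
    gp.optimize_hyperparams(); // explicit call, as in the BOptimizer change above
}
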