Commit 14154e05 authored by Konstantinos Chatzilygeroudis

remove all blacklists

parent 59bf5cf0
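The hunks below strip the blacklist machinery from the public interfaces: the extra bool argument disappears from the stat functors and _update_stats, eval_and_add no longer catches EvaluationError, and GP::compute/add_sample lose their bl_samples/noises_bl parameters. As a rough illustration of the resulting call pattern (a minimal sketch only, with a hypothetical helper name and a placeholder noise value, based on the signatures shown in this diff), the simplified model interface is driven like this:

#include <vector>
#include <Eigen/Core>

// Sketch: fit a limbo-style GP without any blacklist arguments.
// `GP` stands for any model exposing the compute()/add_sample() signatures below.
template <typename GP>
void fit_gp_sketch(GP& gp,
                   const std::vector<Eigen::VectorXd>& samples,
                   const std::vector<Eigen::VectorXd>& observations)
{
    const double noise = 1e-6; // placeholder observation noise
    // one noise entry per observation; no bl_samples / noises_bl arguments any more
    Eigen::VectorXd noises = Eigen::VectorXd::Constant(observations.size(), noise);
    gp.compute(samples, observations, noises);

    // incremental update with a single evaluated point
    gp.add_sample(samples.back(), observations.back(), noise);
}

The constrained optimizer and GPParego hunks follow the same pattern, forwarding only samples, observations and noises to Model::compute.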
@@ -75,7 +75,6 @@ int main()
using Stop_t = boost::fusion::vector<stop::MaxIterations<Params>>;
using Stat_t = boost::fusion::vector<stat::Samples<Params>,
stat::BlSamples<Params>,
stat::BestObservations<Params>,
stat::AggregatedObservations<Params>>;
using Mean_t = mean::Constant<Params>;
@@ -82,15 +82,14 @@ namespace limbo {
}
template <typename BO, typename AggregatorFunction>
struct RefreshStat_f {
RefreshStat_f(BO& bo, const AggregatorFunction& afun, bool blacklisted)
: _bo(bo), _afun(afun), _blacklisted(blacklisted) {}
RefreshStat_f(BO& bo, const AggregatorFunction& afun)
: _bo(bo), _afun(afun) {}
BO& _bo;
const AggregatorFunction& _afun;
bool _blacklisted;
template <typename T>
void operator()(T& x) const { x(_bo, _afun, _blacklisted); }
void operator()(T& x) const { x(_bo, _afun); }
};
struct FirstElem {
@@ -221,9 +220,6 @@ namespace limbo {
/// return the list of the points that have been evaluated so far (x)
const std::vector<Eigen::VectorXd>& samples() const { return _samples; }
/// return the list of blacklisted points
const std::vector<Eigen::VectorXd>& bl_samples() const { return _bl_samples; }
/// return the current iteration number
int current_iteration() const { return _current_iteration; }
@@ -240,22 +236,11 @@ namespace limbo {
_observations.push_back(v);
}
/// Add a new blacklisted sample
void add_new_bl_sample(const Eigen::VectorXd& s) { _bl_samples.push_back(s); }
/// Evaluate a sample and add the result to the 'database' (sample / observations vectors) -- it does not update the model
template <typename StateFunction>
bool eval_and_add(const StateFunction& seval, const Eigen::VectorXd& sample)
void eval_and_add(const StateFunction& seval, const Eigen::VectorXd& sample)
{
try {
this->add_new_sample(sample, seval(sample));
}
catch (const EvaluationError& e) {
this->add_new_bl_sample(sample);
return false;
}
return true;
this->add_new_sample(sample, seval(sample));
}
protected:
@@ -267,7 +252,6 @@ namespace limbo {
this->_total_iterations = 0;
this->_samples.clear();
this->_observations.clear();
this->_bl_samples.clear();
}
if (this->_total_iterations == 0)
@@ -282,10 +266,10 @@ namespace limbo {
}
template <typename BO, typename AggregatorFunction>
void _update_stats(BO& bo, const AggregatorFunction& afun, bool blacklisted)
void _update_stats(BO& bo, const AggregatorFunction& afun)
{ // not const, because some stat classes
// modify the optimizer....
boost::fusion::for_each(_stat, RefreshStat_f<BO, AggregatorFunction>(bo, afun, blacklisted));
boost::fusion::for_each(_stat, RefreshStat_f<BO, AggregatorFunction>(bo, afun));
}
void _make_res_dir()
@@ -305,7 +289,6 @@ namespace limbo {
std::vector<Eigen::VectorXd> _observations;
std::vector<Eigen::VectorXd> _samples;
std::vector<Eigen::VectorXd> _bl_samples;
};
}
}
@@ -142,7 +142,7 @@ namespace limbo {
this->_init(sfun, afun, reset);
if (!this->_observations.empty())
_model.compute(this->_samples, this->_observations, Eigen::VectorXd::Constant(this->_observations.size(), Params::bayes_opt_boptimizer::noise()), this->_bl_samples, Eigen::VectorXd::Constant(this->_bl_samples.size(), Params::bayes_opt_boptimizer::noise()));
_model.compute(this->_samples, this->_observations, Eigen::VectorXd::Constant(this->_observations.size(), Params::bayes_opt_boptimizer::noise()));
else
_model = model_t(StateFunction::dim_in, StateFunction::dim_out);
@@ -155,16 +155,11 @@ namespace limbo {
[&](const Eigen::VectorXd& x, bool g) { return acqui(x,afun,g); };
Eigen::VectorXd starting_point = tools::random_vector(StateFunction::dim_in);
Eigen::VectorXd new_sample = acqui_optimizer(acqui_optimization, starting_point, true);
bool blacklisted = !this->eval_and_add(sfun, new_sample);
this->eval_and_add(sfun, new_sample);
this->_update_stats(*this, afun, blacklisted);
this->_update_stats(*this, afun);
if (blacklisted) {
_model.add_bl_sample(this->_bl_samples.back(), Params::bayes_opt_boptimizer::noise());
}
else {
_model.add_sample(this->_samples.back(), this->_observations.back(), Params::bayes_opt_boptimizer::noise());
}
_model.add_sample(this->_samples.back(), this->_observations.back(), Params::bayes_opt_boptimizer::noise());
if (Params::bayes_opt_boptimizer::hp_period() > 0
&& (this->_current_iteration + 1) % Params::bayes_opt_boptimizer::hp_period() == 0)
@@ -155,9 +155,9 @@ namespace limbo {
if (!this->_observations.empty()) {
_split_observations();
_model.compute(this->_samples, _obs[0], Eigen::VectorXd::Constant(_obs[0].size(), Params::cbayes_opt_boptimizer::noise()), this->_bl_samples, Eigen::VectorXd::Constant(this->_bl_samples.size(), Params::cbayes_opt_boptimizer::noise()));
_model.compute(this->_samples, _obs[0], Eigen::VectorXd::Constant(_obs[0].size(), Params::cbayes_opt_boptimizer::noise()));
if (_nb_constraints > 0)
_constraint_model.compute(this->_samples, _obs[1], Eigen::VectorXd::Constant(_obs[1].size(), Params::cbayes_opt_boptimizer::noise()), this->_bl_samples, Eigen::VectorXd::Constant(this->_bl_samples.size(), Params::cbayes_opt_boptimizer::noise()));
_constraint_model.compute(this->_samples, _obs[1], Eigen::VectorXd::Constant(_obs[1].size(), Params::cbayes_opt_boptimizer::noise()));
}
else {
_model = model_t(StateFunction::dim_in, StateFunction::dim_out);
@@ -175,20 +175,13 @@ namespace limbo {
[&](const Eigen::VectorXd& x, bool g) { return opt::no_grad(acqui(x, afun)); };
Eigen::VectorXd starting_point = tools::random_vector(StateFunction::dim_in);
Eigen::VectorXd new_sample = acqui_optimizer(acqui_optimization, starting_point, true);
bool blacklisted = !this->eval_and_add(sfun, new_sample);
this->eval_and_add(sfun, new_sample);
this->_update_stats(*this, afun, blacklisted);
this->_update_stats(*this, afun);
if (blacklisted) {
_model.add_bl_sample(this->_bl_samples.back(), Params::cbayes_opt_boptimizer::noise());
if (_nb_constraints > 0)
_constraint_model.add_bl_sample(this->_bl_samples.back(), Params::cbayes_opt_boptimizer::noise());
}
else {
_model.add_sample(this->_samples.back(), _obs[0].back(), Params::cbayes_opt_boptimizer::noise());
if (_nb_constraints > 0)
_constraint_model.add_sample(this->_samples.back(), _obs[1].back(), Params::cbayes_opt_boptimizer::noise());
}
_model.add_sample(this->_samples.back(), _obs[0].back(), Params::cbayes_opt_boptimizer::noise());
if (_nb_constraints > 0)
_constraint_model.add_sample(this->_samples.back(), _obs[1].back(), Params::cbayes_opt_boptimizer::noise());
if (Params::cbayes_opt_boptimizer::hp_period() > 0
&& (this->_current_iteration + 1) % Params::cbayes_opt_boptimizer::hp_period() == 0) {
@@ -154,7 +154,7 @@ namespace limbo {
// add sample
this->add_new_sample(new_sample, feval(new_sample));
this->_update_stats(*this, FirstElem(), false);
this->_update_stats(*this, FirstElem());
this->_current_iteration++;
this->_total_iterations++;
}
@@ -200,12 +200,12 @@ namespace limbo {
x_d(splitd) = (tmp_tree[h2].x_min[ii](splitd) + 5 * tmp_tree[h2].x_max[ii](splitd)) / 6.0;
// TO-DO: Properly handle bl_samples etc
_model.compute(this->_samples, this->_observations, Eigen::VectorXd::Constant(this->_samples.size(), Params::bayes_opt_imgpo::noise()), this->_bl_samples);
_model.compute(this->_samples, this->_observations, Eigen::VectorXd::Constant(this->_samples.size(), Params::bayes_opt_imgpo::noise()));
acquisition_function_t acqui_g(_model, M2);
z_max = std::max(z_max, acqui_g(x_g, afun));
M2++;
_model.compute(this->_samples, this->_observations, Eigen::VectorXd::Constant(this->_samples.size(), Params::bayes_opt_imgpo::noise()), this->_bl_samples);
_model.compute(this->_samples, this->_observations, Eigen::VectorXd::Constant(this->_samples.size(), Params::bayes_opt_imgpo::noise()));
acquisition_function_t acqui_d(_model, M2);
z_max = std::max(z_max, acqui_d(x_d, afun));
M2++;
@@ -270,7 +270,7 @@ namespace limbo {
// left node
_tree[h + 1].x.push_back(x_g);
// TO-DO: Properly handle bl_samples etc
_model.compute(this->_samples, this->_observations, Eigen::VectorXd::Constant(this->_samples.size(), Params::bayes_opt_imgpo::noise()), this->_bl_samples);
_model.compute(this->_samples, this->_observations, Eigen::VectorXd::Constant(this->_samples.size(), Params::bayes_opt_imgpo::noise()));
acquisition_function_t acqui_g(_model, M);
double UCB = acqui_g(x_g, afun);
Eigen::VectorXd fsample_g;
@@ -309,7 +309,7 @@ namespace limbo {
// right node
_tree[h + 1].x.push_back(x_d);
// TO-DO: Properly handle bl_samples etc
_model.compute(this->_samples, this->_observations, Eigen::VectorXd::Constant(this->_samples.size(), Params::bayes_opt_imgpo::noise()), this->_bl_samples);
_model.compute(this->_samples, this->_observations, Eigen::VectorXd::Constant(this->_samples.size(), Params::bayes_opt_imgpo::noise()));
acquisition_function_t acqui_d(_model, M);
double UCB2 = acqui_d(x_d, afun);
Eigen::VectorXd fsample_d;
@@ -90,7 +90,7 @@ namespace limbo {
<< this->_models[1].mu(best_v) << ")"
<< " sigma:" << this->_models[0].sigma(best_v) << " "
<< this->_models[1].sigma(best_v) << std::endl;
this->_update_stats(*this, FirstElem(), false);
this->_update_stats(*this, FirstElem());
this->_current_iteration++;
this->_total_iterations++;
}
@@ -80,14 +80,12 @@ namespace limbo {
GPParego() {}
GPParego(int dim_in, int dim_out) : Model(dim_in, 1), _nb_objs(dim_out) {}
void compute(const std::vector<Eigen::VectorXd>& samples,
const std::vector<Eigen::VectorXd>& observations, const Eigen::VectorXd& noises,
const std::vector<Eigen::VectorXd>& bl_samples = std::vector<Eigen::VectorXd>(),
const Eigen::VectorXd& noises_bl = Eigen::VectorXd())
const std::vector<Eigen::VectorXd>& observations, const Eigen::VectorXd& noises)
{
_raw_observations = observations;
_nb_objs = observations[0].size();
auto new_observations = _scalarize_obs(observations);
Model::compute(samples, new_observations, noises, bl_samples);
Model::compute(samples, new_observations, noises);
}
/// add_sample will NOT be incremental (we call compute each time)
void add_sample(const Eigen::VectorXd& sample, const Eigen::VectorXd& observation, double noise)
@@ -98,16 +96,7 @@ namespace limbo {
_raw_observations.push_back(observation);
this->compute(this->_samples,
_raw_observations, this->_noises,
this->_bl_samples);
}
/// WARNING: Parego does not really work with blacklisted samples
void add_bl_sample(const Eigen::VectorXd& bl_sample, double noise)
{
Model::add_bl_sample(bl_sample, noise);
this->compute(this->_samples,
_raw_observations, this->_noises,
this->_bl_samples);
_raw_observations, this->_noises);
}
protected:
@@ -61,7 +61,7 @@ namespace limbo {
template <typename Params>
struct HyperVolume : public limbo::stat::StatBase<Params> {
template <typename BO, typename AggregatorFunction>
void operator()(const BO& bo, const AggregatorFunction&, bool blacklisted)
void operator()(const BO& bo, const AggregatorFunction&)
{
if (bo.observations().empty())
return;
@@ -53,7 +53,7 @@ namespace limbo {
template <typename F>
struct ParetoBenchmark {
template <typename BO, typename AggregatorFunction>
void operator()(BO& opt, const AggregatorFunction& afun, bool blacklisted)
void operator()(BO& opt, const AggregatorFunction& afun)
{
opt.update_pareto_data();
#ifndef NSBO // this is already done in NSBO
@@ -58,7 +58,7 @@ namespace limbo {
typedef std::vector<pareto_point_t> pareto_t;
template <typename BO, typename AggregatorFunction>
void operator()(const BO& bo, const AggregatorFunction&, bool blacklisted)
void operator()(const BO& bo, const AggregatorFunction&)
{
if (!bo.stats_enabled() || bo.observations().empty())
return;
@@ -77,14 +77,11 @@ namespace limbo {
/// Compute the GP from samples, observations and noise. This call needs to be explicit!
void compute(const std::vector<Eigen::VectorXd>& samples,
const std::vector<Eigen::VectorXd>& observations,
const Eigen::VectorXd& noises,
const std::vector<Eigen::VectorXd>& bl_samples = std::vector<Eigen::VectorXd>(),
const Eigen::VectorXd& noises_bl = Eigen::VectorXd())
const Eigen::VectorXd& noises)
{
assert(samples.size() != 0);
assert(observations.size() != 0);
assert(samples.size() == observations.size());
assert(bl_samples.size() == (unsigned int)noises_bl.size());
_dim_in = samples[0].size();
_kernel_function = KernelFunction(_dim_in); // the cost of building a functor should be relatively low
@@ -101,15 +98,9 @@ namespace limbo {
_mean_observation = _observations.colwise().mean();
_noises = noises;
_noises_bl = noises_bl;
_bl_samples = bl_samples;
this->_compute_obs_mean();
this->_compute_full_kernel();
if (!_bl_samples.empty())
this->_compute_bl_kernel();
}
/// Do not forget to call this if you use hyper-parameters optimization!!
@@ -123,13 +114,8 @@ namespace limbo {
void add_sample(const Eigen::VectorXd& sample, const Eigen::VectorXd& observation, double noise)
{
if (_samples.empty()) {
if (_bl_samples.empty()) {
_dim_in = sample.size();
_kernel_function = KernelFunction(_dim_in); // the cost of building a functor should be relatively low
}
else {
assert(sample.size() == _dim_in);
}
_dim_in = sample.size();
_kernel_function = KernelFunction(_dim_in); // the cost of building a functor should be relatively low
_dim_out = observation.size();
_mean_function = MeanFunction(_dim_out); // the cost of building a functor should be relatively low
@@ -152,31 +138,6 @@ namespace limbo {
this->_compute_obs_mean();
this->_compute_incremental_kernel();
if (!_bl_samples.empty())
this->_compute_bl_kernel();
}
/// add blacklisted sample and update the GP
void add_bl_sample(const Eigen::VectorXd& bl_sample, double noise)
{
if (_samples.empty() && _bl_samples.empty()) {
_dim_in = bl_sample.size();
_kernel_function = KernelFunction(_dim_in); // the cost of building a functor should be relatively low
}
else {
assert(bl_sample.size() == _dim_in);
}
_bl_samples.push_back(bl_sample);
_noises_bl.conservativeResize(_noises_bl.size() + 1);
_noises_bl[_noises_bl.size() - 1] = noise;
//_noise = noise;
if (!_samples.empty()) {
this->_compute_bl_kernel();
}
}
/**
@@ -186,16 +147,12 @@
*/
std::tuple<Eigen::VectorXd, double> query(const Eigen::VectorXd& v) const
{
if (_samples.size() == 0 && _bl_samples.size() == 0)
return std::make_tuple(_mean_function(v, *this),
_kernel_function(v, v));
if (_samples.size() == 0)
return std::make_tuple(_mean_function(v, *this),
_sigma(v, _compute_k_bl(v, _compute_k(v))));
_kernel_function(v, v));
Eigen::VectorXd k = _compute_k(v);
return std::make_tuple(_mu(v, k), _sigma(v, _compute_k_bl(v, k)));
return std::make_tuple(_mu(v, k), _sigma(v, k));
}
/**
@@ -217,9 +174,9 @@
*/
double sigma(const Eigen::VectorXd& v) const
{
if (_samples.size() == 0 && _bl_samples.size() == 0)
if (_samples.size() == 0)
return _kernel_function(v, v);
return _sigma(v, _compute_k_bl(v, _compute_k(v)));
return _sigma(v, _compute_k(v));
}
/// return the number of dimensions of the input
@@ -269,13 +226,6 @@ namespace limbo {
/// return the number of samples used to compute the GP
int nb_samples() const { return _samples.size(); }
/** return the number of blacklisted samples used to compute the GP
\\rst
For the blacklist concept, see the Limbo-specific concept guide.
\\endrst
*/
int nb_bl_samples() const { return _bl_samples.size(); }
/// recomputes the GP
void recompute(bool update_obs_mean = true)
{
@@ -285,9 +235,6 @@ namespace limbo {
this->_compute_obs_mean();
this->_compute_full_kernel();
if (!_bl_samples.empty())
this->_compute_bl_kernel();
}
/// return the likelihood (do not compute it!)
@@ -314,11 +261,9 @@ namespace limbo {
std::vector<Eigen::VectorXd> _samples;
Eigen::MatrixXd _observations;
std::vector<Eigen::VectorXd> _bl_samples; // black listed samples
Eigen::MatrixXd _mean_vector;
Eigen::MatrixXd _obs_mean;
//double _noise;
Eigen::VectorXd _noises;
Eigen::VectorXd _noises_bl;
@@ -327,10 +272,7 @@ namespace limbo {
Eigen::MatrixXd _kernel;
// Eigen::MatrixXd _inverted_kernel;
Eigen::MatrixXd _matrixL;
Eigen::MatrixXd _inv_bl_kernel;
double _lik;
@@ -401,46 +343,6 @@ namespace limbo {
triang.adjoint().solveInPlace(_alpha);
}
void _compute_bl_kernel()
{
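// Builds the inverse of the blacklist-augmented kernel [[K, B], [B^T, D]] block-wise
// via the Schur complement S = D - B^T K^{-1} B: K is the kernel of the regular
// samples (Cholesky factor _matrixL), B the sample/blacklist cross-kernel and D the
// blacklist kernel plus its noise. Blacklisted points carry no observations, so this
// matrix only enters the predictive variance (see _sigma and _compute_k_bl).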
Eigen::MatrixXd A1 = Eigen::MatrixXd::Identity(this->_samples.size(), this->_samples.size());
_matrixL.template triangularView<Eigen::Lower>().solveInPlace(A1);
_matrixL.template triangularView<Eigen::Lower>().transpose().solveInPlace(A1);
_inv_bl_kernel.resize(_samples.size() + _bl_samples.size(),
_samples.size() + _bl_samples.size());
Eigen::MatrixXd B(_samples.size(), _bl_samples.size());
for (size_t i = 0; i < _samples.size(); i++)
for (size_t j = 0; j < _bl_samples.size(); ++j)
B(i, j) = _kernel_function(_samples[i], _bl_samples[j]);
Eigen::MatrixXd D(_bl_samples.size(), _bl_samples.size());
for (size_t i = 0; i < _bl_samples.size(); i++)
for (size_t j = 0; j < _bl_samples.size(); ++j)
D(i, j) = _kernel_function(_bl_samples[i], _bl_samples[j]) + ((i == j) ? _noises_bl[i] : 0);
Eigen::MatrixXd comA = (D - B.transpose() * A1 * B);
Eigen::LLT<Eigen::MatrixXd> llt_bl(comA);
Eigen::MatrixXd comA1 = Eigen::MatrixXd::Identity(_bl_samples.size(), _bl_samples.size());
llt_bl.matrixL().solveInPlace(comA1);
llt_bl.matrixL().transpose().solveInPlace(comA1);
// fill the matrix block wise
_inv_bl_kernel.block(0, 0, _samples.size(), _samples.size()) = A1 + A1 * B * comA1 * B.transpose() * A1;
_inv_bl_kernel.block(0, _samples.size(), _samples.size(),
_bl_samples.size())
= -A1 * B * comA1;
_inv_bl_kernel.block(_samples.size(), 0, _bl_samples.size(),
_samples.size())
= _inv_bl_kernel.block(0, _samples.size(), _samples.size(),
_bl_samples.size())
.transpose();
_inv_bl_kernel.block(_samples.size(), _samples.size(), _bl_samples.size(),
_bl_samples.size())
= comA1;
}
Eigen::VectorXd _mu(const Eigen::VectorXd& v, const Eigen::VectorXd& k) const
{
return (k.transpose() * _alpha) + _mean_function(v, *this).transpose();
@@ -448,14 +350,8 @@ namespace limbo {
double _sigma(const Eigen::VectorXd& v, const Eigen::VectorXd& k) const
{
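// Predictive variance of the GP: sigma^2(v) = k(v, v) - k(v)^T K^{-1} k(v).
// When only regular samples are present, the triangular solve with the Cholesky
// factor _matrixL (z = L^{-1} k) gives k^T K^{-1} k as z.dot(z), without forming
// K^{-1} explicitly.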
double res;
if (_bl_samples.size() == 0) {
Eigen::VectorXd z = _matrixL.triangularView<Eigen::Lower>().solve(k);
res = _kernel_function(v, v) - z.dot(z);
}
else {
res = _kernel_function(v, v) - k.transpose() * _inv_bl_kernel * k;
}
Eigen::VectorXd z = _matrixL.triangularView<Eigen::Lower>().solve(k);
double res = _kernel_function(v, v) - z.dot(z);
return (res <= std::numeric_limits<double>::epsilon()) ? 0 : res;
}
@@ -467,21 +363,6 @@ namespace limbo {
k[i] = _kernel_function(_samples[i], v);
return k;
}
Eigen::VectorXd _compute_k_bl(const Eigen::VectorXd& v,
const Eigen::VectorXd& k) const
{
if (_bl_samples.size() == 0) {
return k;
}
Eigen::VectorXd k_bl(_samples.size() + _bl_samples.size());
k_bl.head(_samples.size()) = k;
for (size_t i = 0; i < _bl_samples.size(); i++)
k_bl[i + this->_samples.size()] = this->_kernel_function(_bl_samples[i], v);
return k_bl;
}
};
}
}
@@ -51,7 +51,6 @@
#include <limbo/stat/best_aggregated_observations.hpp>
#include <limbo/stat/best_observations.hpp>
#include <limbo/stat/best_samples.hpp>
#include <limbo/stat/bl_samples.hpp>
#include <limbo/stat/console_summary.hpp>
#include <limbo/stat/aggregated_observations.hpp>
#include <limbo/stat/observations.hpp>
@@ -56,7 +56,7 @@ namespace limbo {
template <typename Params>
struct AggregatedObservations : public StatBase<Params> {
template <typename BO, typename AggregatorFunction>
void operator()(const BO& bo, const AggregatorFunction& afun, bool blacklisted)
void operator()(const BO& bo, const AggregatorFunction& afun)
{
if (!bo.stats_enabled() || bo.observations().empty())
return;
@@ -69,8 +69,7 @@ namespace limbo {
(*this->_log_file) << "-1 " << afun(bo.observations()[i]) << std::endl;
}
if (!blacklisted)
(*this->_log_file) << bo.total_iterations() << " " << afun(bo.observations().back()) << std::endl;
(*this->_log_file) << bo.total_iterations() << " " << afun(bo.observations().back()) << std::endl;
}
};
}
@@ -56,7 +56,7 @@ namespace limbo {
template <typename Params>
struct BestAggregatedObservations : public StatBase<Params> {
template <typename BO, typename AggregatorFunction>
void operator()(const BO& bo, const AggregatorFunction& afun, bool blacklisted)
void operator()(const BO& bo, const AggregatorFunction& afun)
{
if (!bo.stats_enabled() || bo.observations().empty())
return;
@@ -66,8 +66,7 @@ namespace limbo {
if (bo.total_iterations() == 0)
(*this->_log_file) << "#iteration best_aggregated_observation" << std::endl;
if (!blacklisted)
(*this->_log_file) << bo.total_iterations() << " " << afun(bo.best_observation(afun)) << std::endl;
(*this->_log_file) << bo.total_iterations() << " " << afun(bo.best_observation(afun)) << std::endl;
}
};
}
@@ -55,7 +55,7 @@ namespace limbo {
template <typename Params>
struct BestObservations : public StatBase<Params> {
template <typename BO, typename AggregatorFunction>
void operator()(const BO& bo, const AggregatorFunction& afun, bool blacklisted)
void operator()(const BO& bo, const AggregatorFunction& afun)
{
if (!bo.stats_enabled() || bo.observations().empty())
return;
@@ -65,8 +65,7 @@ namespace limbo {
if (bo.total_iterations() == 0)
(*this->_log_file) << "#iteration best_observation" << std::endl;
if (!blacklisted)
(*this->_log_file) << bo.total_iterations() <<