Commit 4f6cacae authored by Konstantinos Chatzilygeroudis, committed by GitHub

Merge pull request #139 from resibots/ehvi_multi_experimental

Ehvi experimental working..
parents 614026e7 dd41686b
@@ -16,16 +16,22 @@ env:
global:
- CI_HOME=`pwd`
matrix:
- LIBCMAES=ON NLOPT=ON TBB=ON EXPERIMENTAL=OFF
- LIBCMAES=ON NLOPT=ON TBB=OFF EXPERIMENTAL=OFF
- LIBCMAES=ON NLOPT=OFF TBB=ON EXPERIMENTAL=OFF
- LIBCMAES=ON NLOPT=OFF TBB=OFF EXPERIMENTAL=OFF
- LIBCMAES=OFF NLOPT=ON TBB=ON EXPERIMENTAL=OFF
- LIBCMAES=OFF NLOPT=ON TBB=OFF EXPERIMENTAL=OFF
- LIBCMAES=OFF NLOPT=OFF TBB=ON EXPERIMENTAL=OFF
- LIBCMAES=OFF NLOPT=OFF TBB=OFF EXPERIMENTAL=OFF
- LIBCMAES=OFF NLOPT=OFF TBB=OFF EXPERIMENTAL=ON
- LIBCMAES=OFF NLOPT=OFF TBB=ON EXPERIMENTAL=ON
- LIBCMAES=ON NLOPT=ON TBB=ON EXPERIMENTAL=OFF SFERES=OFF
- LIBCMAES=ON NLOPT=ON TBB=OFF EXPERIMENTAL=OFF SFERES=OFF
- LIBCMAES=ON NLOPT=OFF TBB=ON EXPERIMENTAL=OFF SFERES=OFF
- LIBCMAES=ON NLOPT=OFF TBB=OFF EXPERIMENTAL=OFF SFERES=OFF
- LIBCMAES=OFF NLOPT=ON TBB=ON EXPERIMENTAL=OFF SFERES=OFF
- LIBCMAES=OFF NLOPT=ON TBB=OFF EXPERIMENTAL=OFF SFERES=OFF
- LIBCMAES=OFF NLOPT=OFF TBB=ON EXPERIMENTAL=OFF SFERES=OFF
- LIBCMAES=OFF NLOPT=OFF TBB=OFF EXPERIMENTAL=OFF SFERES=OFF
- LIBCMAES=ON NLOPT=OFF TBB=OFF EXPERIMENTAL=ON SFERES=OFF
- LIBCMAES=OFF NLOPT=OFF TBB=OFF EXPERIMENTAL=ON SFERES=OFF
- LIBCMAES=ON NLOPT=OFF TBB=ON EXPERIMENTAL=ON SFERES=OFF
- LIBCMAES=OFF NLOPT=OFF TBB=ON EXPERIMENTAL=ON SFERES=OFF
- LIBCMAES=ON NLOPT=OFF TBB=OFF EXPERIMENTAL=ON SFERES=ON
- LIBCMAES=OFF NLOPT=OFF TBB=OFF EXPERIMENTAL=ON SFERES=ON
- LIBCMAES=ON NLOPT=OFF TBB=ON EXPERIMENTAL=ON SFERES=ON
- LIBCMAES=OFF NLOPT=OFF TBB=ON EXPERIMENTAL=ON SFERES=ON
addons:
apt:
@@ -37,9 +43,10 @@ install:
- if [ "$LIBCMAES" = "ON" ]; then 'ci/install_libcmaes.sh' ; fi
- if [ "$NLOPT" = "ON" ]; then 'ci/install_nlopt.sh' ; fi
- if [ "$TBB" = "ON" ]; then 'ci/install_tbb.sh' ; fi
- if [ "$SFERES" = "ON" ]; then 'ci/install_sferes.sh' ; fi
# Change this to your needs
script:
- ./waf configure
- if [ "$SFERES" = "OFF" ]; then ./waf configure ; else ./waf configure --sferes=$CI_HOME/sferes2 ; fi
- if [ "$EXPERIMENTAL" = "OFF" ]; then ./waf --tests --alltests -v ; else ./waf --experimental ; fi
cd $CI_HOME && git clone https://github.com/sferes2/sferes2.git
cd sferes2
./waf configure
./waf
cd $CI_HOME
@@ -2,31 +2,28 @@
#include <limbo/experimental/bayes_opt/parego.hpp>
#include <limbo/experimental/bayes_opt/nsbo.hpp>
#include <limbo/experimental/bayes_opt/ehvi.hpp>
#include <limbo/experimental/stat/pareto_benchmark.hpp>
using namespace limbo;
using namespace limbo::experimental;
struct Params {
struct boptimizer {
struct bayes_opt_boptimizer : public defaults::bayes_opt_boptimizer {
BO_PARAM(double, noise, 0.01);
BO_PARAM(int, dump_period, 1);
};
struct init {
BO_PARAM(int, nb_samples, 10);
// calandra: number of dimensions * 5
// knowles : 11 * dim - 1
struct init_randomsampling {
BO_PARAM(int, samples, 10);
};
struct bayes_opt_bobase {
BO_PARAM(bool, stats_enabled, false);
struct kernel_exp : public defaults::kernel_exp {
};
struct bayes_opt_parego : public defaults::bayes_opt_parego {
struct bayes_opt_bobase {
BO_PARAM(bool, stats_enabled, true);
};
struct maxiterations {
BO_PARAM(int, n_iterations, 30);
struct stop_maxiterations {
BO_PARAM(int, iterations, 30);
};
struct acqui_ucb : public defaults::acqui_ucb {
@@ -35,8 +32,16 @@ struct Params {
struct acqui_gpucb : public defaults::acqui_gpucb {
};
#ifdef USE_LIBCMAES
struct opt_cmaes : public defaults::opt_cmaes {
};
#elif defined(USE_NLOPT)
struct opt_nloptnograd : public defaults::opt_nloptnograd {
};
#else
struct opt_gridsearch : public defaults::opt_gridsearch {
};
#endif
struct mean_constant : public defaults::mean_constant {
};
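Note: the hunks above rename several parameter structs to the newer limbo conventions (boptimizer becomes bayes_opt_boptimizer, init/nb_samples becomes init_randomsampling/samples, maxiterations/n_iterations becomes stop_maxiterations/iterations). A minimal consolidated sketch of the new-style Params, assembled only from the names visible in this diff:

struct Params {
    struct bayes_opt_boptimizer : public defaults::bayes_opt_boptimizer {
        BO_PARAM(double, noise, 0.01);
        BO_PARAM(int, dump_period, 1);
    };
    struct init_randomsampling {
        BO_PARAM(int, samples, 10); // heuristics cited above: 5 * dim (Calandra), 11 * dim - 1 (Knowles)
    };
    struct stop_maxiterations {
        BO_PARAM(int, iterations, 30);
    };
    struct bayes_opt_bobase {
        BO_PARAM(bool, stats_enabled, true);
    };
    struct kernel_exp : public defaults::kernel_exp {};
    struct acqui_ucb : public defaults::acqui_ucb {};
    struct mean_constant : public defaults::mean_constant {};
#ifdef USE_LIBCMAES
    struct opt_cmaes : public defaults::opt_cmaes {};
#elif defined(USE_NLOPT)
    struct opt_nloptnograd : public defaults::opt_nloptnograd {};
#else
    struct opt_gridsearch : public defaults::opt_gridsearch {};
#endif
};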
@@ -56,7 +61,7 @@ struct Params {
#endif
struct zdt1 {
static constexpr size_t dim = ZDT_DIM;
static constexpr size_t dim_in = ZDT_DIM;
Eigen::VectorXd operator()(const Eigen::VectorXd& x) const
{
Eigen::VectorXd res(2);
@@ -73,7 +78,7 @@ struct zdt1 {
};
struct zdt2 {
static constexpr size_t dim = ZDT_DIM;
static constexpr size_t dim_in = ZDT_DIM;
Eigen::VectorXd operator()(const Eigen::VectorXd& x) const
{
Eigen::VectorXd res(2);
@@ -90,7 +95,7 @@ struct zdt2 {
};
struct zdt3 {
static constexpr size_t dim = ZDT_DIM;
static constexpr size_t dim_in = ZDT_DIM;
Eigen::VectorXd operator()(const Eigen::VectorXd& x) const
{
Eigen::VectorXd res(2);
@@ -107,7 +112,9 @@ struct zdt3 {
};
struct mop2 {
static constexpr size_t dim = 2;
static constexpr size_t dim_in = 2;
static constexpr size_t dim_out = 2;
Eigen::VectorXd operator()(const Eigen::VectorXd& x) const
{
Eigen::VectorXd res(2);
@@ -116,70 +123,15 @@ struct mop2 {
// f1, f2
Eigen::VectorXd v1 = (xx.array() - 1.0 / sqrt(xx.size())).array().square();
Eigen::VectorXd v2 = (xx.array() + 1.0 / sqrt(xx.size())).array().square();
double f1 = 1.0 - ::exp(-v1.sum());
double f2 = 1.0 - ::exp(-v2.sum());
double f1 = 1.0 - exp(-v1.sum());
double f2 = 1.0 - exp(-v2.sum());
// we _maximize in [0:1]
res(0) = -f1 + 1;
res(1) = -f2 + 1;
res(0) = 1 - f1;
res(1) = 1 - f2;
return res;
}
};
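As a sanity check of the sign flip above (res = 1 - f, so the optimizer maximizes), here is a standalone evaluation of the two objectives at the first objective's optimum, assuming a 2-dimensional xx already mapped into mop2's domain:

#include <Eigen/Core>
#include <cmath>
#include <iostream>

int main()
{
    // at xx = (1/sqrt(2), 1/sqrt(2)), v1 sums to 0, so f1 = 0 and res(0) = 1
    Eigen::VectorXd xx = Eigen::VectorXd::Constant(2, 1.0 / std::sqrt(2.0));
    Eigen::VectorXd v1 = (xx.array() - 1.0 / std::sqrt(xx.size())).array().square();
    Eigen::VectorXd v2 = (xx.array() + 1.0 / std::sqrt(xx.size())).array().square();
    double f1 = 1.0 - std::exp(-v1.sum()); // = 0
    double f2 = 1.0 - std::exp(-v2.sum()); // = 1 - exp(-4), since v2 sums to 4
    std::cout << 1 - f1 << " " << 1 - f2 << std::endl; // prints ~1 0.0183
    return 0;
}

The two objectives conflict: maximizing res(0) drives res(1) down to exp(-4), which is why a Pareto front (rather than a single optimum) is the target.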
namespace limbo {
namespace stat {
template <typename F>
struct ParetoBenchmark {
template <typename BO>
void operator()(BO& opt)
{
opt.update_pareto_data();
#ifndef NSBO // this is already done in NSBO
opt.template update_pareto_model<F::dim>();
#endif
auto dir = opt.res_dir() + "/";
auto p_model = opt.pareto_model();
auto p_data = opt.pareto_data();
std::string it = std::to_string(opt.current_iteration());
std::string model = dir + "pareto_model_" + it + ".dat";
std::string model_real = dir + "pareto_model_real_" + it + ".dat";
std::string data = dir + "pareto_data_" + it + ".dat";
std::string obs_f = dir + "obs_" + it + ".dat";
std::ofstream pareto_model(model.c_str()), pareto_data(data.c_str()),
pareto_model_real(model_real.c_str()), obs(obs_f.c_str());
F f;
for (auto x : p_model)
pareto_model << std::get<1>(x).transpose() << " "
<< std::get<2>(x).transpose() << std::endl;
for (auto x : p_model)
pareto_model_real << f(std::get<0>(x)).transpose() << " " << std::endl;
for (auto x : p_data)
pareto_data << std::get<1>(x).transpose() << std::endl;
for (size_t i = 0; i < opt.observations().size(); ++i)
obs << opt.observations()[i].transpose() << " "
<< opt.samples()[i].transpose() << std::endl;
/*
std::string m1 = "model_" + it + ".dat";
std::ofstream m1f(m1.c_str());
for (float x = 0; x < 1; x += 0.01)
for (float y = 0; y < 1; y += 0.01) {
Eigen::VectorXd v(2);
v << x, y;
m1f << x << " " << y << " "
<< opt.models()[0].mu(v) << " "
<< opt.models()[0].sigma(v) << " "
<< opt.models()[1].mu(v) << " "
<< opt.models()[1].sigma(v) << std::endl;
}
*/
std::cout << "stats done" << std::endl;
}
};
}
}
int main()
{
tools::par::init();
@@ -190,21 +142,20 @@ int main()
typedef zdt2 func_t;
#elif defined ZDT3
typedef zdt3 func_t;
#elif defined MOP2
typedef mop2 func_t;
#else
typedef mop2 func_t;
#endif
typedef stat::ParetoBenchmark<func_t> stat_t;
using stat_t = boost::fusion::vector<experimental::stat::ParetoBenchmark<func_t>>;
#ifdef PAREGO
Parego<Params, statsfun<stat_t>> opt;
#elif defined(NSBO)
Nsbo<Params, statsfun<stat_t>> opt;
#else
exp::bayes_opt::Ehvi<Params, statsfun<stat_t>> opt;
experimental::bayes_opt::Ehvi<Params, statsfun<stat_t>> opt;
#endif
opt.optimize(func_t());
return 0;
}
@@ -15,3 +15,10 @@ def build(bld):
target = 'parego',
uselib = 'BOOST EIGEN TBB SFERES LIBCMAES NLOPT',
use = 'limbo')
obj = bld.program(features = 'cxx',
source = 'multi.cpp',
includes = '.. ../.. ../../../',
target = 'multi',
uselib = 'BOOST EIGEN TBB SFERES LIBCMAES NLOPT',
use = 'limbo')
@@ -4,9 +4,9 @@
#include <limbo/bayes_opt/boptimizer.hpp>
#ifdef USE_SFERES
#include <limbo/bayes_opt/ehvi.hpp>
#include <limbo/bayes_opt/nsbo.hpp>
#include <limbo/bayes_opt/parego.hpp>
#include <limbo/experimental/bayes_opt/ehvi.hpp>
#include <limbo/experimental/bayes_opt/nsbo.hpp>
#include <limbo/experimental/bayes_opt/parego.hpp>
#endif
#endif
#ifndef LIMBO_ACQUI_EHVI_HPP
#define LIMBO_ACQUI_EHVI_HPP
#ifndef LIMBO_EXPERIMENTAL_ACQUI_EHVI_HPP
#define LIMBO_EXPERIMENTAL_ACQUI_EHVI_HPP
#include <vector>
@@ -22,11 +22,12 @@ namespace limbo {
size_t dim() const { return _models[0].dim(); }
double operator()(const Eigen::VectorXd& v) const
template <typename AggregatorFunction = FirstElem>
double operator()(const Eigen::VectorXd& v, const AggregatorFunction& afun = AggregatorFunction()) const
{
assert(_models.size() == 2);
double r[3] = {_ref_point(0), _ref_point(1), _ref_point(2)};
double mu[3] = {_models[0].mu(v), _models[1].mu(v), 0};
double mu[3] = {afun(_models[0].mu(v)), afun(_models[1].mu(v)), 0};
double s[3] = {_models[0].sigma(v), _models[1].sigma(v), 0};
double ehvi = ehvi2d(_pop, r, mu, s);
return ehvi;
......
#ifndef LIMBO_BAYES_OPT_BO_MULTI_HPP
#define LIMBO_BAYES_OPT_BO_MULTI_HPP
#define VERSION "xxx"
#include <Eigen/Core>
#ifndef USE_SFERES
#warning No sferes
#else
#ifndef USE_TBB
#define NO_PARALLEL
#endif
#include <sferes/phen/parameters.hpp>
#include <sferes/gen/evo_float.hpp>
#ifdef USE_TBB
#include <sferes/eval/parallel.hpp>
#endif
#include <sferes/modif/dummy.hpp>
#include <sferes/ea/nsga2.hpp>
#endif
#include <limbo/bayes_opt/bo_base.hpp>
#include <limbo/experimental/bayes_opt/pareto.hpp>
#include <limbo/experimental/tools/pareto.hpp>
namespace limbo {
namespace experimental {
@@ -47,8 +51,12 @@ namespace limbo {
};
};
SFERES_FITNESS(SferesFitBase, sferes::fit::Fitness){
template <typename Indiv>
void eval(const Indiv& indiv){}};
template <typename M>
class SferesFit {
class SferesFit : public SferesFitBase<> {
public:
SferesFit(const std::vector<M>& models) : _models(models) {}
SferesFit() {}
@@ -66,58 +74,40 @@ namespace limbo {
v[j] = indiv.data(j);
// we protect against overestimation because it has some spurious effects
for (size_t i = 0; i < _models.size(); ++i)
this->_objs[i] = std::min(_models[i].mu(v), _models[i].max_observation());
this->_objs[i] = std::min(_models[i].mu(v)(0), _models[i].max_observation()(0));
}
protected:
std::vector<M> _models;
std::vector<float> _objs;
};
#endif
}
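SferesFit (above) adapts the per-objective GP means into a sferes2 fitness so that update_pareto_model() can run NSGA-II on the surrogate models. A rough sketch of the wiring, assembled from the includes and typedefs visible in this diff; the stat/modifier types and the set_fit_proto call are assumptions about the sferes2 API, not verbatim limbo code:

// inside update_pareto_model<D>(), after _update_models() has refit the GPs
typedef sferes::gen::EvoFloat<D, multi::SferesParams> gen_t;
typedef sferes::phen::Parameters<gen_t, multi::SferesFit<model_t>, multi::SferesParams> phen_t;
typedef sferes::eval::Parallel<multi::SferesParams> eval_t;
typedef boost::fusion::vector<sferes::stat::ParetoFront<phen_t, multi::SferesParams>> stat_t; // assumed
typedef sferes::modif::Dummy<> modifier_t; // assumed; modif/dummy.hpp is included above
typedef sferes::ea::Nsga2<phen_t, eval_t, stat_t, modifier_t, multi::SferesParams> ea_t; // assumed
ea_t ea;
ea.set_fit_proto(multi::SferesFit<model_t>(this->_models)); // assumed setter: gives individuals access to the GPs
ea.run(); // the resulting Pareto front approximates the front of the GP means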
// to removed once moved out of experimental?
// to be removed once moved out of experimental?
BOOST_PARAMETER_TEMPLATE_KEYWORD(initfun)
BOOST_PARAMETER_TEMPLATE_KEYWORD(acquifun)
BOOST_PARAMETER_TEMPLATE_KEYWORD(modelfun)
BOOST_PARAMETER_TEMPLATE_KEYWORD(statsfun)
BOOST_PARAMETER_TEMPLATE_KEYWORD(stopcrit)
// algo-specific ?
BOOST_PARAMETER_TEMPLATE_KEYWORD(acquiopt)
typedef boost::parameter::parameters<boost::parameter::optional<tag::acquiopt>,
boost::parameter::optional<tag::statsfun>,
typedef boost::parameter::parameters<boost::parameter::optional<tag::statsfun>,
boost::parameter::optional<tag::initfun>,
boost::parameter::optional<tag::acquifun>,
boost::parameter::optional<tag::stopcrit>,
boost::parameter::optional<tag::modelfun>> bo_multi_signature;
// clang-format off
template <class Params,
class A1 = boost::parameter::void_,
class A2 = boost::parameter::void_,
class A3 = boost::parameter::void_,
class A4 = boost::parameter::void_,
class A5 = boost::parameter::void_,
class A6 = boost::parameter::void_>
// clang-format on
class BoMulti : public limbo::bayes_opt::BoBase<Params, A2, A3, A4, A5, A6> {
class A1 = boost::parameter::void_,
class A2 = boost::parameter::void_,
class A3 = boost::parameter::void_,
class A4 = boost::parameter::void_,
class A5 = boost::parameter::void_,
class A6 = boost::parameter::void_>
class BoMulti : public limbo::bayes_opt::BoBase<Params, A1, A2, A3, A4, A5, A6> {
public:
struct defaults {
#ifdef USE_LIBCMAES
typedef opt::Cmaes<Params> acquiopt_t;
#elif defined(USE_NLOPT)
typedef opt::NLOptNoGrad<Params, nlopt::GN_DIRECT_L_RAND> acquiopt_t;
#else
#warning NO NLOpt, and NO Libcmaes: the acquisition function will be optimized by a grid search algorithm (which is usually bad). Please install at least NLOpt or libcmaes to use limbo!
typedef opt::GridSearch<Params> acquiopt_t;
#endif
};
typedef typename bo_multi_signature::bind<A1, A2, A3, A4, A5, A6>::type args;
typedef typename boost::parameter::binding<args, tag::acquiopt, typename defaults::acquiopt_t>::type acqui_optimizer_t;
typedef limbo::bayes_opt::BoBase<Params, A2, A3, A4, A5, A6> base_t;
typedef limbo::bayes_opt::BoBase<Params, A1, A2, A3, A4, A5, A6> base_t;
typedef typename base_t::model_t model_t;
typedef typename base_t::acquisition_function_t acquisition_function_t;
// point, obj, sigma
@@ -146,12 +136,8 @@ namespace limbo {
template <int D>
void update_pareto_model()
{
std::cout << "updating models...";
std::cout.flush();
this->_update_models();
std::cout << "ok" << std::endl;
#ifdef USE_SFERES
typedef sferes::gen::EvoFloat<D, multi::SferesParams> gen_t;
typedef sferes::phen::Parameters<gen_t, multi::SferesFit<model_t>, multi::SferesParams> phen_t;
typedef sferes::eval::Parallel<multi::SferesParams> eval_t;
@@ -192,9 +178,7 @@ namespace limbo {
assert(sigma.size() == objs.size());
pareto_t p(points.size());
tools::par::loop(0, p.size(), [&](size_t k) {
// clang-format off
p[k] = std::make_tuple(points[k], objs[k], sigma[k]);
// clang-format on
});
return p;
}
@@ -202,14 +186,15 @@ namespace limbo {
void _update_models()
{
size_t dim = this->_samples[0].size();
std::vector<std::vector<double>> uni_obs(nb_objs());
std::vector<std::vector<Eigen::VectorXd>> uni_obs(nb_objs());
for (size_t i = 0; i < this->_observations.size(); ++i)
for (int j = 0; j < this->_observations[i].size(); ++j)
uni_obs[j].push_back(this->_observations[i][j]);
std::vector<model_t> models(nb_objs(), model_t(dim));
uni_obs[j].push_back(Eigen::VectorXd::Constant(1, this->_observations[i][j]));
std::vector<model_t> models(nb_objs(), model_t(dim, 1));
_models = models;
for (size_t i = 0; i < uni_obs.size(); ++i)
_models[i].compute(this->_samples, uni_obs[i], 1e-5);
for (size_t i = 0; i < uni_obs.size(); ++i) {
_models[i].compute(this->_samples, uni_obs[i], Eigen::VectorXd::Constant(this->_samples.size(), 1e-5));
}
}
};
}
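The rewritten _update_models reflects the GP API change in this PR: observations are now Eigen vectors, so each scalar component is wrapped in a 1-D VectorXd, and each objective gets its own one-dimensional GP trained with a per-sample noise vector. A standalone sketch of that reshaping, assuming two objectives:

#include <Eigen/Core>
#include <vector>

int main()
{
    // two bi-objective observations
    std::vector<Eigen::VectorXd> observations(2, Eigen::Vector2d(0.3, 0.7));
    // one list of 1-D observations per objective
    std::vector<std::vector<Eigen::VectorXd>> uni_obs(2);
    for (size_t i = 0; i < observations.size(); ++i)
        for (int j = 0; j < observations[i].size(); ++j)
            uni_obs[j].push_back(Eigen::VectorXd::Constant(1, observations[i][j]));
    // uni_obs[0] = {[0.3], [0.3]} and uni_obs[1] = {[0.7], [0.7]};
    // each list is then fed to its own model_t(dim, 1) via compute(samples, uni_obs[i], noise_vector)
    return 0;
}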
......
#ifndef LIMBO_BAYES_OPT_EHVI_HPP
#define LIMBO_BAYES_OPT_EHVI_HPP
#ifndef LIMBO_EXPERIMENTAL_BAYES_OPT_EHVI_HPP
#define LIMBO_EXPERIMENTAL_BAYES_OPT_EHVI_HPP
#include <algorithm>
@@ -18,22 +18,41 @@ namespace limbo {
BO_PARAM(double, y_ref, -11);
};
}
namespace experimental {
namespace bayes_opt {
// clang-format off
BOOST_PARAMETER_TEMPLATE_KEYWORD(acquiopt)
typedef boost::parameter::parameters<boost::parameter::optional<tag::acquiopt>> ehvi_signature;
template <class Params,
class A1 = boost::parameter::void_,
class A2 = boost::parameter::void_,
class A3 = boost::parameter::void_,
class A4 = boost::parameter::void_,
class A5 = boost::parameter::void_,
class A6 = boost::parameter::void_>
// clang-format on
class Ehvi : public BoMulti<Params, A2, A3, A4, A5, A6> {
class Ehvi : public BoMulti<Params, A1, A2, A3, A4, A5, A6> {
public:
struct defaults {
#ifdef USE_LIBCMAES
typedef opt::Cmaes<Params> acquiopt_t;
#elif defined(USE_NLOPT)
typedef opt::NLOptNoGrad<Params, nlopt::GN_DIRECT_L_RAND> acquiopt_t;
#else
#warning NO NLOpt, and NO Libcmaes: the acquisition function will be optimized by a grid search algorithm (which is usually bad). Please install at least NLOpt or libcmaes to use limbo!
typedef opt::GridSearch<Params> acquiopt_t;
#endif
};
typedef typename ehvi_signature::bind<A1, A2, A3, A4, A5, A6>::type args;
typedef typename boost::parameter::binding<args, tag::acquiopt, typename defaults::acquiopt_t>::type acqui_optimizer_t;
typedef std::tuple<Eigen::VectorXd, Eigen::VectorXd, Eigen::VectorXd> pareto_point_t;
typedef limbo::bayes_opt::BoBase<Params, A3, A4, A5, A6> base_t;
typedef limbo::experimental::bayes_opt::BoMulti<Params, A1, A2, A3, A4, A5, A6> base_t;
typedef typename base_t::model_t model_t;
typedef typename base_t::acqui_optimizer_t acqui_optimizer_t;
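With acquiopt now a boost::parameter keyword of Ehvi itself (see ehvi_signature above), the inner optimizer can be overridden at the type level instead of editing the defaults struct. A minimal sketch, using the GridSearch optimizer named in this diff:

// falls back to defaults::acquiopt_t (CMA-ES / NLopt / grid search) when the keyword is omitted
using ehvi_grid_t = limbo::experimental::bayes_opt::Ehvi<Params,
    limbo::experimental::bayes_opt::acquiopt<limbo::opt::GridSearch<Params>>>;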
template <typename EvalFunction>
void optimize(const EvalFunction& feval, bool reset = true)
@@ -44,7 +63,7 @@ namespace limbo {
while (this->_samples.size() == 0 || !this->_stop(*this, FirstElem())) {
std::cout.flush();
this->template update_pareto_model<EvalFunction::dim>();
this->template update_pareto_model<EvalFunction::dim_in>();
this->update_pareto_data();
// copy in the ehvi structure to compute expected improvement
@@ -57,10 +76,6 @@ namespace limbo {
pop.push_back(ind);
}
// optimize ehvi
std::cout << "optimizing ehvi (" << this->pareto_data().size() << ")"
<< std::endl;
auto acqui = acqui::Ehvi<Params, model_t>(
this->_models, pop,
Eigen::Vector3d(Params::bayes_opt_ehvi::x_ref(), Params::bayes_opt_ehvi::y_ref(), 0));
@@ -68,38 +83,27 @@ namespace limbo {
// maximize with inner opt
typedef std::pair<Eigen::VectorXd, double> pair_t;
pair_t init(Eigen::VectorXd::Zero(1), -std::numeric_limits<float>::max());
auto body = [&](int i) -> pair_t {
// clang-format off
auto x = this->pareto_data()[i];
auto acqui_optimization = AcquiOptimization<acquisition_function_t, AggregatorFunction>(acqui, afun, starting_point);
Eigen::VectorXd new_sample = acqui_optimizer(acqui_optimization, true);
auto acqui_optimization =
[&](const Eigen::VectorXd& x, bool g) { return opt::no_grad(acqui(x)); };
Eigen::VectorXd s = inner_opt(acqui, acqui.dim(), std::get<0>(x), FirstElem());
Eigen::VectorXd s = inner_opt(acqui_optimization, std::get<0>(x), true);
double hv = acqui(s);
return std::make_pair(s, hv);
// clang-format on
};
auto comp = [](const pair_t& v1, const pair_t& v2) {
// clang-format off
return v1.second > v2.second;
// clang-format on
return v1.second > v2.second;
};
auto m = tools::par::max(init, this->pareto_data().size(), body, comp);
// take the best
std::cout << "best (cmaes):" << m.second << std::endl;
std::cout << "sample selected" << std::endl;
Eigen::VectorXd new_sample = m.first;
std::cout << "new sample:" << new_sample.transpose() << std::endl;
std::cout << "expected improvement: " << acqui(new_sample) << std::endl;
std::cout << "expected value: " << this->_models[0].mu(new_sample) << " "
<< this->_models[1].mu(new_sample) << " "
<< this->_models[0].sigma(new_sample) << " "
<< this->_models[1].sigma(new_sample) << std::endl;
std::cout << "opt done" << std::endl;
// delete pop
for (auto x : pop)
@@ -107,12 +111,6 @@ namespace limbo {
// add sample
this->add_new_sample(new_sample, feval(new_sample));
std::cout
<< this->_current_iteration << " | new sample:" << new_sample.transpose()
<< " => "
<< this->_observations[this->_observations.size() - 1].transpose()
<< std::endl;
this->_update_stats(*this, FirstElem(), false);
this->_current_iteration++;
this->_total_iterations++;
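The loop above is a parallel argmax: body evaluates one EHVI local optimization per Pareto point, and tools::par::max reduces the results with comp. For reference, the same reduction expressed serially; a sketch assuming a precomputed candidates vector of (sample, ehvi) pairs:

#include <Eigen/Core>
#include <algorithm>
#include <utility>
#include <vector>

typedef std::pair<Eigen::VectorXd, double> pair_t;

// pick the restart with the highest expected hypervolume improvement
pair_t best_of(const std::vector<pair_t>& candidates)
{
    return *std::max_element(candidates.begin(), candidates.end(),
        [](const pair_t& a, const pair_t& b) { return a.second < b.second; });
}

Restarting the inner optimizer from every point of the current Pareto front, rather than from a single point, is what makes the EHVI landscape's many local optima tractable here.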
......
#ifndef LIMBO_STAT_PARETO_BENCHMARK_HPP
#define LIMBO_STAT_PARETO_BENCHMARK_HPP
#include <limbo/limbo.hpp>
namespace limbo {
namespace experimental {
namespace stat {
template <typename F>
struct ParetoBenchmark {
template <typename BO, typename AggregatorFunction>
void operator()(BO& opt, const AggregatorFunction& afun, bool blacklisted)
{