Commit ecfb96ae authored by Vaios Papaspyros
Browse files

Removing console output from ehvi + Slight code refactoring in experimental +...

Removing console output from ehvi + Slight code refactoring in experimental + Moved acquiopt tag from bo_multi.hpp since it seems to be algorithm specific
parent 5c5e02eb
......@@ -77,46 +77,29 @@ namespace limbo {
#endif
}
// to removed once moved out of experimental?
// to be removed once moved out of experimental?
BOOST_PARAMETER_TEMPLATE_KEYWORD(initfun)
BOOST_PARAMETER_TEMPLATE_KEYWORD(acquifun)
BOOST_PARAMETER_TEMPLATE_KEYWORD(modelfun)
BOOST_PARAMETER_TEMPLATE_KEYWORD(statsfun)
BOOST_PARAMETER_TEMPLATE_KEYWORD(stopcrit)
// algo-specific ?
BOOST_PARAMETER_TEMPLATE_KEYWORD(acquiopt)
typedef boost::parameter::parameters<boost::parameter::optional<tag::acquiopt>,
boost::parameter::optional<tag::statsfun>,
typedef boost::parameter::parameters<boost::parameter::optional<tag::statsfun>,
boost::parameter::optional<tag::initfun>,
boost::parameter::optional<tag::acquifun>,
boost::parameter::optional<tag::stopcrit>,
boost::parameter::optional<tag::modelfun>> bo_multi_signature;
// clang-format off
template <class Params,
class A1 = boost::parameter::void_,
class A2 = boost::parameter::void_,
class A3 = boost::parameter::void_,
class A4 = boost::parameter::void_,
class A5 = boost::parameter::void_,
class A6 = boost::parameter::void_>
// clang-format on
class A1 = boost::parameter::void_,
class A2 = boost::parameter::void_,
class A3 = boost::parameter::void_,
class A4 = boost::parameter::void_,
class A5 = boost::parameter::void_,
class A6 = boost::parameter::void_>
class BoMulti : public limbo::bayes_opt::BoBase<Params, A1, A2, A3, A4, A5, A6> {
public:
// Default template arguments for BoMulti: selects the acquisition-function
// optimizer type at compile time, based on which optimization library the
// build has available.
struct defaults {
#ifdef USE_LIBCMAES
// Preferred choice: CMA-ES via libcmaes.
typedef opt::Cmaes<Params> acquiopt_t;
#elif defined(USE_NLOPT)
// Fallback: NLopt's gradient-free DIRECT-L (randomized) global search.
typedef opt::NLOptNoGrad<Params, nlopt::GN_DIRECT_L_RAND> acquiopt_t;
#else
#warning NO NLOpt, and NO Libcmaes: the acquisition function will be optimized by a grid search algorithm (which is usually bad). Please install at least NLOpt or libcmaes to use limbo!.
// Last resort: exhaustive grid search (usually a poor optimizer -- see the
// #warning above).
typedef opt::GridSearch<Params> acquiopt_t;
#endif
};
typedef typename bo_multi_signature::bind<A1, A2, A3, A4, A5, A6>::type args;
typedef typename boost::parameter::binding<args, tag::acquiopt, typename defaults::acquiopt_t>::type acqui_optimizer_t;
typedef limbo::bayes_opt::BoBase<Params, A1, A2, A3, A4, A5, A6> base_t;
typedef typename base_t::model_t model_t;
......@@ -147,10 +130,7 @@ namespace limbo {
template <int D>
void update_pareto_model()
{
std::cout << "updating models...";
std::cout.flush();
this->_update_models();
std::cout << "ok" << std::endl;
#ifdef USE_SFERES
typedef sferes::gen::EvoFloat<D, multi::SferesParams> gen_t;
typedef sferes::phen::Parameters<gen_t, multi::SferesFit<model_t>, multi::SferesParams> phen_t;
......@@ -192,9 +172,7 @@ namespace limbo {
assert(sigma.size() == objs.size());
pareto_t p(points.size());
tools::par::loop(0, p.size(), [&](size_t k) {
// clang-format off
p[k] = std::make_tuple(points[k], objs[k], sigma[k]);
// clang-format on
});
return p;
}
......
......@@ -18,22 +18,40 @@ namespace limbo {
BO_PARAM(double, y_ref, -11);
};
}
namespace experimental {
namespace bayes_opt {
// clang-format off
BOOST_PARAMETER_TEMPLATE_KEYWORD(acquiopt)
typedef boost::parameter::parameters<boost::parameter::optional<tag::acquiopt>> ehvi_signature;
template <class Params,
class A2 = boost::parameter::void_,
class A3 = boost::parameter::void_,
class A4 = boost::parameter::void_,
class A5 = boost::parameter::void_,
class A6 = boost::parameter::void_>
// clang-format on
class Ehvi : public BoMulti<Params, A2, A3, A4, A5, A6> {
public:
// Default template arguments for Ehvi: picks the acquisition-function
// optimizer at compile time according to the available optimization
// library (mirrors the selection logic used elsewhere in limbo).
struct defaults {
#ifdef USE_LIBCMAES
// Preferred choice: CMA-ES via libcmaes.
typedef opt::Cmaes<Params> acquiopt_t;
#elif defined(USE_NLOPT)
// Fallback: NLopt's gradient-free DIRECT-L (randomized) global search.
typedef opt::NLOptNoGrad<Params, nlopt::GN_DIRECT_L_RAND> acquiopt_t;
#else
#warning NO NLOpt, and NO Libcmaes: the acquisition function will be optimized by a grid search algorithm (which is usually bad). Please install at least NLOpt or libcmaes to use limbo!.
// Last resort: exhaustive grid search (usually a poor optimizer -- see the
// #warning above).
typedef opt::GridSearch<Params> acquiopt_t;
#endif
};
typedef typename ehvi_signature::bind<A2, A3, A4, A5, A6>::type args;
typedef typename boost::parameter::binding<args, tag::acquiopt, typename defaults::acquiopt_t>::type acqui_optimizer_t;
typedef std::tuple<Eigen::VectorXd, Eigen::VectorXd, Eigen::VectorXd> pareto_point_t;
typedef limbo::experimental::bayes_opt::BoMulti<Params, A3, A4, A5, A6> base_t;
typedef limbo::experimental::bayes_opt::BoMulti<Params, A2, A3, A4, A5, A6> base_t;
typedef typename base_t::model_t model_t;
typedef typename base_t::acqui_optimizer_t acqui_optimizer_t;
template <typename EvalFunction>
void optimize(const EvalFunction& feval, bool reset = true)
......@@ -57,10 +75,6 @@ namespace limbo {
pop.push_back(ind);
}
// optimize ehvi
std::cout << "optimizing ehvi (" << this->pareto_data().size() << ")"
<< std::endl;
auto acqui = acqui::Ehvi<Params, model_t>(
this->_models, pop,
Eigen::Vector3d(Params::bayes_opt_ehvi::x_ref(), Params::bayes_opt_ehvi::y_ref(), 0));
......@@ -68,44 +82,27 @@ namespace limbo {
// maximize with inner opt
typedef std::pair<Eigen::VectorXd, double> pair_t;
pair_t init(Eigen::VectorXd::Zero(1), -std::numeric_limits<float>::max());
auto body = [&](int i) -> pair_t {
// clang-format off
auto x = this->pareto_data()[i];
auto acqui_optimization =
[&](const Eigen::VectorXd& x, bool g) { return opt::no_grad(acqui(x)); };
auto body = [&](int i) -> pair_t {
auto x = this->pareto_data()[i];
// TODO recheck
// auto acqui_optimization = AcquiOptimization<acquisition_function_t, AggregatorFunction>(acqui, afun, starting_point);
// acqui_optimizer_t acqui_optimizer;
// Eigen::VectorXd new_sample = acqui_optimizer(acqui_optimization, true);
auto acqui_optimization =
[&](const Eigen::VectorXd& x, bool g) { return opt::no_grad(acqui(x)); };
Eigen::VectorXd s = inner_opt(acqui_optimization, std::get<0>(x), true);
double hv = acqui(s);
Eigen::VectorXd s = inner_opt(acqui_optimization, std::get<0>(x), true);
double hv = acqui(s);
return std::make_pair(s, hv);
// clang-format on
return std::make_pair(s, hv);
};
auto comp = [](const pair_t& v1, const pair_t& v2) {
// clang-format off
return v1.second > v2.second;
// clang-format on
};
auto m = tools::par::max(init, this->pareto_data().size(), body, comp);
// take the best
std::cout << "best (cmaes):" << m.second << std::endl;
std::cout << "sample selected" << std::endl;
Eigen::VectorXd new_sample = m.first;
std::cout << "new sample:" << new_sample.transpose() << std::endl;
std::cout << "expected improvement: " << acqui(new_sample) << std::endl;
std::cout << "expected value: " << this->_models[0].mu(new_sample) << " "
<< this->_models[1].mu(new_sample) << " "
<< this->_models[0].sigma(new_sample) << " "
<< this->_models[1].sigma(new_sample) << std::endl;
std::cout << "opt done" << std::endl;
// delete pop
for (auto x : pop)
......@@ -113,12 +110,6 @@ namespace limbo {
// add sample
this->add_new_sample(new_sample, feval(new_sample));
std::cout
<< this->_current_iteration << " | new sample:" << new_sample.transpose()
<< " => "
<< this->_observations[this->_observations.size() - 1].transpose()
<< std::endl;
this->_update_stats(*this, FirstElem(), false);
this->_current_iteration++;
this->_total_iterations++;
......
......@@ -36,7 +36,6 @@ namespace limbo {
for (size_t i = 0; i < opt.observations().size(); ++i)
obs << opt.observations()[i].transpose() << " "
<< opt.samples()[i].transpose() << std::endl;
std::cout << "stats done" << std::endl;
}
};
}
......
......@@ -11,6 +11,7 @@
#include <Eigen/LU>
#include <limbo/model/gp/no_lf_opt.hpp>
#include <limbo/tools.hpp>
namespace limbo {
namespace model {
......@@ -206,9 +207,7 @@ namespace limbo {
std::cout << "WARNING max_observation with multi dimensional "
"observations doesn't make sense"
<< std::endl;
Eigen::VectorXd _max_observation(1);
_max_observation << _observations.maxCoeff();
return _max_observation;
return tools::make_vector(_observations.maxCoeff());
}
/// return the mean observation (only call this if the output of the GP is of dimension 1)
......
Supports Markdown
0% or .
You are about to add 0 people to the discussion. Proceed with caution.
Finish editing this message first!
Please register or sign in to comment