Commit 629292fc authored by Jean-Baptiste Mouret's avatar Jean-Baptiste Mouret
Browse files

add some defaults

parent 8e6419d1
......@@ -182,10 +182,7 @@ namespace limbo {
// defaults
struct defaults {
using init_t = init::RandomSampling<Params>; // 1
using kf_t = kernel::Exp<Params>;
using mean_t = mean::Data<Params>;
using model_t = model::GP<Params, kf_t, mean_t>; // 2
using model_t = model::GPBasic<Params>; // 2
// WARNING: you have to specify the acquisition function
// if you use a custom model
using acqui_t = acqui::UCB<Params, model_t>; // 3
......
......@@ -46,17 +46,17 @@
#ifndef LIMBO_BAYES_OPT_BOPTIMIZER_HPP
#define LIMBO_BAYES_OPT_BOPTIMIZER_HPP
#include <iostream>
#include <algorithm>
#include <iostream>
#include <iterator>
#include <boost/parameter/aux_/void.hpp>
#include <Eigen/Core>
#include <limbo/bayes_opt/bo_base.hpp>
#include <limbo/tools/macros.hpp>
#include <limbo/tools/random_generator.hpp>
#include <limbo/bayes_opt/bo_base.hpp>
#ifdef USE_NLOPT
#include <limbo/opt/nlopt_no_grad.hpp>
#elif defined USE_LIBCMAES
......@@ -152,7 +152,7 @@ namespace limbo {
acquisition_function_t acqui(_model, this->_current_iteration);
auto acqui_optimization =
[&](const Eigen::VectorXd& x, bool g) { return acqui(x,afun,g); };
[&](const Eigen::VectorXd& x, bool g) { return acqui(x, afun, g); };
Eigen::VectorXd starting_point = tools::random_vector(StateFunction::dim_in(), Params::bayes_opt_bobase::bounded());
Eigen::VectorXd new_sample = acqui_optimizer(acqui_optimization, starting_point, Params::bayes_opt_bobase::bounded());
this->eval_and_add(sfun, new_sample);
......@@ -195,7 +195,22 @@ namespace limbo {
protected:
model_t _model;
};
// Internal defaults used only by the BOptimizerHPOpt shortcut below:
// a GP model with hyper-parameter optimization (model::GPOpt) paired with
// the UCB acquisition function instantiated on that model.
// NOTE: the leading underscore marks this namespace as implementation detail;
// user code should not reference it directly.
namespace _default_hp {
template <typename Params>
using model_t = model::GPOpt<Params>;
template <typename Params>
using acqui_t = acqui::UCB<Params, model_t<Params>>;
}
/// A shortcut for a BOptimizer that uses UCB as the acquisition function and
/// GPOpt (a GP with hyper-parameter optimization) as the model.
/// The acquisition function and the model CANNOT be customized here because the
/// modelfun/acquifun slots are already consumed by the defaults — use the plain
/// BOptimizer directly if you need to tune either of them.
/// The remaining template slots (A1..A4) forward to BOptimizer and may still be
/// used for the other Boost.Parameter options (init, stopping criteria, etc.).
template <class Params,
class A1 = boost::parameter::void_,
class A2 = boost::parameter::void_,
class A3 = boost::parameter::void_,
class A4 = boost::parameter::void_>
using BOptimizerHPOpt = BOptimizer<Params, A1, A2, A3, A4, modelfun<_default_hp::model_t<Params>>, acquifun<_default_hp::acqui_t<Params>>>;
}
}
#endif
......@@ -55,8 +55,13 @@
#include <Eigen/Core>
#include <Eigen/LU>
#include <limbo/model/gp/no_lf_opt.hpp>
#include <limbo/tools.hpp>
#include <limbo/model/gp/no_lf_opt.hpp>
#include <limbo/kernel/squared_exp_ard.hpp>
#include <limbo/kernel/exp.hpp>
#include <limbo/model/gp/kernel_lf_opt.hpp>
#include <limbo/mean/data.hpp>
#include <limbo/mean/constant.hpp>
namespace limbo {
namespace model {
......@@ -525,6 +530,15 @@ namespace limbo {
return k;
}
};
/// GPBasic is a GP with a "mean data" mean function (mean::Data), an
/// Exponential kernel (kernel::Exp), and NO hyper-parameter optimization
/// (gp::NoLFOpt) — the cheapest default when the kernel parameters are fixed.
template <typename Params>
using GPBasic = GP<Params, kernel::Exp<Params>, mean::Data<Params>, gp::NoLFOpt<Params>>;
/// GPOpt is a GP with a "mean data" mean function (mean::Data), a Squared
/// Exponential kernel with Automatic Relevance Determination
/// (kernel::SquaredExpARD), and hyper-parameter optimization of the kernel
/// likelihood based on Rprop (gp::KernelLFOpt).
template <typename Params>
using GPOpt = GP<Params, kernel::SquaredExpARD<Params>, mean::Data<Params>, gp::KernelLFOpt<Params>>;
}
}
......
Supports Markdown
0% or .
You are about to add 0 people to the discussion. Proceed with caution.
Finish editing this message first!
Please register or sign in to comment