Commit 65e9dc44 authored by Konstantinos Chatzilygeroudis

Added libGP to regression benchmarks

parent 9ad97164
@@ -50,6 +50,8 @@ import limbo
def options(opt):
    opt.add_option('--enable_bayesopt', action='store_true', help='enable the comparison with bayesopt in the benchmarks ', dest='enable_bayesopt')
    opt.add_option('--bayesopt_path', type='string', help='path to Bayesopt', dest='bayesopt_path')
+    opt.add_option('--enable_libgp', action='store_true', help='enable the comparison with libgp in the benchmarks ', dest='enable_libgp')
+    opt.add_option('--libgp_path', type='string', help='path to libgp', dest='libgp_path')
def configure(conf):
@@ -64,6 +66,17 @@ def configure(conf):
        conf.env.DEFINES_BAYESOPT = ['USE_BAYESOPT']
        conf.get_env()['USE_BAYESOPT'] = True
+    if conf.options.enable_libgp:
+        if conf.options.libgp_path:
+            conf.env.LIBPATH_LIBGP = [conf.options.libgp_path+'/lib']
+            conf.env.INCLUDES_LIBGP = [conf.options.libgp_path+'/include/']
+        else:
+            conf.env.LIBPATH_LIBGP = ['/usr/local/lib']
+            conf.env.INCLUDES_LIBGP = ['/usr/local/include/']
+        conf.env.STLIB_LIBGP = ['gp']
+        conf.env.DEFINES_LIBGP = ['USE_LIBGP']
+        conf.get_env()['USE_LIBGP'] = True
def build_bo_benchmarks(bld):
......
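The configure block above publishes LIBGP as a waf "use" variable: STLIB_LIBGP links the static gp library, LIBPATH_LIBGP and INCLUDES_LIBGP point at its library and header directories, and DEFINES_LIBGP adds -DUSE_LIBGP, which is what gates all the libGP code paths in the benchmark sources below. A trivial, illustrative sketch (not project code) of how such a define-gated path behaves at compile time:

#include <iostream>

int main()
{
    // Built with -DUSE_LIBGP (what DEFINES_LIBGP = ['USE_LIBGP'] produces for
    // targets that use 'LIBGP'), the comparison code is compiled in; without
    // the define, the branch disappears entirely at preprocessing time.
#ifdef USE_LIBGP
    std::cout << "libGP comparison enabled" << std::endl;
#else
    std::cout << "libGP comparison disabled" << std::endl;
#endif
    return 0;
}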
@@ -55,11 +55,13 @@ namespace limbo {
    namespace model {
        namespace gp {
            ///@ingroup model_opt
-            ///optimize the likelihood of the kernel only
+            ///base class for optimization of the hyper-parameters of a GP
            template <typename Params, typename Optimizer = opt::ParallelRepeater<Params, opt::Rprop<Params>>>
            struct HPOpt {
            public:
                HPOpt() : _called(false) {}
+                /// copy constructor: mark the copy as already called so that it does not trigger the destructor warning
+                HPOpt(const HPOpt&) { _called = true; }
                ~HPOpt()
                {
                    if (!_called) {
@@ -70,8 +72,8 @@ namespace limbo {
        protected:
            bool _called;
        };
-        }
-    }
-}
+        } // namespace gp
+    } // namespace model
+} // namespace limbo
#endif
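The _called flag above lets the destructor warn when a hyper-parameter optimizer is constructed but never actually run; the new copy constructor marks copies as already called so that temporaries created when a GP model is copied do not trigger the warning spuriously. The pattern, reduced to a self-contained sketch (the class name and messages are illustrative only, not limbo's actual code):

#include <iostream>

// Illustrative stand-in for the HPOpt pattern: warn if the object is
// destroyed without its optimization ever having been invoked.
struct HPOptSketch {
    HPOptSketch() : _called(false) {}
    // copies start out "called" so that temporaries do not warn spuriously
    HPOptSketch(const HPOptSketch&) : _called(true) {}
    ~HPOptSketch()
    {
        if (!_called)
            std::cerr << "warning: hyper-parameter optimization was never run" << std::endl;
    }
    void operator()() { _called = true; /* run the actual optimization here */ }

protected:
    bool _called;
};

int main()
{
    HPOptSketch used;
    used(); // no warning: the optimizer ran
    HPOptSketch copy(used); // no warning: copies are marked as called
    HPOptSketch unused; // warns on destruction: it never ran
    return 0;
}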
@@ -7,6 +7,12 @@
#include <limits>
#include "test_functions.hpp"
+#ifdef USE_LIBGP
+#include <gp/gp.h>
+#include <gp/gp_utils.h>
+#include <gp/rprop.h>
+#endif
using namespace limbo;
template <typename T>
@@ -61,6 +67,9 @@ void benchmark(const std::string& name, std::vector<int> dimensions, std::vector
    int N_test = 10000;
    std::ofstream ofs_res(name + ".dat");
+#ifdef USE_LIBGP
+    std::ofstream ofs_libgp(name + "_libgp.dat");
+#endif
    for (size_t dim = 0; dim < dims.size(); dim++) {
        std::vector<Eigen::VectorXd> bounds = func.bounds();
@@ -76,6 +85,9 @@ void benchmark(const std::string& name, std::vector<int> dimensions, std::vector
            // Output number of models
            ofs_res << D << " " << N << " @NMODELS" << std::endl;
+#ifdef USE_LIBGP
+            ofs_libgp << D << " " << N << " 1" << std::endl;
+#endif
            std::cout << name << " in dim: " << D << " and # of points: " << N << std::endl;
            std::string file_name = name + "_" + std::to_string(D) + "_" + std::to_string(N);
@@ -107,6 +119,33 @@ void benchmark(const std::string& name, std::vector<int> dimensions, std::vector
                obs[i] = obs[i].array() + gaussian_rand(0.0, sigma);
            }
+#ifdef USE_LIBGP
+            std::cout << "Training libGP GP..." << std::endl;
+            // Learn libGP GP here
+            libgp::GaussianProcess libgp_gp(D, "CovSum ( CovSEard, CovNoise)");
+            // set initial hyper-parameters
+            Eigen::VectorXd libgp_params = Eigen::VectorXd::Zero(libgp_gp.covf().get_param_dim());
+            libgp_params(D - 1) = std::log(0.01);
+            libgp_gp.covf().set_loghyper(libgp_params);
+            // add observations to libGP
+            for (int i = 0; i < N; i++) {
+                libgp_gp.add_pattern(points[i].data(), obs[i](0));
+            }
+            // optimize hyper-parameters of libGP
+            libgp::RProp rprop_optimizer;
+            double delta0 = 0.1;
+            double deltamin = 1e-6;
+            double deltamax = 50;
+            double etaminus = 0.5;
+            double etaplus = 1.2;
+            rprop_optimizer.init(1e-4, delta0, deltamin, deltamax, etaminus, etaplus);
+            auto start_libgp = std::chrono::high_resolution_clock::now();
+            rprop_optimizer.maximize(&libgp_gp, 300, false);
+            auto time_libgp = std::chrono::duration_cast<std::chrono::microseconds>(std::chrono::high_resolution_clock::now() - start_libgp).count();
+            std::cout << std::setprecision(std::numeric_limits<long double>::digits10 + 1);
+            std::cout << "Time of libGP in secs: " << time_libgp / double(1000000.0) << std::endl;
+#endif
            // Learn the GPs code here
            @GPSLEARN
@@ -129,6 +168,32 @@ void benchmark(const std::string& name, std::vector<int> dimensions, std::vector
                test_obs.push_back(ob);
            }
+#ifdef USE_LIBGP
+            // Prediction of the libGP GP
+            std::vector<double> predict_libgp(N_test);
+            std::vector<double> dummy_libgp(N_test);
+            start_libgp = std::chrono::high_resolution_clock::now();
+            for (int i = 0; i < N_test; i++) {
+                predict_libgp[i] = libgp_gp.f(test_points[i].data());
+                dummy_libgp[i] = libgp_gp.var(test_points[i].data()); // just here for the time comparisons
+            }
+            auto time2_libgp = std::chrono::duration_cast<std::chrono::microseconds>(std::chrono::high_resolution_clock::now() - start_libgp).count();
+            std::cout << "Time of libGP (query) in ms: " << time2_libgp * 1e-3 / double(N_test) << std::endl;
+            double err_libgp = 0.0;
+            for (int i = 0; i < N_test; i++) {
+                Eigen::VectorXd p(1);
+                p << predict_libgp[i];
+                err_libgp += (p - test_obs[i]).squaredNorm();
+            }
+            err_libgp /= double(N_test);
+            std::cout << "MSE(libGP): " << err_libgp << std::endl;
+            // save results of libGP
+            ofs_libgp << std::setprecision(std::numeric_limits<long double>::digits10 + 1);
+            ofs_libgp << time_libgp / double(1000000.0) << " " << time2_libgp * 1e-3 / double(N_test) << " " << err_libgp << " libGP" << std::endl;
+#endif
            // Prediction of the GPs code here
            @GPSQUERY
......
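For readers who have not used libgp before, the calls added to the benchmark above reduce to the following minimal, standalone sketch. The toy 1-D sine data, the number of samples and the query point are illustrative only; the covariance string, the Rprop settings and the API calls mirror the benchmark code:

#include <cmath>
#include <iostream>

#include <Eigen/Core>
#include <gp/gp.h>
#include <gp/rprop.h>

int main()
{
    const int D = 1;
    // Same covariance as the benchmark: ARD squared exponential plus noise.
    libgp::GaussianProcess gp(D, "CovSum ( CovSEard, CovNoise)");
    gp.covf().set_loghyper(Eigen::VectorXd::Zero(gp.covf().get_param_dim()));

    // Add a handful of noiseless samples of sin(x) as training data.
    for (int i = 0; i < 20; i++) {
        double x = i * 0.3;
        gp.add_pattern(&x, std::sin(x));
    }

    // Optimize the hyper-parameters with Rprop, same settings as the benchmark.
    libgp::RProp rprop;
    rprop.init(1e-4, 0.1, 1e-6, 50, 0.5, 1.2);
    rprop.maximize(&gp, 50, false);

    // Query the predictive mean and variance at a new point.
    double x_star = 2.5;
    std::cout << "mean at 2.5: " << gp.f(&x_star)
              << ", variance: " << gp.var(&x_star) << std::endl;
    return 0;
}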
@@ -249,6 +249,9 @@ def compile_regression_benchmarks(bld, json_file):
            NLOptIds.append(o)
        params += "};"
+        tab_str = ''
+        if m == 0:
+            tab_str = ' '
        gp_code = 'using GP_' + str(m) + '_t = ' + gp_type+'<Params'+str(m)+', '+kernel_type+'<Params'+str(m)+'>, '+mean_type+'<Params'+str(m)+'>, '+hp_opt+'<Params'+str(m)+', '
        opt = optimizer[0]
        opt_find = opt.rfind('::')
@@ -275,8 +278,8 @@ def compile_regression_benchmarks(bld, json_file):
        gp_code += ' std::cout << std::setprecision(std::numeric_limits<long double>::digits10 + 1);\n'
        gp_code += ' std::cout << "Time_' + str(m) + ' in secs: " << time_' + str(m) + '/ double(1000000.0) << std::endl;\n'
-        gp_query_code = 'start_' + str(m) + ' = std::chrono::high_resolution_clock::now();\n'
-        gp_query_code += ' std::vector<Eigen::VectorXd> predict_' + str(m) + '(N_test);\n'
+        gp_query_code = ' std::vector<Eigen::VectorXd> predict_' + str(m) + '(N_test);\n'
+        gp_query_code += ' start_' + str(m) + ' = std::chrono::high_resolution_clock::now();\n'
        gp_query_code += ' for (int i = 0; i < N_test; i++) {\n'
        gp_query_code += ' double ss;\n'
        gp_query_code += ' std::tie(predict_' + str(m) +'[i], ss) = gp_' + str(m) + '.query(test_points[i]);\n'
@@ -317,7 +320,7 @@ def compile_regression_benchmarks(bld, json_file):
        source=name + "_dir/" + name + ".cpp",
        target=name,
        includes='./src ./src/benchmarks/regression',
-        uselib='BOOST EIGEN TBB SFERES LIBCMAES NLOPT MKL_TBB',
+        uselib='BOOST EIGEN TBB SFERES LIBCMAES NLOPT MKL_TBB LIBGP',
        cxxflags=bld.env['CXXFLAGS'],
        use='limbo')
......
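The gp_query_code reordering in the hunk above only affects timing: the prediction buffer is now allocated before the clock is started, so the allocation is no longer counted as query time. For illustration, after template substitution (taking m = 0; gp_0, start_0, test_points and N_test come from the generated benchmark source, so this is an excerpt of generated code rather than a standalone program), the emitted query block now reads roughly:

// generated code (sketch): allocate the result buffer first, then start the timer
std::vector<Eigen::VectorXd> predict_0(N_test);
start_0 = std::chrono::high_resolution_clock::now();
for (int i = 0; i < N_test; i++) {
    double ss;
    std::tie(predict_0[i], ss) = gp_0.query(test_points[i]);
}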
@@ -94,6 +94,9 @@ def load_data():
        if func[-4:] == '_gpy':
            func = func[:-4]
            var = 'GPy'
+        if func[-6:] == '_libgp':
+            func = func[:-6]
+            var = 'SEFull'
        exp = exp[4:]
        text_file = open(f, "r")
......