Commit 38837e9f authored by Konstantinos Chatzilygeroudis

Added automatic way of building and running regression benchmarks (using a json file)

parent db5b6df5

test_functions.hpp
#ifndef BENCHMARKS_REGRESSION_TEST_FUNCTIONS_HPP
#define BENCHMARKS_REGRESSION_TEST_FUNCTIONS_HPP

#include <cmath>
#include <vector>

#include <Eigen/Core>
// Rastrigin: f(x) = 10 d + sum_i [x_i^2 - 10 cos(2 pi x_i)], defined for any dimension d
struct Rastrigin {
    double operator()(const Eigen::VectorXd& x) const
    {
        double f = 10. * x.size();
        for (int i = 0; i < x.size(); ++i)
            f += x(i) * x(i) - 10 * std::cos(2 * M_PI * x(i));
        return f;
    }
    // a single [lower, upper] pair applies to every dimension
    std::vector<Eigen::VectorXd> bounds() const
    {
        Eigen::VectorXd tmp(2);
        tmp << -5.12, 5.12;
        std::vector<Eigen::VectorXd> b;
        b.push_back(tmp);
        return b;
    }
    // a negative value means the function accepts inputs of any dimension
    int dims() const
    {
        return -1;
    }
};
struct Ackley {
double operator()(const Eigen::VectorXd& x) const
{
double a = 20, b = 0.2, c = 2 * M_PI;
double A = 0.0, B = 0.0;
for (int i = 0; i < x.size(); i++) {
A += x(i) * x(i);
B += std::cos(c * x(i));
}
A = -b * std::sqrt(A / double(x.size()));
B = B / double(x.size());
return -a * std::exp(A) - std::exp(B) + a + std::exp(1);
}
std::vector<Eigen::VectorXd> bounds() const
{
Eigen::VectorXd tmp(2);
tmp << -32.768, 32.768;
std::vector<Eigen::VectorXd> b;
b.push_back(tmp);
return b;
}
int dims() const
{
return -1;
}
};
struct Bukin {
double operator()(const Eigen::VectorXd& x) const
{
return 100.0 * std::sqrt(std::abs(x(1) - 0.01 * x(0) * x(0))) + 0.01 * std::abs(x(0) + 10);
}
std::vector<Eigen::VectorXd> bounds() const
{
Eigen::VectorXd tmp(2);
tmp << -15, 5;
std::vector<Eigen::VectorXd> b;
b.push_back(tmp);
tmp << -3, 3;
b.push_back(tmp);
return b;
}
int dims() const
{
return 2;
}
};
struct CrossInTray {
double operator()(const Eigen::VectorXd& x) const
{
double A = std::sin(x(0)) * std::sin(x(1));
double B = std::abs(100 - x.norm() / M_PI);
return -0.0001 * std::pow((std::abs(A * std::exp(B)) + 1), 0.1);
}
std::vector<Eigen::VectorXd> bounds() const
{
Eigen::VectorXd tmp(2);
tmp << -10, 10;
std::vector<Eigen::VectorXd> b;
b.push_back(tmp);
return b;
}
int dims() const
{
return 2;
}
};
struct DropWave {
double operator()(const Eigen::VectorXd& x) const
{
return -(1.0 + std::cos(12 * x.norm())) / (0.5 * x.squaredNorm() + 2.0);
}
std::vector<Eigen::VectorXd> bounds() const
{
Eigen::VectorXd tmp(2);
tmp << -5.12, 5.12;
std::vector<Eigen::VectorXd> b;
b.push_back(tmp);
return b;
}
int dims() const
{
return 2;
}
};
struct GramacyLee {
double operator()(const Eigen::VectorXd& x) const
{
return std::sin(10 * M_PI * x(0)) / (2.0 * x(0)) + std::pow((x(0) - 1.0), 4.0);
}
std::vector<Eigen::VectorXd> bounds() const
{
Eigen::VectorXd tmp(2);
tmp << 0.5, 2.5;
std::vector<Eigen::VectorXd> b;
b.push_back(tmp);
return b;
}
int dims() const
{
return 1;
}
};
struct Step {
double operator()(const Eigen::VectorXd& x) const
{
if (x(0) <= 0.0)
return 0.0;
else
return 1.0;
}
std::vector<Eigen::VectorXd> bounds() const
{
Eigen::VectorXd tmp(2);
tmp << -2.0, 2.0;
std::vector<Eigen::VectorXd> b;
b.push_back(tmp);
return b;
}
int dims() const
{
return 1;
}
};
struct HolderTable {
double operator()(const Eigen::VectorXd& x) const
{
double A = std::sin(x(0)) * std::cos(x(1));
double B = std::abs(1.0 - x.norm() / M_PI);
return -std::abs(A * std::exp(B));
}
std::vector<Eigen::VectorXd> bounds() const
{
Eigen::VectorXd tmp(2);
tmp << -10, 10;
std::vector<Eigen::VectorXd> b;
b.push_back(tmp);
return b;
}
int dims() const
{
return 2;
}
};
struct Levy {
double operator()(const Eigen::VectorXd& x) const
{
Eigen::VectorXd w = 1.0 + (x.array() - 1.0) / 4.0;
double A = std::sin(M_PI * w(0)) * std::sin(M_PI * w(0));
double B = 0.0;
for (int i = 0; i < x.size() - 1; i++) {
double tmp = 1.0 + 10.0 * std::sin(M_PI * w(i) + 1) * std::sin(M_PI * w(i) + 1);
B += (w(i) - 1.0) * (w(i) - 1.0) * tmp;
}
double C = (w(x.size() - 1) - 1.0) * (w(x.size() - 1) - 1.0) * (1.0 + std::sin(2 * M_PI * w(x.size() - 1)) * std::sin(2 * M_PI * w(x.size() - 1)));
return A + B + C;
}
std::vector<Eigen::VectorXd> bounds() const
{
Eigen::VectorXd tmp(2);
tmp << -10, 10;
std::vector<Eigen::VectorXd> b;
b.push_back(tmp);
return b;
}
int dims() const
{
return -1;
}
};
struct Schwefel {
double operator()(const Eigen::VectorXd& x) const
{
double A = 418.9829 * x.size();
double B = 0.0;
for (int i = 0; i < x.size(); i++) {
B += x(i) * std::sin(std::sqrt(std::abs(x(i))));
}
return A - B;
}
std::vector<Eigen::VectorXd> bounds() const
{
Eigen::VectorXd tmp(2);
tmp << -500, 500;
std::vector<Eigen::VectorXd> b;
b.push_back(tmp);
return b;
}
int dims() const
{
return -1;
}
};
struct SixHumpCamel {
double operator()(const Eigen::VectorXd& x) const
{
double x1 = x(0);
double x2 = x(1);
double x1_2 = x1 * x1;
double x2_2 = x2 * x2;
double tmp1 = (4 - 2.1 * x1_2 + (x1_2 * x1_2) / 3) * x1_2;
double tmp2 = x1 * x2;
double tmp3 = (-4 + 4 * x2_2) * x2_2;
return tmp1 + tmp2 + tmp3;
}
std::vector<Eigen::VectorXd> bounds() const
{
Eigen::VectorXd tmp(2);
tmp << -3, 3;
std::vector<Eigen::VectorXd> b;
b.push_back(tmp);
tmp << -2, 2;
b.push_back(tmp);
return b;
}
int dims() const
{
return 2;
}
};
// Hartmann 6-D; note that this returns the negative of the usual
// (minimization) formulation, i.e. the maximization variant
struct Hartmann6 {
double operator()(const Eigen::VectorXd& x) const
{
Eigen::MatrixXd a(4, 6);
Eigen::MatrixXd p(4, 6);
a << 10, 3, 17, 3.5, 1.7, 8, 0.05, 10, 17, 0.1, 8, 14, 3, 3.5, 1.7, 10, 17,
8, 17, 8, 0.05, 10, 0.1, 14;
p << 0.1312, 0.1696, 0.5569, 0.0124, 0.8283, 0.5886, 0.2329, 0.4135, 0.8307,
0.3736, 0.1004, 0.9991, 0.2348, 0.1451, 0.3522, 0.2883, 0.3047, 0.665,
0.4047, 0.8828, 0.8732, 0.5743, 0.1091, 0.0381;
Eigen::VectorXd alpha(4);
alpha << 1.0, 1.2, 3.0, 3.2;
double res = 0;
        for (int i = 0; i < 4; i++) {
            double s = 0.0;
            for (int j = 0; j < 6; j++) {
                s += a(i, j) * (x(j) - p(i, j)) * (x(j) - p(i, j));
            }
            res += alpha(i) * std::exp(-s);
        }
return res;
}
std::vector<Eigen::VectorXd> bounds() const
{
Eigen::VectorXd tmp(2);
tmp << 0, 1;
std::vector<Eigen::VectorXd> b;
b.push_back(tmp);
return b;
}
int dims() const
{
return 6;
}
};
#endif
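
Every test function above exposes the same small interface: operator() evaluates the function at a point, bounds() returns one [lower, upper] pair per dimension (a single pair meaning the same bound for all dimensions), and dims() returns the required input dimension, with a negative value meaning any dimension will do. A minimal sketch of how a caller might consume this interface follows; the main() and the choice of 4 dimensions are illustrative, not part of the commit:

// Illustrative only: evaluate Rastrigin at a random point drawn
// uniformly inside its bounds.
#include <iostream>
#include "test_functions.hpp"

int main()
{
    Rastrigin f;
    // Rastrigin::dims() is negative, so any dimension works; pick 4 here
    int D = (f.dims() > 0) ? f.dims() : 4;
    std::vector<Eigen::VectorXd> b = f.bounds();
    // draw a uniform random point and rescale it into the function's bounds
    Eigen::VectorXd p = (Eigen::VectorXd::Random(D).array() + 1.0) / 2.0; // now in [0, 1]
    p = p.array() * (b[0](1) - b[0](0)) + b[0](0); // now in [-5.12, 5.12]
    std::cout << "f(p) = " << f(p) << std::endl;
    return 0;
}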
@@ -65,7 +65,7 @@ def configure(conf):
     conf.get_env()['USE_BAYESOPT'] = True

-def build_benchmark(bld):
+def build_bo_benchmarks(bld):
     if bld.env.DEFINES_NLOPT == ['USE_NLOPT']:
         bld.program(features='cxx',
waf_tools/benchmark_template.cpp
#include <algorithm>
#include <cassert>
#include <chrono>
#include <fstream>
#include <iostream>
#include <random>
#include <string>
#include <vector>

#include <limbo/limbo.hpp>

#include "test_functions.hpp"
using namespace limbo;
// Draw one sample from N(m, sigma^2); note that the second parameter of
// std::normal_distribution is the standard deviation, not the variance
template <typename T>
inline T gaussian_rand(T m = 0.0, T sigma = 1.0)
{
    static std::random_device rd;
    static std::mt19937 gen(rd());
    std::normal_distribution<T> gaussian(m, sigma);
    return gaussian(gen);
}
@PARAMS
inline Eigen::MatrixXd sample_covariance(const std::vector<Eigen::VectorXd>& points)
{
assert(points.size());
// Get the sample means
Eigen::VectorXd means = Eigen::VectorXd::Zero(points[0].size());
for (size_t i = 0; i < points.size(); i++) {
means.array() += points[i].array();
}
means = means.array() / double(points.size());
    // Calculate the sample covariance matrix: (sum_i x_i x_i^T - n * mu * mu^T) / (n - 1)
Eigen::MatrixXd cov = Eigen::MatrixXd::Zero(points[0].size(), points[0].size());
for (size_t i = 0; i < points.size(); i++) {
cov = cov + points[i] * points[i].transpose();
}
cov = (cov.array() - (double(points.size()) * means * means.transpose()).array()) / (double(points.size()) - 1.0);
return cov;
}
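
As used further down, sample_covariance is applied to 1-D observation vectors, where the 1x1 result is just the unbiased sample variance. A tiny illustrative check, not part of the commit:

// Illustrative check (not in the commit): with scalar observations
// wrapped in 1-D vectors, sample_covariance reduces to the unbiased
// sample variance. obs = {1, 2, 3, 4}: mean 2.5, squared deviations
// sum to 5.0, so the variance is 5/3.
void sample_covariance_check()
{
    std::vector<Eigen::VectorXd> obs;
    for (double v : {1.0, 2.0, 3.0, 4.0}) {
        Eigen::VectorXd o(1);
        o << v;
        obs.push_back(o);
    }
    std::cout << sample_covariance(obs)(0, 0) << std::endl; // prints ~1.66667
}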
template <typename Function>
void benchmark(const std::string& name, std::vector<int> dimensions, std::vector<int> num_points)
{
Function func;
    // functions with a fixed dimension ignore the requested list of dimensions
    std::vector<int> dims;
    if (func.dims() > 0)
        dims.push_back(func.dims());
    else
        dims = dimensions;
// Whether to add noise or not
bool add_noise = @NOISE;
int N_test = 10000;
std::ofstream ofs_res(name + ".dat");
for (size_t dim = 0; dim < dims.size(); dim++) {
std::vector<Eigen::VectorXd> bounds = func.bounds();
bool one_bound = (bounds.size() == 1);
int D = dims[dim];
        for (size_t n = 0; n < num_points.size(); n++) {
            int N = num_points[n];
if (N <= 0) {
std::cerr << "Number of points less or equal to zero!" << std::endl;
continue;
}
            // Output dimension, number of points and number of models
ofs_res << D << " " << N << " @NMODELS" << std::endl;
std::cout << name << " in dim: " << D << " and # of points: " << N << std::endl;
std::string file_name = name + "_" + std::to_string(D) + "_" + std::to_string(N);
std::vector<Eigen::VectorXd> points, obs;
for (int i = 0; i < N; i++) {
                Eigen::VectorXd p = limbo::tools::random_vector(D);
if (one_bound)
p = p.array() * (bounds[0](1) - bounds[0](0)) + bounds[0](0);
else {
for (int j = 0; j < D; j++) {
p(j) = p(j) * (bounds[j](1) - bounds[j](0)) + bounds[j](0);
}
}
points.push_back(p);
Eigen::VectorXd ob(1);
ob << func(p);
obs.push_back(ob);
}
if (add_noise) {
Eigen::MatrixXd cov = sample_covariance(obs);
                double sigma = std::sqrt(cov(0, 0)) / 20.0; // noise std: 5% of the observations' std
std::cout << "Adding noise of: " << sigma << std::endl;
for (int i = 0; i < N; i++)
obs[i] = obs[i].array() + gaussian_rand(0.0, sigma);
}
            // The GP training code is substituted here by the build script
@GPSLEARN
// Generation of test data
std::vector<Eigen::VectorXd> test_points, test_obs;
for (int i = 0; i < N_test; i++) {
Eigen::VectorXd p = limbo::tools::random_vector(D);
if (one_bound)
p = p.array() * (bounds[0](1) - bounds[0](0)) + bounds[0](0);
else {
for (int j = 0; j < D; j++) {
p(j) = p(j) * (bounds[j](1) - bounds[j](0)) + bounds[j](0);
}
}
test_points.push_back(p);
Eigen::VectorXd ob(1);
ob << func(p);
test_obs.push_back(ob);
}
            // The GP prediction/query code is substituted here by the build script
@GPSQUERY
std::cout << "Saving data..." << std::endl;
// std::ofstream ofs(file_name + "_gp.dat");
// std::ofstream ofs_real(file_name + "_real.dat");
// int pp = 4000;
// for (int i = 0; i < pp; ++i) {
// Eigen::VectorXd v = limbo::tools::random_vector(D);
// if (one_bound)
// v = v.array() * (bounds[0](1) - bounds[0](0)) + bounds[0](0);
// else {
// for (int j = 0; j < D; j++) {
// v(j) = v(j) * (bounds[j](1) - bounds[j](0)) + bounds[j](0);
// }
// }
// Eigen::VectorXd mu;
// double sigma;
// std::tie(mu, sigma) = gp.query(v);
//
// ofs << v.transpose() << " " << mu[0] << " " << std::sqrt(sigma) << std::endl;
//
// double val = func(v);
// ofs_real << v.transpose() << " " << val << " 0" << std::endl;
// }
std::ofstream ofs_data(file_name + "_data.dat");
for (size_t i = 0; i < points.size(); ++i)
ofs_data << points[i].transpose() << " " << obs[i].transpose() << std::endl;
std::ofstream ofs_test(file_name + "_test.dat");
for (size_t i = 0; i < test_points.size(); ++i)
ofs_test << test_points[i].transpose() << " " << test_obs[i].transpose() << std::endl;
std::cout << "Data saved...!" << std::endl;
}
}
}
int main(int argc, char* argv[])
{
@FUNCS
return 0;
}
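
The @FUNCS placeholder above is filled by compile_regression_benchmarks (below) with one benchmark<...>() call per configured function. A hypothetical expansion for a config with two functions might look like this; the dimensions and point counts are illustrative, the actual values come from the JSON file:

// Hypothetical expansion of @FUNCS (values illustrative only)
int main(int argc, char* argv[])
{
    benchmark<Rastrigin>("rastrigin", {2, 4}, {50, 100, 200});
    benchmark<Ackley>("ackley", {2, 4}, {50, 100, 200});
    return 0;
}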
@@ -146,6 +146,246 @@ def create_exp(name, opt):
cpp.write(cpp_tpl)
cpp.close()
def compile_regression_benchmarks(bld, json_file):
import types
    # CamelCase -> snake_case, e.g. convert('SixHumpCamel') == 'six_hump_camel'
    def convert(name):
        import re
        s1 = re.sub('(.)([A-Z][a-z]+)', r'\1_\2', name)
        return re.sub('([a-z0-9])([A-Z])', r'\1_\2', s1).lower()
    with open(json_file) as f:
        configs = simplejson.load(f)
for config in configs:
name = config['name']
funcs = config['functions']
dims = config['dimensions']
pts = config['points']
# randomness = config['randomness']
noise = config['noise']
models = config['models']
if len(models) < 1:
Logs.pprint('YELLOW', 'ERROR: No model was found in the benchmark \'%s\'' % name)
continue
        # a single dimensions/points list is shared across all functions
        if len(dims) != len(funcs):
            dims = [dims]*len(funcs)
        if len(pts) != len(funcs):
            pts = [pts]*len(funcs)
cpp_tpl = ""
for line in open("waf_tools/benchmark_template.cpp"):
cpp_tpl += line
cpp_body = ""
for i in range(len(funcs)):
func = funcs[i]
dim = dims[i]
points = pts[i]
dim_init = ', '.join(str(d) for d in dim)
pts_init = ', '.join(str(p) for p in points)
code = " benchmark<" + func + ">(\"" + func.lower() + "\", {" + dim_init + "}, {" + pts_init + "});\n"
cpp_body += code
params_code = ""
gps_learn_code = ""
gps_query_code = ""
for m in range(len(models)):
model = models[m]
            # defaults; any of these can be overridden by the JSON model entry
            gp_type = 'GP'
kernel_type = 'SquaredExpARD'
optimize_noise = 'true'
kernel_params = {}
mean_type = 'NullFunction'
mean_has_defaults = True
mean_params = {}
hp_opt = 'KernelLFOpt'
optimizer = 'Rprop'
optimizer_params = []
if 'type' in model:
gp_type = model['type']
if 'kernel' in model:
kernel_type = model['kernel']['type']
if 'optimize_noise' in model['kernel']:
optimize_noise = (model['kernel']['optimize_noise']).lower()
if 'params' in model['kernel']:
kernel_params = model['kernel']['params']
if 'mean' in model:
mean_type = model['mean']['type']
if 'has_defaults' in model['mean']:
                    mean_has_defaults = model['mean']['has_defaults'].lower() == 'true'
if 'params' in model['mean']:
mean_params = model['mean']['params']
if 'hp_opt' in model:
hp_opt = model['hp_opt']['type']
optimizer = model['hp_opt']['optimizer']
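
For reference, a JSON configuration exercising the fields parsed above might look like the sketch below. Only the keys are taken from the parsing code (name, functions, dimensions, points, noise, models with type, kernel, mean and hp_opt); all concrete values are illustrative. Note that dimensions and points are given per function here; a single list would be broadcast to all functions by the code above.

[
    {
        "name": "example_benchmark",
        "functions": ["Rastrigin", "Ackley"],
        "dimensions": [[2, 4], [2, 4]],
        "points": [[50, 100], [50, 100]],
        "noise": "false",
        "models": [
            {
                "type": "GP",
                "kernel": {"type": "SquaredExpARD", "optimize_noise": "true"},
                "mean": {"type": "NullFunction", "has_defaults": "true"},
                "hp_opt": {"type": "KernelLFOpt", "optimizer": "Rprop"}
            }
        ]
    }
]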