Commit 98a07959 authored by Vaios Papaspyros's avatar Vaios Papaspyros
Browse files

Add a waf `--create` option to scaffold a new experiment

parent 245c85db
......@@ -242,3 +242,107 @@ def output_params(folder):
text_file = open("params_"+folder[4:]+".txt", "w")
text_file.write(output)
text_file.close()
def create_exp(name):
    """Create a skeleton experiment under exp/<name>.

    Writes two files so the user has a compilable starting point:
    - exp/<name>/wscript           -- waf build script for the experiment
    - exp/<name>/basic_example.cpp -- the limbo basic-example tutorial code

    Parameters
    ----------
    name : str
        Name of the experiment; used as the directory name under exp/.

    Raises
    ------
    OSError
        If exp/<name> already exists (deliberate: never overwrite an
        existing experiment).
    """
    # NOTE: the shebang must be the very first line of the generated
    # wscript, so the template must not start with a blank line.
    ws_tpl = """#! /usr/bin/env python


def configure(conf):
    pass


def options(opt):
    pass


def build(bld):
    bld(features='cxx cxxprogram',
        source='basic_example.cpp',
        includes='. ../../src',
        target='basic_example',
        uselib='BOOST EIGEN TBB LIBCMAES NLOPT',
        use='limbo')
"""
    # create exp/ on first use; os.mkdir (not makedirs) for the experiment
    # itself so an already-existing exp/<name> raises instead of being
    # silently reused
    if not os.path.exists('exp'):
        os.makedirs('exp')
    os.mkdir('exp/' + name)
    # 'with' guarantees the handle is flushed and closed even if a write
    # fails (the original left both file handles open)
    # '@exp' is a template placeholder; currently unused in the templates,
    # kept for forward compatibility
    with open('exp/' + name + "/wscript", "w") as wscript:
        wscript.write(ws_tpl.replace('@exp', name))
    basic_example_tpl = """// please see the explanation in the documentation
// http://www.resibots.eu/limbo/tutorials/basic_example.html
#include <iostream>
// you can also include <limbo/limbo.hpp> but it will slow down the compilation
#include <limbo/bayes_opt/boptimizer.hpp>

using namespace limbo;

struct Params {
    // no noise
    struct bayes_opt_boptimizer : public defaults::bayes_opt_boptimizer {
        BO_PARAM(double, noise, 0.0);
    };

// depending on which internal optimizer we use, we need to import different parameters
#ifdef USE_LIBCMAES
    struct opt_cmaes : public defaults::opt_cmaes {
    };
#elif defined(USE_NLOPT)
    struct opt_nloptnograd : public defaults::opt_nloptnograd {
    };
#else
    struct opt_gridsearch : public defaults::opt_gridsearch {
    };
#endif

    // enable / disable the writing of the result files
    struct bayes_opt_bobase {
        BO_PARAM(int, stats_enabled, true);
    };

    struct kernel_exp : public defaults::kernel_exp {
    };

    // we use 10 random samples to initialize the algorithm
    struct init_randomsampling {
        BO_PARAM(int, samples, 10);
    };

    // we stop after 40 iterations
    struct stop_maxiterations {
        BO_PARAM(int, iterations, 40);
    };

    // we use the default parameters for acqui_ucb
    struct acqui_ucb : public defaults::acqui_ucb {
    };
};

struct Eval {
    // number of input dimension (x.size())
    static constexpr size_t dim_in = 1;
    // number of dimenions of the result (res.size())
    static constexpr size_t dim_out = 1;

    // the function to be optimized
    Eigen::VectorXd operator()(const Eigen::VectorXd& x) const
    {
        double y = -((5 * x(0) - 2.5) * (5 * x(0) - 2.5)) + 5;
        // we return a 1-dimensional vector
        return tools::make_vector(y);
    }
};

int main()
{
    // we use the default acquisition function / model / stat / etc.
    bayes_opt::BOptimizer<Params> boptimizer;
    // run the evaluation
    boptimizer.optimize(Eval());
    // the best sample found
    std::cout << "Best sample: " << boptimizer.best_sample()(0) << " - Best observation: " << boptimizer.best_observation()(0) << std::endl;
    return 0;
}
"""
    with open('exp/' + name + "/basic_example.cpp", "w") as basic_example:
        basic_example.write(basic_example_tpl.replace('@exp', name))
......@@ -28,6 +28,7 @@ def options(opt):
opt.load('nlopt')
opt.load('libcmaes')
opt.add_option('--create', type='string', help='create a new exp', dest='create_exp')
opt.add_option('--exp', type='string', help='exp(s) to build, separate by comma', dest='exp')
opt.add_option('--qsub', type='string', help='config file (json) to submit to torque', dest='qsub')
opt.add_option('--oar', type='string', help='config file (json) to submit to oar', dest='oar')
......@@ -151,6 +152,8 @@ def run_benchmark(ctx):
retcode = subprocess.call(s, shell=True, env=None)
def shutdown(ctx):
if ctx.options.create_exp:
limbo.create_exp(ctx.options.create_exp)
if ctx.options.qsub:
limbo.qsub(ctx.options.qsub)
if ctx.options.oar:
......
Supports Markdown
0% or .
You are about to add 0 people to the discussion. Proceed with caution.
Finish editing this message first!
Please register or sign in to comment