Commit d81d1821 authored by Konstantinos Chatzilygeroudis's avatar Konstantinos Chatzilygeroudis
Browse files

Merge pull request #97 from resibots/travis_fix

Fix travis
parents 305d9da1 40948003
language: cpp
os:
- linux
sudo: required
dist: trusty
compiler:
- gcc
- clang
sudo: required
install: sudo apt-get update -qq && sudo apt-get install -qq libboost-all-dev libboost-test-dev libeigen3-dev libtbb-dev
env:
global:
- CI_HOME=`pwd`
matrix:
- LIBCMAES=ON NLOPT=ON TBB=ON
- LIBCMAES=ON NLOPT=ON TBB=OFF
- LIBCMAES=ON NLOPT=OFF TBB=ON
- LIBCMAES=ON NLOPT=OFF TBB=OFF
- LIBCMAES=OFF NLOPT=ON TBB=ON
- LIBCMAES=OFF NLOPT=ON TBB=OFF
- LIBCMAES=OFF NLOPT=OFF TBB=ON
- LIBCMAES=OFF NLOPT=OFF TBB=OFF
addons:
apt:
packages:
- libboost1.55-all-dev
- libeigen3-dev
install:
- if [ "$LIBCMAES" = "ON" ]; then 'ci/install_libcmaes.sh' ; fi
- if [ "$NLOPT" = "ON" ]; then 'ci/install_nlopt.sh' ; fi
- if [ "$TBB" = "ON" ]; then 'ci/install_tbb.sh' ; fi
# Change this to your needs
script:
./waf configure && ./waf
\ No newline at end of file
script:
- ./waf configure
- ./waf --tests --alltests -v
limbo
limbo [![Build Status](https://img.shields.io/travis/resibots/limbo.svg)](https://travis-ci.org/resibots/limbo)
=====
A lightweight framework for Bayesian and model-based optimisation of black-box functions (C++11).
......@@ -27,7 +27,7 @@ Main references
- **General introduction:** Brochu, E., Cora, V. M., & De Freitas, N. (2010). A tutorial on Bayesian optimization of expensive cost functions, with application to active user modeling and hierarchical reinforcement learning. *arXiv preprint arXiv:1012.2599*.
- **Gaussian Processes (GP)**: Rasmussen, C. A, Williams C. K. I. (2006). /Gaussian Processes for Machine Learning./ MIT Press.
- **Gaussian Processes (GP)**: Rasmussen, C. A, Williams C. K. I. (2006). /Gaussian Processes for Machine Learning./ MIT Press.
- **Optimizing hyperparameters:** Blum, M., & Riedmiller, M. (2013). Optimization of Gaussian Process Hyperparameters using Rprop. In *European Symposium on Artificial Neural Networks, Computational Intelligence and Machine Learning*.
......@@ -45,4 +45,3 @@ Limbo is a framework for our research that is voluntarily kept small. It is desi
If you need a more full-featured library, check:
- BayesOpt: http://rmcantin.bitbucket.org/html/
- libGP (no optimization): https://github.com/mblum/libgp
# ci/install_libcmaes.sh -- build and install libcmaes (plus its gtest/glog/gflags
# prerequisites) on the Travis CI worker.
sudo apt-get -qq update
sudo apt-get -qq --yes --force-yes install libgtest-dev autoconf automake libtool libgoogle-glog-dev libgflags-dev
# Ubuntu ships gtest as sources only: compile the static libraries in place
# and copy them where the linker looks for them.
cd /usr/src/gtest
sudo mkdir build && cd build
sudo cmake ..
sudo make
sudo cp *.a /usr/lib
# Fetch libcmaes from upstream and install it system-wide via autotools.
cd && git clone https://github.com/beniz/libcmaes.git
cd libcmaes
./autogen.sh
./configure
make
sudo make install
# Refresh the dynamic-linker cache so the new shared library is found.
sudo ldconfig
# Go back to the repository checkout; CI_HOME is set in .travis.yml (CI_HOME=`pwd`).
cd $CI_HOME
# ci/install_nlopt.sh -- download, build and install NLopt 2.4.2 with the C++
# API enabled, on the Travis CI worker.
sudo apt-get -qq update
sudo apt-get -qq --yes --force-yes install autoconf automake
cd && wget http://ab-initio.mit.edu/nlopt/nlopt-2.4.2.tar.gz
tar -zxvf nlopt-2.4.2.tar.gz && cd nlopt-2.4.2
# --with-cxx enables the C++ bindings (canonical double-dash form; the original
# single-dash "-with-cxx" relied on configure's lenient option parsing).
# Python/Matlab/Octave wrappers are not needed on CI.
./configure --with-cxx --enable-shared --without-python --without-matlab --without-octave
# Build as the unprivileged user first, then install as root -- the original
# jumped straight to "sudo make install", which compiled everything as root
# (inconsistent with ci/install_libcmaes.sh).
make
sudo make install
# Refresh the dynamic-linker cache so the new shared library is found.
sudo ldconfig
# Go back to the repository checkout; CI_HOME is set in .travis.yml (CI_HOME=`pwd`).
cd $CI_HOME
# ci/install_tbb.sh -- install Intel TBB from the Ubuntu repositories for CI.
sudo apt-get -qq update
sudo apt-get -qq --yes --force-yes install libtbb-dev
#define BOOST_TEST_DYN_LINK
#define BOOST_TEST_MODULE nlopt_test
#define BOOST_TEST_MODULE test_cmaes
#include <boost/test/unit_test.hpp>
......@@ -20,16 +20,28 @@ opt::eval_t fsphere(const Eigen::VectorXd& params, bool g)
BOOST_AUTO_TEST_CASE(test_cmaes_unbounded)
{
Eigen::VectorXd g = limbo::opt::Cmaes<Params>()(fsphere, Eigen::VectorXd::Zero(2), false);
size_t N = 10;
size_t errors = 0;
for (size_t i = 0; i < N; i++) {
Eigen::VectorXd g = limbo::opt::Cmaes<Params>()(fsphere, Eigen::VectorXd::Zero(2), false);
BOOST_CHECK_SMALL(g(0), 0.00000001);
BOOST_CHECK_SMALL(g(1), 0.00000001);
if (std::abs(g(0)) > 0.00000001 || std::abs(g(1)) > 0.00000001)
errors++;
}
BOOST_CHECK((double(errors) / double(N)) < 0.4);
}
BOOST_AUTO_TEST_CASE(test_cmaes_bounded)
{
Eigen::VectorXd g = limbo::opt::Cmaes<Params>()(fsphere, Eigen::VectorXd::Zero(2), true);
size_t N = 10;
size_t errors = 0;
for (size_t i = 0; i < N; i++) {
Eigen::VectorXd g = limbo::opt::Cmaes<Params>()(fsphere, Eigen::VectorXd::Zero(2), true);
if (std::abs(g(0)) > 0.00000001 || std::abs(g(1)) > 0.00000001)
errors++;
}
BOOST_CHECK_SMALL(g(0), 0.00000001);
BOOST_CHECK_SMALL(g(1), 0.00000001);
BOOST_CHECK((double(errors) / double(N)) < 0.3);
}
#define BOOST_TEST_DYN_LINK
#define BOOST_TEST_MODULE gp
#define BOOST_TEST_MODULE test_gp
#include <boost/test/unit_test.hpp>
......
#define BOOST_TEST_DYN_LINK
#define BOOST_TEST_MODULE init_functions
#define BOOST_TEST_MODULE test_init_functions
#include <boost/test/unit_test.hpp>
......@@ -79,8 +79,8 @@ BOOST_AUTO_TEST_CASE(no_init)
Opt_t opt;
opt.optimize(fit_eval());
BOOST_CHECK(opt.observations().size() == 1);
BOOST_CHECK(opt.samples().size() == 1);
BOOST_CHECK(opt.observations().size() == 0);
BOOST_CHECK(opt.samples().size() == 0);
}
BOOST_AUTO_TEST_CASE(random_sampling)
......
#define BOOST_TEST_DYN_LINK
#define BOOST_TEST_MODULE macros
#define BOOST_TEST_MODULE test_macros
#include <cstring>
......
#define BOOST_TEST_DYN_LINK
#define BOOST_TEST_MODULE nlopt_test
#define BOOST_TEST_MODULE test_nlopt
#include <boost/test/unit_test.hpp>
......
#define BOOST_TEST_DYN_LINK
#define BOOST_TEST_MODULE optimizers
#define BOOST_TEST_MODULE test_optimizers
#include <boost/test/unit_test.hpp>
......
......@@ -4,7 +4,8 @@
def build(bld):
bld.recurse('examples')
bld.recurse('tests')
if bld.options.tests:
bld.recurse('tests')
bld.recurse('benchmarks')
bld.stlib(source=' \
......
import os
import stat
import subprocess
from waflib.Tools import waf_unit_test
json_ok = True
try:
......@@ -39,6 +40,16 @@ def create_variants(bld, source, uselib_local,
use=uselib_local,
defines=deff)
def summary(bld):
    """Post-build hook: print the waf unit-test summary, then fail the build
    if any test failed.

    Unlike the stock ``waf_unit_test.summary`` (which only prints), this
    aborts with a fatal error when at least one test returned non-zero, so
    CI gets a failing exit status.
    """
    results = getattr(bld, 'utest_results', [])
    # Each entry is (filename, exit_code, stdout, stderr); a non-zero
    # exit code marks a failed test.
    failures = sum(1 for entry in results if entry[1]) if results else 0
    waf_unit_test.summary(bld)
    if failures > 0:
        bld.fatal("Build failed, because some tests failed!")
def _sub_script(tpl, conf_file):
if 'LD_LIBRARY_PATH' in os.environ:
......
......@@ -34,6 +34,8 @@ def options(opt):
opt.add_option('--qsub', type='string', help='config file (json) to submit to torque', dest='qsub')
opt.add_option('--oar', type='string', help='config file (json) to submit to oar', dest='oar')
opt.add_option('--experimental', action='store_true', help='specify to compile the experimental examples', dest='experimental')
# tests
opt.add_option('--tests', action='store_true', help='compile tests or not', dest='tests')
opt.load('xcode')
for i in glob.glob('exp/*'):
if os.path.isdir(i):
......@@ -56,11 +58,12 @@ def configure(conf):
common_flags = "-Wall -std=c++11"
opt_flags = " -O3 -xHost -march=native -mtune=native -unroll -fma -g"
else:
if int(conf.env['CC_VERSION'][0]+conf.env['CC_VERSION'][1]) < 47:
if conf.env.CXX_NAME in ["gcc", "g++"] and int(conf.env['CC_VERSION'][0]+conf.env['CC_VERSION'][1]) < 47:
common_flags = "-Wall -std=c++0x"
else:
common_flags = "-Wall -std=c++11"
common_flags += " -fdiagnostics-color"
if conf.env.CXX_NAME in ["clang", "llvm"]:
common_flags += " -fdiagnostics-color"
opt_flags = " -O3 -march=native -g"
conf.check_boost(lib='serialization filesystem \
......@@ -76,9 +79,6 @@ def configure(conf):
conf.env.INCLUDES_LIMBO = os.path.dirname(os.path.abspath(inspect.getfile(inspect.currentframe()))) + "/src"
if conf.env['CXXFLAGS_ODE']:
common_flags += ' ' + conf.env['CXXFLAGS_ODE']
all_flags = common_flags + opt_flags
conf.env['CXXFLAGS'] = conf.env['CXXFLAGS'] + all_flags.split(' ')
print conf.env['CXXFLAGS']
......@@ -94,8 +94,7 @@ def build(bld):
for i in bld.options.exp.split(','):
print 'Building exp: ' + i
bld.recurse('exp/' + i)
from waflib.Tools import waf_unit_test
bld.add_post_fun(waf_unit_test.summary)
bld.add_post_fun(limbo.summary)
def build_extensive_tests(ctx):
......
Supports Markdown
0% or .
You are about to add 0 people to the discussion. Proceed with caution.
Finish editing this message first!
Please register or sign in to comment