Unverified Commit 6241f7fe authored by Konstantinos Chatzilygeroudis's avatar Konstantinos Chatzilygeroudis Committed by GitHub

Merge pull request #274 from resibots/multi_gp

MultiGP
parents 6afca247 1850c5cc
......@@ -13,7 +13,7 @@ We assume that our samples are in a vector called ``samples`` and that our obser
.. literalinclude:: ../../src/tutorials/gp.cpp
:language: c++
:linenos:
:lines: 79-88
:lines: 77-86
Basic usage
------------
......@@ -23,14 +23,14 @@ We first create a basic GP with an Exponential kernel (``kernel::Exp<Params>``)
.. literalinclude:: ../../src/tutorials/gp.cpp
:language: c++
:linenos:
:lines: 61-74
:lines: 61-72
The type of the GP is defined by the following lines:
.. literalinclude:: ../../src/tutorials/gp.cpp
:language: c++
:linenos:
:lines: 89-93
:lines: 87-91
To use the GP, we need :
......@@ -40,7 +40,7 @@ To use the GP, we need :
.. literalinclude:: ../../src/tutorials/gp.cpp
:language: c++
:linenos:
:lines: 94-99
:lines: 92-97
Here we assume that the noise is the same for all samples and that it is equal to 0.01.
......@@ -57,7 +57,7 @@ To visualize the predictions of the GP, we can query it for many points and reco
.. literalinclude:: ../../src/tutorials/gp.cpp
:language: c++
:linenos:
:lines: 101-112
:lines: 99-110
Hyper-parameter optimization
......@@ -71,7 +71,7 @@ A new GP type is defined as follows:
.. literalinclude:: ../../src/tutorials/gp.cpp
:language: c++
:linenos:
:lines: 114-118
:lines: 112-116
It uses the default values for the parameters of ``SquaredExpARD``:
......@@ -85,7 +85,7 @@ After calling the ``compute()`` method, the hyper-parameters can be optimized by
.. literalinclude:: ../../src/tutorials/gp.cpp
:language: c++
:linenos:
:lines: 121-123
:lines: 119-121
We can have a look at the difference between the two GPs:
......@@ -115,7 +115,7 @@ We can also save our optimized GP model:
.. literalinclude:: ../../src/tutorials/gp.cpp
:language: c++
:linenos:
:lines: 140-141
:lines: 138-139
This will create a directory called ``myGP`` with several files (the GP data, kernel hyperparameters etc.). If we want a binary format (i.e., more compact), we can replace the ``TextArchive`` by ``BinaryArchive``.
......@@ -124,6 +124,6 @@ To load a saved model, we can do the following:
.. literalinclude:: ../../src/tutorials/gp.cpp
:language: c++
:linenos:
:lines: 143-144
:lines: 141-142
Note that we need to have the same kernel and mean function (i.e., the same GP type) as the one used for saving.
\ No newline at end of file
......@@ -105,9 +105,6 @@ BO_PARAMS(std::cout,
struct opt_rprop : public defaults::opt_rprop {
};
struct opt_parallelrepeater : public defaults::opt_parallelrepeater {
};
};)
struct fit_eval {
......
......@@ -93,9 +93,6 @@ struct Params {
struct stop_maxiterations {
BO_PARAM(int, iterations, 100);
};
struct opt_parallelrepeater : defaults::opt_parallelrepeater {
};
};
template <typename Params, typename Model>
......
......@@ -50,6 +50,7 @@
///@defgroup model_opt_defaults
#include <limbo/model/gp.hpp>
#include <limbo/model/multi_gp.hpp>
#include <limbo/model/sparsified_gp.hpp>
#include <limbo/model/gp/kernel_lf_opt.hpp>
......
......@@ -80,7 +80,7 @@ namespace limbo {
/// useful because the model might be created before knowing anything about the process
GP() : _dim_in(-1), _dim_out(-1), _inv_kernel_updated(false) {}
/// useful because the model might be created before having samples
/// useful because the model might be created before having samples
GP(int dim_in, int dim_out)
: _dim_in(dim_in), _dim_out(dim_out), _kernel_function(dim_in), _mean_function(dim_out), _inv_kernel_updated(false) {}
......@@ -153,7 +153,7 @@ namespace limbo {
/**
\\rst
return :math:`\mu`, :math:`\sigma^2` (unormalized). If there is no sample, return the value according to the mean function. Using this method instead of separate calls to mu() and sigma() is more efficient because some computations are shared between mu() and sigma().
return :math:`\mu`, :math:`\sigma^2` (un-normalized). If there is no sample, return the value according to the mean function. Using this method instead of separate calls to mu() and sigma() is more efficient because some computations are shared between mu() and sigma().
\\endrst
*/
std::tuple<Eigen::VectorXd, double> query(const Eigen::VectorXd& v) const
......@@ -193,14 +193,14 @@ namespace limbo {
/// return the number of dimensions of the input
int dim_in() const
{
assert(_dim_in != -1); // need to compute first !
assert(_dim_in != -1); // need to compute first!
return _dim_in;
}
/// return the number of dimensions of the output
int dim_out() const
{
assert(_dim_out != -1); // need to compute first !
assert(_dim_out != -1); // need to compute first!
return _dim_out;
}
......
......@@ -48,7 +48,6 @@
#include <Eigen/Core>
#include <limbo/opt/parallel_repeater.hpp>
#include <limbo/opt/rprop.hpp>
namespace limbo {
......@@ -56,7 +55,7 @@ namespace limbo {
namespace gp {
///@ingroup model_opt
///base class for optimization of the hyper-parameters of a GP
template <typename Params, typename Optimizer = opt::ParallelRepeater<Params, opt::Rprop<Params>>>
template <typename Params, typename Optimizer = opt::Rprop<Params>>
struct HPOpt {
public:
HPOpt() : _called(false) {}
......
......@@ -47,14 +47,13 @@
#define LIMBO_MODEL_GP_KERNEL_LF_OPT_HPP
#include <limbo/model/gp/hp_opt.hpp>
#include <limbo/tools/random_generator.hpp>
namespace limbo {
namespace model {
namespace gp {
///@ingroup model_opt
///optimize the likelihood of the kernel only
template <typename Params, typename Optimizer = opt::ParallelRepeater<Params, opt::Rprop<Params>>>
template <typename Params, typename Optimizer = opt::Rprop<Params>>
struct KernelLFOpt : public HPOpt<Params, Optimizer> {
public:
template <typename GP>
......@@ -96,8 +95,8 @@ namespace limbo {
const GP& _original_gp;
};
};
}
}
}
} // namespace gp
} // namespace model
} // namespace limbo
#endif
......@@ -47,14 +47,13 @@
#define LIMBO_MODEL_GP_KERNEL_LOO_OPT_HPP
#include <limbo/model/gp/hp_opt.hpp>
#include <limbo/tools/random_generator.hpp>
namespace limbo {
namespace model {
namespace gp {
///@ingroup model_opt
///optimize the likelihood of the kernel only
template <typename Params, typename Optimizer = opt::ParallelRepeater<Params, opt::Rprop<Params>>>
template <typename Params, typename Optimizer = opt::Rprop<Params>>
struct KernelLooOpt : public HPOpt<Params, Optimizer> {
public:
template <typename GP>
......@@ -96,8 +95,8 @@ namespace limbo {
const GP& _original_gp;
};
};
}
}
}
} // namespace gp
} // namespace model
} // namespace limbo
#endif
......@@ -47,14 +47,13 @@
#define LIMBO_MODEL_GP_KERNEL_MEAN_LF_OPT_HPP
#include <limbo/model/gp/hp_opt.hpp>
#include <limbo/tools/random_generator.hpp>
namespace limbo {
namespace model {
namespace gp {
///@ingroup model_opt
///optimize the likelihood of both the kernel and the mean (try to align the mean function)
template <typename Params, typename Optimizer = opt::ParallelRepeater<Params, opt::Rprop<Params>>>
template <typename Params, typename Optimizer = opt::Rprop<Params>>
struct KernelMeanLFOpt : public HPOpt<Params, Optimizer> {
public:
template <typename GP>
......@@ -109,8 +108,8 @@ namespace limbo {
const GP& _original_gp;
};
};
}
}
}
} // namespace gp
} // namespace model
} // namespace limbo
#endif
......@@ -47,14 +47,13 @@
#define LIMBO_MODEL_GP_MEAN_LF_OPT_HPP
#include <limbo/model/gp/hp_opt.hpp>
#include <limbo/tools/random_generator.hpp>
namespace limbo {
namespace model {
namespace gp {
///@ingroup model_opt
///optimize the likelihood of the mean only (try to align the mean function)
template <typename Params, typename Optimizer = opt::ParallelRepeater<Params, opt::Rprop<Params>>>
template <typename Params, typename Optimizer = opt::Rprop<Params>>
struct MeanLFOpt : public HPOpt<Params, Optimizer> {
public:
template <typename GP>
......@@ -99,8 +98,8 @@ namespace limbo {
GP _original_gp;
};
};
}
}
}
} // namespace gp
} // namespace model
} // namespace limbo
#endif
This diff is collapsed.
//| Copyright Inria May 2015
//| This project has received funding from the European Research Council (ERC) under
//| the European Union's Horizon 2020 research and innovation programme (grant
//| agreement No 637972) - see http://www.resibots.eu
//|
//| Contributor(s):
//| - Jean-Baptiste Mouret (jean-baptiste.mouret@inria.fr)
//| - Antoine Cully (antoinecully@gmail.com)
//| - Konstantinos Chatzilygeroudis (konstantinos.chatzilygeroudis@inria.fr)
//| - Federico Allocati (fede.allocati@gmail.com)
//| - Vaios Papaspyros (b.papaspyros@gmail.com)
//| - Roberto Rama (bertoski@gmail.com)
//|
//| This software is a computer library whose purpose is to optimize continuous,
//| black-box functions. It mainly implements Gaussian processes and Bayesian
//| optimization.
//| Main repository: http://github.com/resibots/limbo
//| Documentation: http://www.resibots.eu/limbo
//|
//| This software is governed by the CeCILL-C license under French law and
//| abiding by the rules of distribution of free software. You can use,
//| modify and/ or redistribute the software under the terms of the CeCILL-C
//| license as circulated by CEA, CNRS and INRIA at the following URL
//| "http://www.cecill.info".
//|
//| As a counterpart to the access to the source code and rights to copy,
//| modify and redistribute granted by the license, users are provided only
//| with a limited warranty and the software's author, the holder of the
//| economic rights, and the successive licensors have only limited
//| liability.
//|
//| In this respect, the user's attention is drawn to the risks associated
//| with loading, using, modifying and/or developing or reproducing the
//| software by the user in light of its specific status of free software,
//| that may mean that it is complicated to manipulate, and that also
//| therefore means that it is reserved for developers and experienced
//| professionals having in-depth computer knowledge. Users are therefore
//| encouraged to load and test the software's suitability as regards their
//| requirements in conditions enabling the security of their systems and/or
//| data to be ensured and, more generally, to use and operate it in the
//| same conditions as regards security.
//|
//| The fact that you are presently reading this means that you have had
//| knowledge of the CeCILL-C license and that you accept its terms.
//|
#ifndef LIMBO_MODEL_MULTI_GP_PARALLEL_LF_OPT_HPP
#define LIMBO_MODEL_MULTI_GP_PARALLEL_LF_OPT_HPP

#include <limbo/model/gp/hp_opt.hpp>
// Self-containment: the two names used below are not declared by hp_opt.hpp,
// so this header must include them itself instead of relying on transitive
// includes from whoever includes us first.
#include <limbo/model/gp/no_lf_opt.hpp> // limbo::model::gp::NoLFOpt (default HyperParamsOptimizer)
#include <limbo/tools/parallel.hpp> // limbo::tools::par::loop

namespace limbo {
    namespace model {
        namespace multi_gp {
            ///@ingroup model_opt
            ///optimize each GP of a multi-GP model independently, in parallel,
            ///using HyperParamsOptimizer (a fresh optimizer instance per sub-model)
            template <typename Params, typename HyperParamsOptimizer = limbo::model::gp::NoLFOpt<Params>>
            struct ParallelLFOpt : public limbo::model::gp::HPOpt<Params> {
            public:
                /// Run hyper-parameter optimization on every sub-model of `gp`.
                /// @param gp a multi-GP model exposing gp_models(), a random-access
                ///        container of single GPs; each element is optimized in place.
                template <typename GP>
                void operator()(GP& gp)
                {
                    this->_called = true; // HPOpt bookkeeping: records that optimization ran
                    auto& gps = gp.gp_models();
                    // Iterations are data-parallel: each one touches only gps[i]
                    // through its own optimizer instance, so no shared mutable state.
                    limbo::tools::par::loop(0, gps.size(), [&](size_t i) {
                        HyperParamsOptimizer hp_optimize;
                        hp_optimize(gps[i]);
                    });
                }
            };
        } // namespace multi_gp
    } // namespace model
} // namespace limbo

#endif
\ No newline at end of file
This diff is collapsed.
......@@ -69,8 +69,6 @@ struct Params {
};
struct opt_rprop : public limbo::defaults::opt_rprop {
};
struct opt_parallelrepeater : public limbo::defaults::opt_parallelrepeater {
};
struct kernel_maternfivehalves {
BO_PARAM(double, sigma_sq, 1);
......@@ -95,8 +93,6 @@ struct LoadParams {
};
struct opt_rprop : public limbo::defaults::opt_rprop {
};
struct opt_parallelrepeater : public limbo::defaults::opt_parallelrepeater {
};
struct kernel_maternfivehalves {
BO_PARAM(double, sigma_sq, 2.);
......
......@@ -69,8 +69,6 @@ struct Params {
};
struct opt_rprop : public defaults::opt_rprop {
};
struct opt_parallelrepeater : public defaults::opt_parallelrepeater {
};
};
int main(int argc, char** argv)
......
Markdown is supported
0% or
You are about to add 0 people to the discussion. Proceed with caution.
Finish editing this message first!
Please register or sign in to comment