//| Copyright Inria May 2015
//| This project has received funding from the European Research Council (ERC) under
//| the European Union's Horizon 2020 research and innovation programme (grant
//| agreement No 637972) - see http://www.resibots.eu
//|
//| Contributor(s):
//|   - Jean-Baptiste Mouret (jean-baptiste.mouret@inria.fr)
//|   - Antoine Cully (antoinecully@gmail.com)
//|   - Konstantinos Chatzilygeroudis (konstantinos.chatzilygeroudis@inria.fr)
//|   - Federico Allocati (fede.allocati@gmail.com)
//|   - Vaios Papaspyros (b.papaspyros@gmail.com)
//|   - Roberto Rama (bertoski@gmail.com)
//|
//| This software is a computer library whose purpose is to optimize continuous,
//| black-box functions. It mainly implements Gaussian processes and Bayesian
//| optimization.
//| Main repository: http://github.com/resibots/limbo
//| Documentation: http://www.resibots.eu/limbo
//|
//| This software is governed by the CeCILL-C license under French law and
//| abiding by the rules of distribution of free software.  You can  use,
//| modify and/ or redistribute the software under the terms of the CeCILL-C
//| license as circulated by CEA, CNRS and INRIA at the following URL
//| "http://www.cecill.info".
//|
//| As a counterpart to the access to the source code and  rights to copy,
//| modify and redistribute granted by the license, users are provided only
//| with a limited warranty  and the software's author,  the holder of the
//| economic rights,  and the successive licensors  have only  limited
//| liability.
//|
//| In this respect, the user's attention is drawn to the risks associated
//| with loading,  using,  modifying and/or developing or reproducing the
//| software by the user in light of its specific status of free software,
//| that may mean  that it is complicated to manipulate,  and  that  also
//| therefore means  that it is reserved for developers  and  experienced
//| professionals having in-depth computer knowledge. Users are therefore
//| encouraged to load and test the software's suitability as regards their
//| requirements in conditions enabling the security of their systems and/or
//| data to be ensured and,  more generally, to use and operate it in the
//| same conditions as regards security.
//|
//| The fact that you are presently reading this means that you have had
//| knowledge of the CeCILL-C license and that you accept its terms.
//|
#ifndef LIMBO_OPT_OPTIMIZER_HPP
#define LIMBO_OPT_OPTIMIZER_HPP

#include <tuple>

#include <Eigen/Core>

#include <boost/optional.hpp>

namespace limbo {
    ///\defgroup opt_tools
    namespace opt {

        ///@ingroup opt_tools
        /// return type of the function to optimize
        using eval_t = std::pair<double, boost::optional<Eigen::VectorXd>>;

        ///@ingroup opt_tools
        /// return opt::no_grad(your_val) if no gradient is available (to be used in functions to be optimized)
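        /// Example (an illustrative sketch, not part of the original API docs): a
        /// function to be optimized receives a candidate point and a flag telling it
        /// whether the caller wants the gradient, and returns an eval_t. The
        /// quadratic objective and the name `my_sphere` are assumptions made only
        /// for this example.
        /// \code
        /// limbo::opt::eval_t my_sphere(const Eigen::VectorXd& x, bool gradient)
        /// {
        ///     double v = -x.squaredNorm();          // objective value at x
        ///     if (!gradient)
        ///         return limbo::opt::no_grad(v);    // no gradient requested
        ///     Eigen::VectorXd g = -2.0 * x;         // analytical gradient
        ///     return {v, g};                        // value + gradient
        /// }
        /// \endcode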
        inline eval_t no_grad(double x) { return eval_t{x, boost::optional<Eigen::VectorXd>{}}; }

        ///@ingroup opt_tools
        /// get the gradient from a function evaluation (eval_t)
        inline const Eigen::VectorXd& grad(const eval_t& fg)
        {
            assert(std::get<1>(fg).is_initialized());
            return std::get<1>(fg).get();
        }

        ///@ingroup opt_tools
        /// get the value from a function evaluation (eval_t)
        inline double fun(const eval_t& fg)
        {
            return std::get<0>(fg);
        }

        ///@ingroup opt_tools
        /// Evaluate f without gradient (to be called from the optimization algorithms that do not use the gradient)
        template <typename F>
        inline double eval(const F& f, const Eigen::VectorXd& x)
        {
            return std::get<0>(f(x, false));
        }

        ///@ingroup opt_tools
        /// Evaluate f with gradient (to be called from the optimization algorithms that use the gradient)
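        /// Example (an illustrative sketch): one gradient-based update step, as an
        /// optimizer might perform it; the step size `alpha` and the name
        /// `gradient_ascent_step` are assumptions made only for this example.
        /// \code
        /// template <typename F>
        /// Eigen::VectorXd gradient_ascent_step(const F& f, const Eigen::VectorXd& x, double alpha)
        /// {
        ///     limbo::opt::eval_t res = limbo::opt::eval_grad(f, x); // value + gradient at x
        ///     double value = limbo::opt::fun(res);                  // objective value
        ///     Eigen::VectorXd g = limbo::opt::grad(res);            // gradient (asserts it is present)
        ///     (void)value; // a real optimizer would use it, e.g. in a stopping test
        ///     return x + alpha * g; // move uphill (assuming we are maximizing)
        /// }
        /// // gradient-free algorithms would only call:
        /// //     double v = limbo::opt::eval(f, x);
        /// \endcode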
        template <typename F>
        inline eval_t eval_grad(const F& f, const Eigen::VectorXd& x)
        {
            return f(x, true);
        }
    } // namespace opt
} // namespace limbo

#endif