#ifndef LIMBO_OPT_OPTIMIZER_HPP
#define LIMBO_OPT_OPTIMIZER_HPP

#include <cassert>
#include <tuple>

#include <Eigen/Core>

#include <boost/optional.hpp>

namespace limbo {
    namespace opt {
        /// return type of the function to optimize
Konstantinos Chatzilygeroudis's avatar
Konstantinos Chatzilygeroudis committed
13
14
        typedef std::pair<double, boost::optional<Eigen::VectorXd>> eval_t;

        /// return with opt::no_grad(your_val) if no gradient is available (to be used in functions to be optimized)
Konstantinos Chatzilygeroudis's avatar
Konstantinos Chatzilygeroudis committed
16
17
        eval_t no_grad(double x) { return eval_t{x, boost::optional<Eigen::VectorXd>{}}; }

        /// get the gradient from a function evaluation (eval_t)
Konstantinos Chatzilygeroudis's avatar
Konstantinos Chatzilygeroudis committed
19
20
21
22
23
24
        const Eigen::VectorXd& grad(const eval_t& fg)
        {
            assert(std::get<1>(fg).is_initialized());
            return std::get<1>(fg).get();
        }

        /// get the value from a function evaluation (eval_t)
Konstantinos Chatzilygeroudis's avatar
Konstantinos Chatzilygeroudis committed
26
27
28
29
30
        double fun(const eval_t& fg)
        {
            return std::get<0>(fg);
        }

        /// Evaluate f without gradient (to be called from the optimization algorithms that do not use the gradient)
Konstantinos Chatzilygeroudis's avatar
Konstantinos Chatzilygeroudis committed
32
33
34
35
36
37
        template <typename F>
        double eval(const F& f, const Eigen::VectorXd& x)
        {
            return std::get<0>(f(x, false));
        }

        /// Evaluate f with gradient (to be called from the optimization algorithms that use the gradient)
Konstantinos Chatzilygeroudis's avatar
Konstantinos Chatzilygeroudis committed
39
40
41
42
43
        template <typename F>
        eval_t eval_grad(const F& f, const Eigen::VectorXd& x)
        {
            return f(x, true);
        }
    }
}

#endif