Matthias Mayr / limbo

Commit 74ab2f05
Authored Oct 07, 2016 by Konstantinos Chatzilygeroudis; committed by GitHub on Oct 07, 2016

Merge pull request #180 from resibots/experimental_fixes

Experimental fixes

Parents: 3414eab0, e5cc758c
Changes: 3 files
src/examples/experimental/cbo.cpp (view file @ 74ab2f05)

...
@@ -6,7 +6,7 @@
 using namespace limbo;
 
 struct Params {
-    struct cbayes_opt_boptimizer : public defaults::cbayes_opt_boptimizer {
+    struct bayes_opt_cboptimizer : public defaults::bayes_opt_cboptimizer {
         BO_PARAM(double, noise, 0.01);
     };
...
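Orientation note (not part of the diff): BO_PARAM generates a static accessor on the parameter struct, so after the rename any code that reads the value has to go through the new struct name. A minimal usage sketch, with the variable name chosen for illustration:

    // Hypothetical usage, not from this commit: read the value that
    // BO_PARAM(double, noise, 0.01) exposes on the renamed parameter struct.
    double noise = Params::bayes_opt_cboptimizer::noise(); // 0.01 in cbo.cpp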
src/limbo/experimental/acqui/eci.hpp (view file @ 74ab2f05)

...
@@ -72,8 +72,10 @@ namespace limbo {
         size_t dim_out() const { return _model.dim_out(); }
 
         template <typename AggregatorFunction>
-        double operator()(const Eigen::VectorXd& v, const AggregatorFunction& afun)
+        opt::eval_t operator()(const Eigen::VectorXd& v, const AggregatorFunction& afun, bool gradient)
         {
+            assert(!gradient);
             Eigen::VectorXd mu;
             double sigma_sq;
             std::tie(mu, sigma_sq) = _model.query(v);
...
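Context for the new signature (an explanatory sketch, not part of the commit): limbo's optimizers call the objective with a candidate point plus a flag saying whether a gradient is requested, and expect an opt::eval_t back; opt::no_grad(value) builds such a result without a gradient, which is why the functor asserts that no gradient is asked for. Assuming the aggregate header limbo/opt.hpp, a self-contained toy objective following the same convention would be:

    #include <cassert>
    #include <Eigen/Core>
    #include <limbo/opt.hpp> // assumed aggregate header for limbo::opt

    // Toy gradient-free objective using the same convention as the updated ECI:
    // take the candidate point and a gradient-request flag, and return an
    // opt::eval_t built with opt::no_grad() since no gradient is available.
    limbo::opt::eval_t neg_sphere(const Eigen::VectorXd& x, bool gradient)
    {
        assert(!gradient); // this objective cannot provide a gradient
        return limbo::opt::no_grad(-x.squaredNorm());
    }

This is the shape the cboptimizer.hpp change below relies on when it forwards its g flag straight to acqui(x, afun, g).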
@@ -81,7 +83,7 @@ namespace limbo {
             // If \sigma(x) = 0 or we do not have any observation yet we return 0
             if (sigma < 1e-10 || _model.samples().size() < 1)
-                return 0.0;
+                return opt::no_grad(0.0);
 
             // Compute expected constrained improvement
             // First find the best (predicted) observation so far -- if needed
...
@@ -100,7 +102,7 @@ namespace limbo {
             double phi = std::exp(-0.5 * std::pow(Z, 2.0)) / std::sqrt(2.0 * M_PI);
             double Phi = 0.5 * std::erfc(-Z / std::sqrt(2));
 
-            return _pf(v, afun) * (X * Phi + sigma * phi);
+            return opt::no_grad(_pf(v, afun) * (X * Phi + sigma * phi));
         }
 
     protected:
...
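The arithmetic in the last hunk is the usual expected-improvement expression scaled by the probability of feasibility _pf; only the wrapping in opt::no_grad changes in this commit. Written out, with X and Z taken from the unchanged part of the file (presumably X = mu(x) - f_best and Z = X / sigma):

    \phi(Z) = \frac{1}{\sqrt{2\pi}}\, e^{-Z^2/2}, \qquad
    \Phi(Z) = \tfrac{1}{2}\,\operatorname{erfc}\!\left(-\tfrac{Z}{\sqrt{2}}\right), \qquad
    \mathrm{ECI}(x) = p_f(x)\,\bigl(X\,\Phi(Z) + \sigma(x)\,\phi(Z)\bigr).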
src/limbo/experimental/bayes_opt/cboptimizer.hpp (view file @ 74ab2f05)

...
@@ -66,9 +66,10 @@
 namespace limbo {
     namespace defaults {
-        struct cbayes_opt_boptimizer {
+        struct bayes_opt_cboptimizer {
             BO_PARAM(double, noise, 1e-6);
             BO_PARAM(int, hp_period, -1);
+            BO_PARAM(bool, bounded, true);
         };
     }
...
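Both knobs visible in this hunk, hp_period and the newly added bounded, are read in the optimizer loop further down: bounded is forwarded to tools::random_vector and to the acquisition optimizer, and hp_period gates how often hyperparameters are re-fit. A hedged sketch of overriding them in user code, following the same pattern as the cbo.cpp example above (values are illustrative):

    // Illustrative only, not part of this commit.
    struct Params {
        struct bayes_opt_cboptimizer : public defaults::bayes_opt_cboptimizer {
            BO_PARAM(double, noise, 1e-6);
            BO_PARAM(int, hp_period, 10);   // re-fit model hyperparameters every 10 iterations
            BO_PARAM(bool, bounded, false); // drop the bounded-domain restriction
        };
        // ... the other parameter structs required by the chosen model,
        // acquisition function and inner optimizer go here.
    };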
@@ -155,9 +156,9 @@ namespace limbo {
            if (!this->_observations.empty()) {
                _split_observations();
-               _model.compute(this->_samples, _obs[0], Eigen::VectorXd::Constant(_obs[0].size(), Params::cbayes_opt_boptimizer::noise()));
+               _model.compute(this->_samples, _obs[0], Eigen::VectorXd::Constant(_obs[0].size(), Params::bayes_opt_cboptimizer::noise()));
 
                if (_nb_constraints > 0)
-                   _constraint_model.compute(this->_samples, _obs[1], Eigen::VectorXd::Constant(_obs[1].size(), Params::cbayes_opt_boptimizer::noise()));
+                   _constraint_model.compute(this->_samples, _obs[1], Eigen::VectorXd::Constant(_obs[1].size(), Params::bayes_opt_cboptimizer::noise()));
            }
            else {
                _model = model_t(StateFunction::dim_in, StateFunction::dim_out);
...
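A side note on the compute() calls (unchanged apart from the rename): their third argument is a per-observation noise vector, built by broadcasting the scalar from Params with Eigen::VectorXd::Constant. In isolation, and with an illustrative container name:

    // One noise entry per observation, all equal to the configured scalar.
    Eigen::VectorXd noises =
        Eigen::VectorXd::Constant(observations.size(), Params::bayes_opt_cboptimizer::noise());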
@@ -170,21 +171,20 @@ namespace limbo {
            while (!this->_stop(*this, afun)) {
                acquisition_function_t acqui(_model, _constraint_model, this->_current_iteration);
 
-               // we do not have gradient in our current acquisition function
-               auto acqui_optimization =
-                   [&](const Eigen::VectorXd& x, bool g) { return opt::no_grad(acqui(x, afun)); };
-               Eigen::VectorXd starting_point = tools::random_vector(StateFunction::dim_in);
-               Eigen::VectorXd new_sample = acqui_optimizer(acqui_optimization, starting_point, true);
+               auto acqui_optimization =
+                   [&](const Eigen::VectorXd& x, bool g) { return acqui(x, afun, g); };
+               Eigen::VectorXd starting_point = tools::random_vector(StateFunction::dim_in, Params::bayes_opt_cboptimizer::bounded());
+               Eigen::VectorXd new_sample = acqui_optimizer(acqui_optimization, starting_point, Params::bayes_opt_cboptimizer::bounded());
                this->eval_and_add(sfun, new_sample);
 
                this->_update_stats(*this, afun);
 
-               _model.add_sample(this->_samples.back(), _obs[0].back(), Params::cbayes_opt_boptimizer::noise());
+               _model.add_sample(this->_samples.back(), _obs[0].back(), Params::bayes_opt_cboptimizer::noise());
 
                if (_nb_constraints > 0)
-                   _constraint_model.add_sample(this->_samples.back(), _obs[1].back(), Params::cbayes_opt_boptimizer::noise());
+                   _constraint_model.add_sample(this->_samples.back(), _obs[1].back(), Params::bayes_opt_cboptimizer::noise());
 
-               if (Params::cbayes_opt_boptimizer::hp_period() > 0
-                   && (this->_current_iteration + 1) % Params::cbayes_opt_boptimizer::hp_period() == 0) {
+               if (Params::bayes_opt_cboptimizer::hp_period() > 0
+                   && (this->_current_iteration + 1) % Params::bayes_opt_cboptimizer::hp_period() == 0) {
                    _model.optimize_hyperparams();
                    if (_nb_constraints > 0)
                        _constraint_model.optimize_hyperparams();
...
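Added clarification, not in the commit: the hyperparameter gate reads (this->_current_iteration + 1) % hp_period() == 0, so with the default hp_period of -1 it never fires, while e.g. hp_period = 10 re-runs optimize_hyperparams() after iterations 9, 19, 29, ... (0-based), i.e. every tenth evaluation.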