Callback #73
@@ -214,8 +214,8 @@ def test_optimize_multi_objective(self):
         result = optimizer.optimize(vlmop2, n_iter=2)
         self.assertTrue(result.success)
         self.assertEqual(result.nfev, 2, "Only 2 evaluations permitted")
-        self.assertTupleEqual(result.x.shape, (9, 2))
-        self.assertTupleEqual(result.fun.shape, (9, 2))
+        self.assertTupleEqual(result.x.shape, (7, 2))
+        self.assertTupleEqual(result.fun.shape, (7, 2))
         _, dom = gpflowopt.pareto.non_dominated_sort(result.fun)
         self.assertTrue(np.all(dom==0))
@@ -288,6 +288,71 @@ def test_mcmc(self):
         self.assertTrue(np.allclose(result.x, 0), msg="Optimizer failed to find optimum")
         self.assertTrue(np.allclose(result.fun, 0), msg="Incorrect function value returned")
 
+    def test_callback(self):
+        class DummyCallback(object):
+            def __init__(self):
+                self.counter = 0
+
+            def __call__(self, models):
+                self.counter += 1
Review comment:
Let's think about the callback signature some more. Is there any information we want to pass that might be useful for model building? For instance, to let the model building strategy depend on the iteration number (we could stop optimizing the hyperparameters after a while, as in the MES paper), although we can also look at the data set size. What about model building strategies that change model.X and model.Y (like replacing clusters, etc.)? I'm not sure whether that fits here or is even relevant (the GPflow model should be able to cope with it).

Reply:
I think the model contains all the data you need to accomplish something. I believe X and Y can even be updated in this callback as long as the model supports it (all models in GPflow do). If at some point some information is really missing, it can be added.
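Below is a minimal sketch of the kind of data-modifying callback discussed in this thread. It assumes the GPflow 0.x-style data holders used by GPflowOpt (arrays read via `.value`, updated by attribute assignment); the class name and the subsampling rule are purely illustrative and not part of this PR:

```python
import numpy as np

class SubsampleCallback(object):
    """Hypothetical model-building callback: before the models are
    (re)optimized, keep only the most recent max_points observations."""

    def __init__(self, max_points=50):
        self.max_points = max_points

    def __call__(self, models):
        for m in models:
            X, Y = m.X.value, m.Y.value  # DataHolders expose numpy arrays via .value
            if X.shape[0] > self.max_points:
                m.X = X[-self.max_points:, :]  # attribute assignment updates the DataHolder
                m.Y = Y[-self.max_points:, :]
```

It would be passed in the same way as the dummies in these tests, e.g. `gpflowopt.BayesianOptimizer(domain, acquisition, callback=SubsampleCallback(50))`.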
+
+        c = DummyCallback()
+        optimizer = gpflowopt.BayesianOptimizer(self.domain, self.acquisition, callback=c)
+        result = optimizer.optimize(lambda X: parabola2d(X)[0], n_iter=2)
+        self.assertEqual(c.counter, 2)
+
+    def test_callback_recompile(self):
+        class DummyCallback(object):
+            def __init__(self):
+                self.recompile = False
+
+            def __call__(self, models):
+                c = np.random.randint(2, 10)
+                models[0].kern.variance.prior = gpflow.priors.Gamma(c, 1./c)
+                self.recompile = models[0]._needs_recompile
+
+        c = DummyCallback()
+        optimizer = gpflowopt.BayesianOptimizer(self.domain, self.acquisition, callback=c)
+        self.acquisition.evaluate(np.zeros((1,2)))  # Make sure it's run and set up to skip
+        result = optimizer.optimize(lambda X: parabola2d(X)[0], n_iter=1)
+        self.assertFalse(c.recompile)
+        result = optimizer.optimize(lambda X: parabola2d(X)[0], n_iter=1)
+        self.assertTrue(c.recompile)
+        self.assertFalse(self.acquisition.models[0]._needs_recompile)
+
+    def test_callback_recompile_mcmc(self):
+        class DummyCallback(object):
+            def __init__(self):
+                self.no_models = 0
+
+            def __call__(self, models):
+                c = np.random.randint(2, 10)
+                models[0].kern.variance.prior = gpflow.priors.Gamma(c, 1. / c)
+                self.no_models = len(models)
+
+        c = DummyCallback()
+        optimizer = gpflowopt.BayesianOptimizer(self.domain, self.acquisition, hyper_draws=5, callback=c)
+        opers = optimizer.acquisition.operands
+        result = optimizer.optimize(lambda X: parabola2d(X)[0], n_iter=1)
+        self.assertEqual(c.no_models, 1)
+        self.assertEqual(id(opers[0]), id(optimizer.acquisition.operands[0]))
+        for op1, op2 in zip(opers[1:], optimizer.acquisition.operands[1:]):
+            self.assertNotEqual(id(op1), id(op2))
+        opers = optimizer.acquisition.operands
+        result = optimizer.optimize(lambda X: parabola2d(X)[0], n_iter=1)
+        self.assertEqual(id(opers[0]), id(optimizer.acquisition.operands[0]))
+        for op1, op2 in zip(opers[1:], optimizer.acquisition.operands[1:]):
+            self.assertNotEqual(id(op1), id(op2))
+
+    def test_nongpr_model(self):
+        design = gpflowopt.design.LatinHyperCube(16, self.domain)
+        X, Y = design.generate(), parabola2d(design.generate())[0]
+        m = gpflow.vgp.VGP(X, Y, gpflow.kernels.RBF(2, ARD=True), likelihood=gpflow.likelihoods.Gaussian())
+        acq = gpflowopt.acquisition.ExpectedImprovement(m)
+        optimizer = gpflowopt.BayesianOptimizer(self.domain, acq)
+        result = optimizer.optimize(lambda X: parabola2d(X)[0], n_iter=1)
+        self.assertTrue(result.success)
+
+
 class TestSilentOptimization(unittest.TestCase):
     @contextmanager
@@ -323,3 +388,4 @@ def _optimize(self, objective):
         opt.optimize(None)
         output = out.getvalue().strip()
         self.assertEqual(output, '')
+
Review comment:
Maybe show a warning?
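If this suggestion were adopted, one option that would not break the empty-output assertion above is Python's standard warnings module, since warnings are reported through the warnings machinery (stderr by default) rather than the captured stdout. A sketch under that assumption; the helper below is illustrative, not code from this PR:

```python
import io
import warnings
from contextlib import redirect_stdout

def run_silently(fn, *args, **kwargs):
    """Hypothetical helper: run fn with stdout suppressed, while warning
    the caller that output is being discarded. The warning bypasses stdout,
    so a test that captures stdout still sees ''."""
    warnings.warn("stdout is suppressed for this call", UserWarning)
    buf = io.StringIO()
    with redirect_stdout(buf):
        return fn(*args, **kwargs)
```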