diff --git a/GPflowOpt/acquisition/acquisition.py b/GPflowOpt/acquisition/acquisition.py
index 6050aa7..463478b 100644
--- a/GPflowOpt/acquisition/acquisition.py
+++ b/GPflowOpt/acquisition/acquisition.py
@@ -174,7 +174,7 @@ def models(self):
 
         :return: list of GPflow models
         """
-        return self._models
+        return self._models.sorted_params
 
     @property
     def data(self):
@@ -318,7 +318,7 @@ def _optimize_models(self):
 
     @Acquisition.models.getter
     def models(self):
-        return ParamList([model for acq in self.operands for model in acq.models.sorted_params])
+        return [model for acq in self.operands for model in acq.models]
 
     def enable_scaling(self, domain):
         for oper in self.operands:
diff --git a/GPflowOpt/models.py b/GPflowOpt/models.py
index 0cdda58..d1c1712 100644
--- a/GPflowOpt/models.py
+++ b/GPflowOpt/models.py
@@ -15,6 +15,35 @@
 from GPflow.model import Model
 
 
+class ParentHook(object):
+    """
+    Temporary workaround for the recompilation issues (#37, GPflow issue #442).
+
+    An instance of this class is returned when highest_parent is accessed on a wrapped model. It holds references to
+    both the highest Parentable and the highest ModelWrapper in the hierarchy. Setting the _needs_recompile flag is
+    intercepted and forwarded to the model through the wrapper, while _kill_autoflow is called on the highest parent.
+    """
+    def __init__(self, highest_parent, highest_model):
+        self._hp = highest_parent
+        self._hm = highest_model
+
+    def __getattr__(self, item):
+        if item == '_needs_recompile':
+            return getattr(self._hm, item)
+        return getattr(self._hp, item)
+
+    def __setattr__(self, key, value):
+        if key in ['_hp', '_hm']:
+            object.__setattr__(self, key, value)
+            return
+        if key == '_needs_recompile':
+            setattr(self._hm, key, value)
+            if value:
+                self._hp._kill_autoflow()
+        else:
+            setattr(self._hp, key, value)
+
+
 class ModelWrapper(Parameterized):
     """
     Class for fast implementation of a wrapper for models defined in GPflow.
@@ -25,6 +54,7 @@ class ModelWrapper(Parameterized):
     AutoFlow methods are influenced following this pattern, the original AF storage (if existing) is unaffected and
     a new storage is added to the subclass.
     """
+
     def __init__(self, model):
        """
        :param model: model to be wrapped
@@ -45,6 +75,7 @@ def __getattr__(self, item):
             method = item[1:].rstrip('_AF_storage')
             if method in dir(self):
                 raise AttributeError("{0} has no attribute {1}".format(self.__class__.__name__, item))
+
         return getattr(self.wrapped, item)
 
     def __setattr__(self, key, value):
@@ -90,3 +121,11 @@ def __eq__(self, other):
     def name(self):
         name = super(ModelWrapper, self).name
         return ".".join([name, str.lower(self.__class__.__name__)])
+
+    @Parameterized.highest_parent.getter
+    def highest_parent(self):
+        """
+        Returns an instance of ParentHook instead of the usual reference to a Parentable.
+        """
+        original_hp = super(ModelWrapper, self).highest_parent
+        return original_hp if isinstance(original_hp, ParentHook) else ParentHook(original_hp, self)
diff --git a/GPflowOpt/scaling.py b/GPflowOpt/scaling.py
index fb6d140..7442e2c 100644
--- a/GPflowOpt/scaling.py
+++ b/GPflowOpt/scaling.py
@@ -13,7 +13,6 @@
 # limitations under the License.
 
 from GPflow.param import DataHolder, AutoFlow
-from GPflow.model import GPModel
 from GPflow import settings
 import numpy as np
 from .transforms import LinearTransform, DataTransform
@@ -53,6 +52,7 @@ class DataScaler(ModelWrapper):
     required, it is the responsibility of the implementation to rescale the hyperparameters. Additionally, applying
     hyperpriors should anticipate for the scaled data.
""" + def __init__(self, model, domain=None, normalize_Y=False): """ :param model: model to be wrapped diff --git a/testing/test_acquisition.py b/testing/test_acquisition.py index 999f2ae..7105ffb 100644 --- a/testing/test_acquisition.py +++ b/testing/test_acquisition.py @@ -146,7 +146,8 @@ def test_object_integrity(self, acquisition): for oper in acquisition.operands: self.assertTrue(isinstance(oper, GPflowOpt.acquisition.Acquisition), msg="All operands should be an acquisition object") - self.assertTrue(all(isinstance(m, GPflowOpt.models.ModelWrapper) for m in acquisition.models.sorted_params)) + + self.assertTrue(all(isinstance(m, GPflowOpt.models.ModelWrapper) for m in acquisition.models)) @parameterized.expand(list(zip(aggregations))) def test_data(self, acquisition): @@ -219,7 +220,7 @@ def test_marginalized_score(self, acquisition): @parameterized.expand(list(zip([aggregations[2]]))) def test_mcmc_acq_models(self, acquisition): - self.assertListEqual(acquisition.models.sorted_params, acquisition.operands[0].models.sorted_params) + self.assertListEqual(acquisition.models, acquisition.operands[0].models) class TestJointAcquisition(unittest.TestCase): @@ -298,3 +299,26 @@ def test_multi_aggr(self): joint = first * second self.assertIsInstance(joint, GPflowOpt.acquisition.AcquisitionProduct) self.assertListEqual(joint.operands.sorted_params, [acq1, acq2, acq3, acq4]) + + +class TestRecompile(unittest.TestCase): + """ + Regression test for #37 + """ + def test_vgp(self): + domain = GPflowOpt.domain.UnitCube(2) + X = GPflowOpt.design.RandomDesign(10, domain).generate() + Y = np.sin(X[:,[0]]) + m = GPflow.vgp.VGP(X, Y, GPflow.kernels.RBF(2), GPflow.likelihoods.Gaussian()) + acq = GPflowOpt.acquisition.ExpectedImprovement(m) + m._compile() + self.assertFalse(m._needs_recompile) + acq.evaluate(GPflowOpt.design.RandomDesign(10, domain).generate()) + self.assertTrue(hasattr(acq, '_evaluate_AF_storage')) + + Xnew = GPflowOpt.design.RandomDesign(5, domain).generate() + Ynew = np.sin(Xnew[:,[0]]) + acq.set_data(np.vstack((X, Xnew)), np.vstack((Y, Ynew))) + self.assertFalse(hasattr(acq, '_needs_recompile')) + self.assertFalse(hasattr(acq, '_evaluate_AF_storage')) + acq.evaluate(GPflowOpt.design.RandomDesign(10, domain).generate()) diff --git a/testing/test_datascaler.py b/testing/test_datascaler.py index 51f6670..13fe452 100644 --- a/testing/test_datascaler.py +++ b/testing/test_datascaler.py @@ -106,3 +106,4 @@ def test_predict_scaling(self): fs = n.predict_density(Xt, Yt) np.testing.assert_allclose(fr, fs, rtol=1e-2) + diff --git a/testing/test_modelwrapper.py b/testing/test_modelwrapper.py index 0046627..6266b74 100644 --- a/testing/test_modelwrapper.py +++ b/testing/test_modelwrapper.py @@ -111,6 +111,35 @@ def test_name(self): n = MethodOverride(create_parabola_model(GPflowOpt.domain.UnitCube(2))) self.assertEqual(n.name, 'unnamed.methodoverride') + def test_parent_hook(self): + self.m.optimize(maxiter=5) + w = GPflowOpt.models.ModelWrapper(self.m) + self.assertTrue(isinstance(self.m.highest_parent, GPflowOpt.models.ParentHook)) + self.assertEqual(self.m.highest_parent._hp, w) + self.assertEqual(self.m.highest_parent._hm, w) + + w2 = GPflowOpt.models.ModelWrapper(w) + self.assertEqual(self.m.highest_parent._hp, w2) + self.assertEqual(self.m.highest_parent._hm, w2) + p = GPflow.param.Parameterized() + p.model = w2 + self.assertEqual(self.m.highest_parent._hp, p) + self.assertEqual(self.m.highest_parent._hm, w2) + p.predictor = create_parabola_model(GPflowOpt.domain.UnitCube(2)) + 
p.predictor.predict_f(p.predictor.X.value) + self.assertTrue(hasattr(p.predictor, '_predict_f_AF_storage')) + self.assertFalse(self.m._needs_recompile) + self.m.highest_parent._needs_recompile = True + self.assertFalse('_needs_recompile' in p.__dict__) + self.assertFalse('_needs_recompile' in w.__dict__) + self.assertFalse('_needs_recompile' in w2.__dict__) + self.assertTrue(self.m._needs_recompile) + self.assertFalse(hasattr(p.predictor, '_predict_f_AF_storage')) + + self.assertEqual(self.m.highest_parent.get_free_state, p.get_free_state) + self.m.highest_parent._needs_setup = True + self.assertTrue(hasattr(p, '_needs_setup')) + self.assertTrue(p._needs_setup)
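
Note (not part of the patch): a minimal sketch of how the ParentHook interception is exercised once a GPflow model is wrapped, assuming GPflow 0.x and the GPflowOpt changes above are importable; the GPR construction is only a stand-in for whichever model gets wrapped in practice.

    # Illustrative sketch only -- mirrors test_parent_hook above.
    import numpy as np
    import GPflow
    import GPflowOpt

    X = np.random.rand(10, 2)
    Y = np.sum(np.square(X), axis=1, keepdims=True)
    m = GPflow.gpr.GPR(X, Y, GPflow.kernels.RBF(2))
    w = GPflowOpt.models.ModelWrapper(m)

    # highest_parent on the wrapped model now resolves to a ParentHook rather than
    # a plain Parentable; with a single wrapper, both _hp and _hm point at w.
    hook = m.highest_parent
    assert isinstance(hook, GPflowOpt.models.ParentHook)

    # Setting the recompilation flag is intercepted: the flag is forwarded through
    # the wrapper onto the underlying model, and _kill_autoflow() is triggered on
    # the highest parent to clear any cached AutoFlow storages.
    hook._needs_recompile = True
    assert m._needs_recompile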