Skip to content
Merged
Show file tree
Hide file tree
Changes from all commits
Commits
File filter

Filter by extension

Filter by extension

Conversations
Failed to load comments.
Loading
Jump to
Jump to file
Failed to load files.
Loading
Diff view
Diff view
4 changes: 2 additions & 2 deletions GPflowOpt/acquisition/acquisition.py
Original file line number Diff line number Diff line change
Expand Up @@ -174,7 +174,7 @@ def models(self):

:return: list of GPflow models
"""
return self._models
return self._models.sorted_params

@property
def data(self):
Expand Down Expand Up @@ -318,7 +318,7 @@ def _optimize_models(self):

@Acquisition.models.getter
def models(self):
return ParamList([model for acq in self.operands for model in acq.models.sorted_params])
return [model for acq in self.operands for model in acq.models]

def enable_scaling(self, domain):
for oper in self.operands:
Expand Down
39 changes: 39 additions & 0 deletions GPflowOpt/models.py
Original file line number Diff line number Diff line change
Expand Up @@ -15,6 +15,35 @@
from GPflow.model import Model


class ParentHook(object):
    """
    Temporary solution for fixing the recompilation issues (#37, GPflow issue #442).

    An object of this class is returned when highest_parent is called on a model, which holds references to the highest
    parentable, as well as the highest model class. When setting the needs recompile flag, this is intercepted and
    performed on the model. At the same time, kill autoflow is called on the highest parent.
    """

    def __init__(self, highest_parent, highest_model):
        """
        :param highest_parent: top-most parentable in the tree; receives every
            attribute access except the recompile flag
        :param highest_model: top-most model in the tree; receives the
            ``_needs_recompile`` flag reads/writes
        """
        self._hp = highest_parent
        self._hm = highest_model

    def __getattr__(self, item):
        # Route the recompile flag to the model, everything else to the parent.
        # BUGFIX: compare with ==, not 'is' — identity of str literals is an
        # interning artifact (and a SyntaxWarning on modern Python), so the
        # original check could silently fail for non-interned strings.
        if item == '_needs_recompile':
            return getattr(self._hm, item)
        return getattr(self._hp, item)

    def __setattr__(self, key, value):
        # The hook's own two references are stored on the hook itself.
        if key in ('_hp', '_hm'):
            object.__setattr__(self, key, value)
            return
        if key == '_needs_recompile':  # BUGFIX: == instead of 'is' (see __getattr__)
            # The recompile flag belongs on the model...
            setattr(self._hm, key, value)
            if value:
                # ...and requesting a recompile also invalidates the AutoFlow
                # storages all the way up the tree.
                self._hp._kill_autoflow()
        else:
            setattr(self._hp, key, value)


class ModelWrapper(Parameterized):
"""
Class for fast implementation of a wrapper for models defined in GPflow.
Expand All @@ -25,6 +54,7 @@ class ModelWrapper(Parameterized):
AutoFlow methods are influenced following this pattern, the original AF storage (if existing) is unaffected and a
new storage is added to the subclass.
"""

def __init__(self, model):
"""
:param model: model to be wrapped
Expand All @@ -45,6 +75,7 @@ def __getattr__(self, item):
method = item[1:].rstrip('_AF_storage')
if method in dir(self):
raise AttributeError("{0} has no attribute {1}".format(self.__class__.__name__, item))

return getattr(self.wrapped, item)

def __setattr__(self, key, value):
Expand Down Expand Up @@ -90,3 +121,11 @@ def __eq__(self, other):
def name(self):
    """Return the wrapped object's name suffixed with this class's lowercased name."""
    base = super(ModelWrapper, self).name
    suffix = self.__class__.__name__.lower()
    return base + "." + suffix

@Parameterized.highest_parent.getter
def highest_parent(self):
    """
    Returns an instance of the ParentHook instead of the usual reference to a Parentable.

    If the chain above already produced a ParentHook, it is passed through
    unchanged; otherwise the plain highest parent is wrapped together with
    this model wrapper.
    """
    parent = super(ModelWrapper, self).highest_parent
    if isinstance(parent, ParentHook):
        return parent
    return ParentHook(parent, self)
2 changes: 1 addition & 1 deletion GPflowOpt/scaling.py
Original file line number Diff line number Diff line change
Expand Up @@ -13,7 +13,6 @@
# limitations under the License.

from GPflow.param import DataHolder, AutoFlow
from GPflow.model import GPModel
from GPflow import settings
import numpy as np
from .transforms import LinearTransform, DataTransform
Expand Down Expand Up @@ -53,6 +52,7 @@ class DataScaler(ModelWrapper):
required, it is the responsibility of the implementation to rescale the hyperparameters. Additionally, applying
hyperpriors should anticipate for the scaled data.
"""

def __init__(self, model, domain=None, normalize_Y=False):
"""
:param model: model to be wrapped
Expand Down
28 changes: 26 additions & 2 deletions testing/test_acquisition.py
Original file line number Diff line number Diff line change
Expand Up @@ -146,7 +146,8 @@ def test_object_integrity(self, acquisition):
for oper in acquisition.operands:
self.assertTrue(isinstance(oper, GPflowOpt.acquisition.Acquisition),
msg="All operands should be an acquisition object")
self.assertTrue(all(isinstance(m, GPflowOpt.models.ModelWrapper) for m in acquisition.models.sorted_params))

self.assertTrue(all(isinstance(m, GPflowOpt.models.ModelWrapper) for m in acquisition.models))

@parameterized.expand(list(zip(aggregations)))
def test_data(self, acquisition):
Expand Down Expand Up @@ -219,7 +220,7 @@ def test_marginalized_score(self, acquisition):

@parameterized.expand(list(zip([aggregations[2]])))
def test_mcmc_acq_models(self, acquisition):
self.assertListEqual(acquisition.models.sorted_params, acquisition.operands[0].models.sorted_params)
self.assertListEqual(acquisition.models, acquisition.operands[0].models)


class TestJointAcquisition(unittest.TestCase):
Expand Down Expand Up @@ -298,3 +299,26 @@ def test_multi_aggr(self):
joint = first * second
self.assertIsInstance(joint, GPflowOpt.acquisition.AcquisitionProduct)
self.assertListEqual(joint.operands.sorted_params, [acq1, acq2, acq3, acq4])


class TestRecompile(unittest.TestCase):
    """
    Regression test for #37
    """

    def test_vgp(self):
        cube = GPflowOpt.domain.UnitCube(2)
        points = GPflowOpt.design.RandomDesign(10, cube).generate()
        responses = np.sin(points[:, [0]])
        model = GPflow.vgp.VGP(points, responses, GPflow.kernels.RBF(2), GPflow.likelihoods.Gaussian())
        acq = GPflowOpt.acquisition.ExpectedImprovement(model)

        # After an explicit compile, the model must not be flagged for recompilation.
        model._compile()
        self.assertFalse(model._needs_recompile)

        # Evaluating the acquisition creates its AutoFlow storage.
        acq.evaluate(GPflowOpt.design.RandomDesign(10, cube).generate())
        self.assertTrue(hasattr(acq, '_evaluate_AF_storage'))

        # Growing the data set must clear the AF storage without attaching a
        # recompile flag to the acquisition itself, and evaluation must still run.
        extra_points = GPflowOpt.design.RandomDesign(5, cube).generate()
        extra_responses = np.sin(extra_points[:, [0]])
        acq.set_data(np.vstack((points, extra_points)), np.vstack((responses, extra_responses)))
        self.assertFalse(hasattr(acq, '_needs_recompile'))
        self.assertFalse(hasattr(acq, '_evaluate_AF_storage'))
        acq.evaluate(GPflowOpt.design.RandomDesign(10, cube).generate())
1 change: 1 addition & 0 deletions testing/test_datascaler.py
Original file line number Diff line number Diff line change
Expand Up @@ -106,3 +106,4 @@ def test_predict_scaling(self):
fs = n.predict_density(Xt, Yt)
np.testing.assert_allclose(fr, fs, rtol=1e-2)


29 changes: 29 additions & 0 deletions testing/test_modelwrapper.py
Original file line number Diff line number Diff line change
Expand Up @@ -111,6 +111,35 @@ def test_name(self):
n = MethodOverride(create_parabola_model(GPflowOpt.domain.UnitCube(2)))
self.assertEqual(n.name, 'unnamed.methodoverride')

def test_parent_hook(self):
# Exercises ParentHook: highest_parent on a wrapped model must be a hook
# whose _hp/_hm track the tallest wrapper / parentable in the tree.
self.m.optimize(maxiter=5)
w = GPflowOpt.models.ModelWrapper(self.m)
# A single wrapper is both the highest parentable and the highest model.
self.assertTrue(isinstance(self.m.highest_parent, GPflowOpt.models.ParentHook))
self.assertEqual(self.m.highest_parent._hp, w)
self.assertEqual(self.m.highest_parent._hm, w)

# Wrapping again moves both hook targets to the outermost wrapper.
w2 = GPflowOpt.models.ModelWrapper(w)
self.assertEqual(self.m.highest_parent._hp, w2)
self.assertEqual(self.m.highest_parent._hm, w2)

# Attaching to a plain Parameterized: the parent target climbs to p,
# while the model target stays at the outermost wrapper.
p = GPflow.param.Parameterized()
p.model = w2
self.assertEqual(self.m.highest_parent._hp, p)
self.assertEqual(self.m.highest_parent._hm, w2)

# A sibling model's AutoFlow storage should be wiped when a recompile is
# requested through the hook (kill_autoflow runs on the whole tree).
p.predictor = create_parabola_model(GPflowOpt.domain.UnitCube(2))
p.predictor.predict_f(p.predictor.X.value)
self.assertTrue(hasattr(p.predictor, '_predict_f_AF_storage'))
self.assertFalse(self.m._needs_recompile)
self.m.highest_parent._needs_recompile = True
# The flag must land only on the model itself — not on p or the wrappers.
self.assertFalse('_needs_recompile' in p.__dict__)
self.assertFalse('_needs_recompile' in w.__dict__)
self.assertFalse('_needs_recompile' in w2.__dict__)
self.assertTrue(self.m._needs_recompile)
self.assertFalse(hasattr(p.predictor, '_predict_f_AF_storage'))

# All other attribute reads/writes pass straight through to the parent (p).
self.assertEqual(self.m.highest_parent.get_free_state, p.get_free_state)
self.m.highest_parent._needs_setup = True
self.assertTrue(hasattr(p, '_needs_setup'))
self.assertTrue(p._needs_setup)