Skip to content
Merged
Show file tree
Hide file tree
Changes from all commits
Commits
File filter

Filter by extension

Filter by extension

Conversations
Failed to load comments.
Loading
Jump to
Jump to file
Failed to load files.
Loading
Diff view
Diff view
1 change: 1 addition & 0 deletions GPflowOpt/__init__.py
Original file line number Diff line number Diff line change
Expand Up @@ -21,5 +21,6 @@
from . import scaling
from . import objective
from . import pareto
from . import models

from ._version import __version__
6 changes: 5 additions & 1 deletion GPflowOpt/acquisition/acquisition.py
Original file line number Diff line number Diff line change
Expand Up @@ -14,8 +14,10 @@

from ..scaling import DataScaler
from ..domain import UnitCube
from ..models import ModelWrapper

from GPflow.param import Parameterized, AutoFlow, ParamList
from GPflow.model import Model
from GPflow import settings

import numpy as np
Expand Down Expand Up @@ -48,7 +50,9 @@ def __init__(self, models=[], optimize_restarts=5):
:param optimize_restarts: number of optimization restarts to use when training the models
"""
super(Acquisition, self).__init__()
self._models = ParamList([DataScaler(m) for m in np.atleast_1d(models).tolist()])
models = np.atleast_1d(models)
assert all(isinstance(model, (Model, ModelWrapper)) for model in models)
self._models = ParamList([DataScaler(m) for m in models])

assert (optimize_restarts >= 0)
self.optimize_restarts = optimize_restarts
Expand Down
1 change: 0 additions & 1 deletion GPflowOpt/acquisition/ei.py
Original file line number Diff line number Diff line change
Expand Up @@ -57,7 +57,6 @@ def __init__(self, model):
:param model: GPflow model (single output) representing our belief of the objective
"""
super(ExpectedImprovement, self).__init__(model)
assert (isinstance(model, Model))
self.fmin = DataHolder(np.zeros(1))
self.setup()

Expand Down
92 changes: 92 additions & 0 deletions GPflowOpt/models.py
Original file line number Diff line number Diff line change
@@ -0,0 +1,92 @@
# Copyright 2017 Joachim van der Herten
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
from GPflow.param import Parameterized
from GPflow.model import Model


class ModelWrapper(Parameterized):
    """
    Class for fast implementation of a wrapper for models defined in GPflow.

    Once wrapped, all lookups for attributes which are not found in the wrapper class are automatically forwarded
    to the wrapped model. To influence the I/O of methods on the wrapped class, simply implement the method in the
    wrapper and call the appropriate methods on the wrapped class. Specific logic is included to make sure that if
    AutoFlow methods are influenced following this pattern, the original AF storage (if existing) is unaffected and a
    new storage is added to the subclass.
    """

    # Suffix GPflow appends to the attribute caching an AutoFlow-compiled graph.
    _AF_SUFFIX = '_AF_storage'

    def __init__(self, model):
        """
        :param model: model to be wrapped; a GPflow :class:`Model` or another
            :class:`ModelWrapper` (wrappers may be nested).
        """
        super(ModelWrapper, self).__init__()

        assert isinstance(model, (Model, ModelWrapper))
        #: Wrapped model
        self.wrapped = model

    def __getattr__(self, item):
        """
        If an attribute is not found in this class, it is searched in the wrapped model.

        :raises AttributeError: for an AutoFlow storage whose method is overridden here,
            so the wrapper method gets its own, separate storage (see below).
        """
        # Exception for AF storages: if a method with the same name exists in this class,
        # do not find the cache in the wrapped model.
        if item.endswith(self._AF_SUFFIX):
            # Remove the leading underscore and the exact '_AF_storage' suffix to recover
            # the method name. NOTE: the original rstrip('_AF_storage') stripped a
            # *character set* and could mangle method names ending in those characters.
            method = item[1:-len(self._AF_SUFFIX)]
            if method in dir(self):
                raise AttributeError("{0} has no attribute {1}".format(self.__class__.__name__, item))
        return getattr(self.wrapped, item)

    def __setattr__(self, key, value):
        """
        1) If setting :attr:`wrapped` attribute, point parent to this object (the ModelWrapper).
        2) Setting attributes in the right objects. The following rules are processed in order:
        (a) If attribute exists in wrapper, set in wrapper.
        (b) If no object has been wrapped (wrapper is None), set attribute in the wrapper.
        (c) If attribute is found in the wrapped object, set it there. This rule is ignored for AF storages.
        (d) Set attribute in wrapper.
        """
        # Use ==, not 'is': identity comparison of string literals relies on interning
        # and raises a SyntaxWarning on modern CPython.
        if key == 'wrapped':
            object.__setattr__(self, key, value)
            value.__setattr__('_parent', self)
            return

        try:
            # If attribute is in this object, set it. Test by using getattribute instead of hasattr to avoid lookup in
            # wrapped object.
            self.__getattribute__(key)
            super(ModelWrapper, self).__setattr__(key, value)
        except AttributeError:
            # Attribute is not in wrapper.
            # In case no wrapped object is set yet (e.g. constructor), set in wrapper.
            if 'wrapped' not in self.__dict__:
                super(ModelWrapper, self).__setattr__(key, value)
                return

            if hasattr(self, key):
                # Now use hasattr, we know getattribute already failed so if it returns true, it must be in the wrapped
                # object. Hasattr is called on self instead of self.wrapped to account for the different handling of
                # AF storages.
                # Prefer setting the attribute in the wrapped object if exists.
                setattr(self.wrapped, key, value)
            else:
                # If not, set in wrapper nonetheless.
                super(ModelWrapper, self).__setattr__(key, value)

    def __eq__(self, other):
        # A wrapper compares equal to the model it wraps (and, transitively, to
        # wrappers of the same model).
        return self.wrapped == other

    # Defining __eq__ sets __hash__ to None in Python 3; restore identity-based
    # hashing so wrappers remain usable in sets/dicts.
    __hash__ = object.__hash__

    @Parameterized.name.getter
    def name(self):
        # Append the lower-cased wrapper class name to the inherited name, e.g.
        # "model.datascaler" for a DataScaler wrapping "model".
        name = super(ModelWrapper, self).name
        return ".".join([name, self.__class__.__name__.lower()])
69 changes: 27 additions & 42 deletions GPflowOpt/scaling.py
Original file line number Diff line number Diff line change
Expand Up @@ -12,21 +12,23 @@
# See the License for the specific language governing permissions and
# limitations under the License.

from GPflow.param import DataHolder, AutoFlow, Parameterized
from GPflow.model import Model, GPModel
from GPflow.param import DataHolder, AutoFlow
from GPflow.model import GPModel
from GPflow import settings
import numpy as np
from .transforms import LinearTransform, DataTransform
from .domain import UnitCube
from .models import ModelWrapper

float_type = settings.dtypes.float_type


class DataScaler(GPModel):
class DataScaler(ModelWrapper):
"""
Model-wrapping class, primarily intended to assure the data in GPflow models is scaled. One DataScaler wraps one
GPflow model, and can scale the input as well as the output data. By default, if any kind of object attribute
is not found in the datascaler object, it is searched on the wrapped model.
Model-wrapping class, primarily intended to assure the data in GPflow models is scaled.

One DataScaler wraps one GPflow model, and can scale the input as well as the output data. By default,
if any kind of object attribute is not found in the datascaler object, it is searched on the wrapped model.

The datascaler supports both input as well as output scaling, although both scalings are set up differently:

Expand Down Expand Up @@ -59,13 +61,8 @@ def __init__(self, model, domain=None, normalize_Y=False):
:param normalize_Y: (default: False) enable automatic scaling of output values to zero mean and unit
variance.
"""
# model sanity checks
assert (model is not None)
assert (isinstance(model, GPModel))
self._parent = None

# Wrap model
self.wrapped = model
# model sanity checks, slightly stronger conditions than the wrapper
super(DataScaler, self).__init__(model)

# Initial configuration of the datascaler
n_inputs = model.X.shape[1]
Expand All @@ -74,34 +71,8 @@ def __init__(self, model, domain=None, normalize_Y=False):
self._normalize_Y = normalize_Y
self._output_transform = LinearTransform(np.ones(n_outputs), np.zeros(n_outputs))

# The assignments in the constructor of GPModel take care of initial re-scaling of model data.
super(DataScaler, self).__init__(model.X.value, model.Y.value, None, None, 1, name=model.name+"_datascaler")
del self.kern
del self.mean_function
del self.likelihood

def __getattr__(self, item):
"""
If an attribute is not found in this class, it is searched in the wrapped model
"""
return self.wrapped.__getattribute__(item)

def __setattr__(self, key, value):
"""
If setting :attr:`wrapped` attribute, point parent to this object (the datascaler)
"""
if key is 'wrapped':
object.__setattr__(self, key, value)
value.__setattr__('_parent', self)
return

super(DataScaler, self).__setattr__(key, value)

def __eq__(self, other):
return self.wrapped == other

def __str__(self, prepend=''):
return self.wrapped.__str__(prepend)
self.X = model.X.value
self.Y = model.Y.value

@property
def input_transform(self):
Expand Down Expand Up @@ -216,6 +187,20 @@ def build_predict(self, Xnew, full_cov=False):
f, var = self.wrapped.build_predict(self.input_transform.build_forward(Xnew), full_cov=full_cov)
return self.output_transform.build_backward(f), self.output_transform.build_backward_variance(var)

@AutoFlow((float_type, [None, None]))
def predict_f(self, Xnew):
"""
Compute the mean and variance of held-out data at the points Xnew
"""
return self.build_predict(Xnew)

@AutoFlow((float_type, [None, None]))
def predict_f_full_cov(self, Xnew):
"""
Compute the mean and variance of held-out data at the points Xnew
"""
return self.build_predict(Xnew, full_cov=True)

@AutoFlow((float_type, [None, None]))
def predict_y(self, Xnew):
"""
Expand All @@ -230,6 +215,6 @@ def predict_density(self, Xnew, Ynew):
"""
Compute the (log) density of the data Ynew at the points Xnew
"""
mu, var = self.build_predict(Xnew)
mu, var = self.wrapped.build_predict(self.input_transform.build_forward(Xnew))
Ys = self.output_transform.build_forward(Ynew)
return self.likelihood.predict_density(mu, var, Ys)
5 changes: 0 additions & 5 deletions GPflowOpt/transforms.py
Original file line number Diff line number Diff line change
Expand Up @@ -61,9 +61,6 @@ def __invert__(self):
"""
raise NotImplementedError

def __str__(self):
raise NotImplementedError


class LinearTransform(DataTransform):
"""
Expand Down Expand Up @@ -155,5 +152,3 @@ def __invert__(self):
A_inv = np.linalg.inv(self.A.value.T)
return LinearTransform(A_inv, -np.dot(self.b.value, A_inv))

def __str__(self):
return 'XA + b'
8 changes: 8 additions & 0 deletions doc/source/interfaces.rst
Original file line number Diff line number Diff line change
Expand Up @@ -36,3 +36,11 @@ Transform
:special-members:
.. autoclass:: GPflowOpt.transforms.DataTransform
:special-members:

ModelWrapper
------------
.. automodule:: GPflowOpt.models
:special-members:
.. autoclass:: GPflowOpt.models.ModelWrapper
:members:
:special-members:
9 changes: 4 additions & 5 deletions testing/test_datascaler.py
Original file line number Diff line number Diff line change
Expand Up @@ -26,8 +26,6 @@ def test_object_integrity(self):
Xs, Ys = m.X.value, m.Y.value
n = DataScaler(m, self.domain)

self.assertEqual(n.wrapped, m)
self.assertEqual(m._parent, n)
self.assertTrue(np.allclose(Xs, n.X.value))
self.assertTrue(np.allclose(Ys, n.Y.value))

Expand Down Expand Up @@ -80,7 +78,7 @@ def test_enabling_transforms(self):

def test_predict_scaling(self):
m = self.create_parabola_model()
n = DataScaler(self.create_parabola_model(), self.domain)
n = DataScaler(self.create_parabola_model(), self.domain, normalize_Y=True)
m.optimize()
n.optimize()

Expand All @@ -100,7 +98,8 @@ def test_predict_scaling(self):
self.assertTrue(np.allclose(fr, fs, atol=1e-3))
self.assertTrue(np.allclose(vr, vs, atol=1e-3))

Yt = parabola2d(Xt) #+ np.random.rand(20, 1) * 0.05
Yt = parabola2d(Xt)
fr = m.predict_density(Xt, Yt)
fs = n.predict_density(Xt, Yt)
np.testing.assert_allclose(fr, fs, rtol=1e-3)
np.testing.assert_allclose(fr, fs, rtol=1e-2)

Loading