diff --git a/GPy/core/gp_base.py b/GPy/core/gp_base.py
index 41c4a001..547985ef 100644
--- a/GPy/core/gp_base.py
+++ b/GPy/core/gp_base.py
@@ -5,6 +5,7 @@ import pylab as pb
 from model import Model
 import warnings
 from ..likelihoods import Gaussian, Gaussian_Mixed_Noise
+from ..core.parameter import ObservableArray

 class GPBase(Model):
     """
@@ -14,7 +15,7 @@ class GPBase(Model):

     def __init__(self, X, likelihood, kernel, normalize_X=False):
         super(GPBase, self).__init__()
-        self.X = X
+        self.X = ObservableArray(X)
         assert len(self.X.shape) == 2
         self.num_data, self.input_dim = self.X.shape
         assert isinstance(kernel, kern.kern)
@@ -26,13 +27,14 @@ class GPBase(Model):
         if normalize_X:
             self._Xoffset = X.mean(0)[None, :]
             self._Xscale = X.std(0)[None, :]
-            self.X = (X.copy() - self._Xoffset) / self._Xscale
+            self.X = ObservableArray((X.copy() - self._Xoffset) / self._Xscale)
         else:
             self._Xoffset = np.zeros((1, self.input_dim))
             self._Xscale = np.ones((1, self.input_dim))

         self.add_parameter(self.kern, gradient=self.dL_dtheta)
         self.add_parameter(self.likelihood, gradient=self.dL_dlikelihood)
+        self.kern.connect_input(self.X)

         # Model.__init__(self)
         # All leaf nodes should call self._set_params(self._get_params()) at
diff --git a/GPy/core/parameter.py b/GPy/core/parameter.py
index 79660162..dcf26c3b 100644
--- a/GPy/core/parameter.py
+++ b/GPy/core/parameter.py
@@ -6,7 +6,7 @@ Created on 4 Sep 2013
 import itertools
 import numpy
 from transformations import Logexp, NegativeLogexp, Logistic
-from parameterized import Parentable
+from parameterized import Nameable, Pickleable

 ###### printing
 __constraints_name__ = "Constraint"
@@ -16,7 +16,48 @@ __precision__ = numpy.get_printoptions()['precision'] # numpy printing precision
 __print_threshold__ = 5
 ######

-class Param(numpy.ndarray, Parentable):
+class ListArray(numpy.ndarray):
+    """
+    An ndarray which can be stored in lists and membership-checked
+    with ``in``, because it compares by identity.
+    """
+    def __new__(cls, input_array):
+        obj = numpy.asanyarray(input_array).view(cls)
+        return obj
+    def __eq__(self, other):
+        return other is self
+
+class ObservableArray(ListArray):
+    """
+    An ndarray which reports changes to its observers.
+    The observers can add themselves with a callable, which
+    will be called every time this array changes. The callable
+    takes exactly one argument, which is this array itself.
+    """
+    def __new__(cls, input_array):
+        obj = super(ObservableArray, cls).__new__(cls, input_array).view(cls)
+        obj._observers_ = {}
+        return obj
+    def __array_finalize__(self, obj):
+        # see InfoArray.__array_finalize__ for comments
+        if obj is None: return
+        self._observers_ = getattr(obj, '_observers_', None)
+    def add_observer(self, observer, callble):
+        self._observers_[observer] = callble
+    def remove_observer(self, observer):
+        del self._observers_[observer]
+    def _notify_observers(self):
+        for callble in self._observers_.itervalues():
+            callble(self)
+    def __setitem__(self, s, val):
+        if not numpy.all(numpy.equal(self[s], val)):
+            # delegate to ndarray.__setitem__ (numpy.put cannot handle
+            # slice or tuple indices) and notify only on an actual change
+            super(ObservableArray, self).__setitem__(s, val)
+            self._notify_observers()
+    def __getslice__(self, start, stop):
+        return self.__getitem__(slice(start, stop))
+    def __setslice__(self, start, stop, val):
+        return self.__setitem__(slice(start, stop), val)
+
+
+class Param(ObservableArray, Nameable, Pickleable):
     """
     Parameter object for GPy models.
@@ -41,9 +82,9 @@ class Param(numpy.ndarray, Parentable):
     """
     __array_priority__ = -numpy.inf # Never give back Param
     def __new__(cls, name, input_array, *args, **kwargs):
-        obj = numpy.atleast_1d(numpy.array(input_array)).view(cls)
-        obj._name_ = name
+        obj = numpy.atleast_1d(super(Param, cls).__new__(cls, input_array=input_array))
         obj._direct_parent_ = None
+        obj._name_ = name
         obj._parent_index_ = None
         obj._highest_parent_ = None
         obj._current_slice_ = (slice(obj.shape[0]),)
@@ -55,7 +96,10 @@ class Param(numpy.ndarray, Parentable):
         obj._tied_to_me_ = ParamDict(set)
         obj._tied_to_ = []
         obj._original_ = True
-        return obj
+        return obj
+    def __init__(self, name, input_array):
+        super(Param, self).__init__(name=name)
+
     def __array_finalize__(self, obj):
         # see InfoArray.__array_finalize__ for comments
         if obj is None: return
@@ -71,11 +115,6 @@ class Param(numpy.ndarray, Parentable):
         self._realndim_ = getattr(obj, '_realndim_', None)
         self._updated_ = getattr(obj, '_updated_', None)
         self._original_ = getattr(obj, '_original_', None)
-    def __eq__(self, other):
-        return other is self
-        if other is self:
-            return True
-        return super(Param, self).__eq__(other)
     def __array_wrap__(self, out_arr, context=None):
         return out_arr.view(numpy.ndarray)

@@ -117,24 +156,24 @@ class Param(numpy.ndarray, Parentable):
     #===========================================================================
     def _set_params(self, param):
         self.flat = param
-        self._fire_changed()
+        self._notify_tied_parameters()
     def _get_params(self):
         return self.flat
-    @property
-    def name(self):
-        """
-        Name of this parameter.
-        This can be a callable without parameters. The callable will be called
-        every time the name property is accessed.
-        """
-        if callable(self._name_):
-            return self._name_()
-        return self._name_
-    @name.setter
-    def name(self, new_name):
-        from_name = self.name
-        self._name_ = new_name
-        self._direct_parent_._name_changed(self, from_name)
+#     @property
+#     def name(self):
+#         """
+#         Name of this parameter.
+#         This can be a callable without parameters. The callable will be called
+#         every time the name property is accessed.
+#         """
+#         if callable(self._name_):
+#             return self._name_()
+#         return self._name_
+#     @name.setter
+#     def name(self, new_name):
+#         from_name = self.name
+#         self._name_ = new_name
+#         self._direct_parent_._name_changed(self, from_name)
     @property
     def _parameters_(self):
         return []
@@ -232,6 +271,11 @@ class Param(numpy.ndarray, Parentable):
         Broadcasting is allowed, so you can tie a whole dimension to
         one parameter: self[:,0].tie_to(other), where other is a
         one-value parameter.
+
+        Note: ties are always made to the parameter at the end of a
+        chain of ties. If you tie to a parameter that is itself tied,
+        the tie will therefore be created to the parameter it is tied
+        to.
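+
+        For example, with hypothetical parameters p, q and r of the
+        same model::
+
+            q.tie_to(r)
+            p.tie_to(q)  # p is now tied directly to r, not to q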
""" assert isinstance(param, Param), "Argument {1} not of type {0}".format(Param,param.__class__) try: @@ -241,9 +285,17 @@ class Param(numpy.ndarray, Parentable): self._direct_parent_._get_original(self)[self._current_slice_] = param except ValueError: raise ValueError("Trying to tie {} with shape {} to {} with shape {}".format(self.name, self.shape, param.name, param.shape)) + if param is self: + raise RuntimeError, 'Cyclic tieing is not allowed' + if len(param._tied_to_) > 0: + self.tie_to(param._tied_to_[0]) + return self._direct_parent_._get_original(self)._tied_to_ += [param] param._add_tie_listener(self) self._highest_parent_._set_fixed(self) + for t in self._tied_to_me_.iterkeys(): + t.untie() + t.tie_to(param) # self._direct_parent_._add_tie(self, param) def untie(self, *ties): @@ -254,9 +306,9 @@ class Param(numpy.ndarray, Parentable): self._tied_to_ = [tied_to for tied_to in self._tied_to_ for t in tied_to._tied_to_me_ if self._parent_index_==t._direct_parent_._get_original(t)._parent_index_] self._highest_parent_._set_unfixed(self) # self._direct_parent_._remove_tie(self, *params) - def _fire_changed(self): + def _notify_tied_parameters(self): for tied, ind in self._tied_to_me_.iteritems(): - tied._on_change(self.base, list(ind)) + tied._on_tied_parameter_changed(self.base, list(ind)) def _add_tie_listener(self, tied_to_me): self._tied_to_me_[tied_to_me] |= set(self._raveled_index()) def _remove_tie_listener(self, to_remove): @@ -271,14 +323,14 @@ class Param(numpy.ndarray, Parentable): del self._tied_to_me_[tmp] else: del self._tied_to_me_[t] - def _on_change(self, val, ind): + def _on_tied_parameter_changed(self, val, ind): if not self._updated_: #not fast_array_equal(self, val[ind]): self._updated_ = True if self._original_: self.__setitem__(slice(None), val[ind], update=False) else: # this happens when indexing created a copy of the array self._direct_parent_._get_original(self).__setitem__(self._current_slice_, val[ind], update=False) - self._fire_changed() + self._notify_tied_parameters() self._updated_ = False #=========================================================================== # Prior Operations @@ -307,17 +359,13 @@ class Param(numpy.ndarray, Parentable): s = (s,) if not reduce(lambda a,b: a or numpy.any(b is Ellipsis), s, False) and len(s) <= self.ndim: s += (Ellipsis,) - new_arr = numpy.ndarray.__getitem__(self, s, *args, **kwargs) + new_arr = super(Param, self).__getitem__(s, *args, **kwargs) try: new_arr._current_slice_ = s; new_arr._original_ = self.base is new_arr.base except AttributeError: pass# returning 0d array or float, double etc return new_arr - def __getslice__(self, start, stop): - return self.__getitem__(slice(start, stop)) - def __setslice__(self, start, stop, val): - return self.__setitem__(slice(start, stop), val) def __setitem__(self, s, val, update=True): - numpy.ndarray.__setitem__(self, s, val) - self._fire_changed() + super(Param, self).__setitem__(s, val) + self._notify_tied_parameters() if update: self._highest_parent_.parameters_changed() #=========================================================================== @@ -494,7 +542,7 @@ class ParamConcatenation(object): def __setitem__(self, s, val, update=True): ind = numpy.zeros(sum(self._param_sizes), dtype=bool); ind[s] = True; vals = self._vals(); vals[s] = val; del val - [numpy.place(p, ind[ps], vals[ps]) and p._fire_changed() + [numpy.place(p, ind[ps], vals[ps]) and p._notify_tied_parameters() for p, ps in zip(self.params, self._param_slices_)] if update: 
            self.params[0]._highest_parent_.parameters_changed()
diff --git a/GPy/core/parameterized.py b/GPy/core/parameterized.py
index 9c67624e..669f0d72 100644
--- a/GPy/core/parameterized.py
+++ b/GPy/core/parameterized.py
@@ -32,6 +32,24 @@ class Nameable(Parentable):
         if self.has_parent():
             self._direct_parent_._name_changed(self, from_name)

+class Pickleable(object):
+    def getstate(self):
+        """
+        Returns the state of this class in a memento pattern.
+        The state must be a list-like structure of all the fields
+        this class needs to run.
+        """
+        raise NotImplementedError, "To be able to use pickling you need to implement this method"
+    def setstate(self, state):
+        """
+        Set the state (memento pattern) of this class to the given state.
+        Usually this is just the counterpart to getstate, such that
+        an object is a copy of another when calling
+
+            copy = <class>.__new__(*args,**kw).setstate(<other>.getstate())
+        """
+        raise NotImplementedError, "To be able to use pickling you need to implement this method"
+
 from parameter import ParamConcatenation
 from index_operations import ParameterIndexOperations,\
 index_empty
@@ -47,7 +65,7 @@ FIXED = False
 UNFIXED = True

 #===============================================================================
-class Parameterized(Nameable):
+class Parameterized(Nameable, Pickleable):
     """
     Parameterized class

@@ -161,7 +179,7 @@ class Parameterized(Nameable):

     def add_parameters(self, *parameters):
         """
-        convinience method for adding several
+        convenience method for adding several
         parameters without gradient specification
         """
         [self.add_parameter(p) for p in parameters]
@@ -178,8 +196,15 @@ class Parameterized(Nameable):
#             or p in names_params_indices)]
#         self._connect_parameters()
     def parameters_changed(self):
+        """
+        This method gets called when parameters have changed.
+        Another way of listening to parameter changes is to
+        add self as an observer of the parameter, such that
+        updates get passed through. See :py:meth:`GPy.core.parameter.ObservableArray.add_observer`.
+        """
         # will be called as soon as paramters have changed
         pass
+
     def _connect_parameters(self):
         # connect parameterlist to this parameterized object
         # This just sets up the right connection for the params objects
diff --git a/GPy/kern/kern.py b/GPy/kern/kern.py
index 8f92ccbc..7c18fc87 100644
--- a/GPy/kern/kern.py
+++ b/GPy/kern/kern.py
@@ -52,6 +52,9 @@ class kern(Parameterized):
     def parameters_changed(self):
         [p.parameters_changed() for p in self._parameters_]

+    def connect_input(self, Xparam):
+        [p.connect_input(Xparam) for p in self._parameters_]
+
     def getstate(self):
         """
         Get the current state of the class,
diff --git a/GPy/kern/parts/bias.py b/GPy/kern/parts/bias.py
index 22e0882b..715363de 100644
--- a/GPy/kern/parts/bias.py
+++ b/GPy/kern/parts/bias.py
@@ -16,7 +16,7 @@ class Bias(Kernpart):
         :type variance: float
         """
         super(Bias, self).__init__(input_dim, 'bias')
-        self.variance = Param("variance", variance, None)
+        self.variance = Param("variance", variance)
         self.add_parameter(self.variance)
         #self._set_params(np.array([variance]).flatten())

diff --git a/GPy/kern/parts/kernpart.py b/GPy/kern/parts/kernpart.py
index 9843bf7d..24612d22 100644
--- a/GPy/kern/parts/kernpart.py
+++ b/GPy/kern/parts/kernpart.py
@@ -21,7 +21,22 @@ class Kernpart(Parameterized):
         # the name of the covariance function.
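+        # NOTE: besides its parameters, a kernel part can now also observe
+        # its inputs; see connect_input() and on_input_change() below.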
         # link to parameterized objects
         self._parameters_ = []
+        self._X = None
+
+    def connect_input(self, X):
+        X.add_observer(self, self.on_input_change)
+        self._X = X
+
+    def on_input_change(self, X):
+        """
+        During optimization this function will be called when
+        the inputs X change. Use this to update caches dependent
+        on the inputs X.
+        """
+        # overwrite this to update the kernel when the inputs X change
+        pass
+
#     def set_as_parameter_named(self, name, gradient, index=None, *args, **kwargs):
#         """
#         :param names: name of parameter to set as parameter
diff --git a/GPy/kern/parts/linear.py b/GPy/kern/parts/linear.py
index f36e5968..6671579c 100644
--- a/GPy/kern/parts/linear.py
+++ b/GPy/kern/parts/linear.py
@@ -39,16 +39,23 @@ class Linear(Kernpart):
         else:
             if variances is not None:
                 variances = np.asarray(variances)
-                assert variances.size == self.input_dim, "bad number of lengthscales"
+                assert variances.size == self.input_dim, "bad number of variances, need one ARD variance per input_dim"
             else:
                 variances = np.ones(self.input_dim)
         self.variances = Param('variances', variances)
         self.add_parameters(self.variances)
+        self.variances.add_observer(self, self.update_variance)
+        self.update_variance(self.variances) # initialize the cached square

         # initialize cache
         self._Z, self._mu, self._S = np.empty(shape=(3, 1))
         self._X, self._X2 = np.empty(shape=(2, 1))
+
+    def update_variance(self, v):
+        self.variances2 = np.square(self.variances)
+
+    def on_input_change(self, X):
+        self._K_computations(X, None)

#     def _get_params(self):
#         return self.variances
@@ -56,8 +63,8 @@ class Linear(Kernpart):
#     def _set_params(self, x):
#         assert x.size == (self.num_params)
#         self.variances = x
-    def parameters_changed(self):
-        self.variances2 = np.square(self.variances)
+    #def parameters_changed(self):
+    #    self.variances2 = np.square(self.variances)
#
#     def _get_param_names(self):
#         if self.num_params == 1:
@@ -74,7 +81,8 @@ class Linear(Kernpart):
             XX2 = X2 * np.sqrt(self.variances)
             target += np.dot(XX, XX2.T)
         else:
-            self._K_computations(X, X2)
+            if X is not self._X or X2 is not None:
+                self._K_computations(X, X2)
             target += self.variances * self._dot_product

     def Kdiag(self, X, target):
@@ -88,7 +96,8 @@ class Linear(Kernpart):
             product = X[:, None, :] * X2[None, :, :]
             target += (dL_dK[:, :, None] * product).sum(0).sum(0)
         else:
-            self._K_computations(X, X2)
+            if X is not self._X or X2 is not None:
+                self._K_computations(X, X2)
             target += np.sum(self._dot_product * dL_dK)

     def dKdiag_dtheta(self, dL_dKdiag, X, target):
diff --git a/GPy/kern/parts/rbf.py b/GPy/kern/parts/rbf.py
index df615ac9..f026820c 100644
--- a/GPy/kern/parts/rbf.py
+++ b/GPy/kern/parts/rbf.py
@@ -50,32 +50,34 @@ class RBF(Kernpart):
         else:
             lengthscale = np.ones(self.input_dim)

-        #self._set_params(np.hstack((variance, lengthscale.flatten())))
-        self.variance = Param('variance', variance, None)
-        self.lengthscale = Param('lengthscale', lengthscale, None)
-
+        self.variance = Param('variance', variance)
+        self.lengthscale = Param('lengthscale', lengthscale)
+        self.lengthscale.add_observer(self, self.update_lengthscale)
+        self.update_lengthscale(self.lengthscale) # initialize the cached square
         self.add_parameters(self.variance, self.lengthscale)
-#         self.set_as_parameter('variance', self.variance, None)
-#         self.set_as_parameter('lengthscale', self.lengthscale, None)

         # initialize cache
-        self._Z, self._mu, self._S = np.empty(shape=(3, 1))
-        self._X, self._X2, self._params_save = np.empty(shape=(3, 1))
+        #self._Z, self._mu, self._S = np.empty(shape=(3, 1))
+        #self._X, self._X2, self._params_save = np.empty(shape=(3, 1))

         # a set of optional args to pass to weave
         self.weave_options = {'headers' : ['<omp.h>'],
                              'extra_compile_args': ['-fopenmp -O3'], # -march=native'],
                              'extra_link_args'   : ['-lgomp']}

+    def on_input_change(self, X):
+        self._K_computations(X, None)
+
+    def update_lengthscale(self, l):
+        self.lengthscale2 = np.square(self.lengthscale)

     def parameters_changed(self):
-        self.lengthscale2 = np.square(self.lengthscale)
         # reset cached results
         #self._X, self._X2, self._params_save = np.empty(shape=(3, 1))
         #self._Z, self._mu, self._S = np.empty(shape=(3, 1)) # cached versions of Z,mu,S
-        self._X, self._X2 = np.empty(shape=(2, 1))
-        self._Z, self._mu, self._S = np.empty(shape=(3, 1)) # cached versions of Z,mu,S
-
+        #self._X, self._X2 = np.empty(shape=(2, 1))
+        #self._Z, self._mu, self._S = np.empty(shape=(3, 1)) # cached versions of Z,mu,S
+        pass
#     def _get_params(self):
#         return np.hstack((self.variance, self.lengthscale))
# #
@@ -97,14 +99,17 @@ class RBF(Kernpart):
#         return ['variance'] + ['lengthscale_%i' % i for i in range(self.lengthscale.size)]

     def K(self, X, X2, target):
-        self._K_computations(X, X2)
+        if self._X is None or X.base is not self._X.base or X2 is not None:
+            self._K_computations(X, X2)
         target += self.variance * self._K_dvar

     def Kdiag(self, X, target):
         np.add(target, self.variance, target)

     def dK_dtheta(self, dL_dK, X, X2, target):
-        self._K_computations(X, X2)
+        if self._X is None or X.base is not self._X.base or X2 is not None:
+            self._K_computations(X, X2)
         target[0] += np.sum(self._K_dvar * dL_dK)
         if self.ARD:
             dvardLdK = self._K_dvar * dL_dK
@@ -152,7 +157,8 @@ class RBF(Kernpart):
         target[0] += np.sum(dL_dKdiag)

     def dK_dX(self, dL_dK, X, X2, target):
-        self._K_computations(X, X2)
+        if self._X is None or X.base is not self._X.base or X2 is not None:
+            self._K_computations(X, X2)
         if X2 is None:
             _K_dist = 2*(X[:, None, :] - X[None, :, :])
         else:
@@ -241,7 +247,7 @@ class RBF(Kernpart):
     def _K_computations(self, X, X2):
         #params = self._get_params()
         if not (fast_array_equal(X, self._X) and fast_array_equal(X2, self._X2)):# and fast_array_equal(self._params_save , params)):
-            self._X = X.copy()
+            #self._X = X.copy()
             #self._params_save = params.copy()
             if X2 is None:
                 self._X2 = None
diff --git a/GPy/kern/parts/white.py b/GPy/kern/parts/white.py
index bca0918a..fbb4c72d 100644
--- a/GPy/kern/parts/white.py
+++ b/GPy/kern/parts/white.py
@@ -17,7 +17,7 @@ class White(Kernpart):
     def __init__(self,input_dim,variance=1.):
         super(White, self).__init__(input_dim, 'white')
         self.input_dim = input_dim
-        self.variance = Param('variance', variance, None)
+        self.variance = Param('variance', variance)
         self.add_parameters(self.variance)
#         self._set_params(np.array([variance]).flatten())
         self._psi1 = 0 # TODO: more elegance here
diff --git a/GPy/models/gplvm.py b/GPy/models/gplvm.py
index ad78d51f..503b0db1 100644
--- a/GPy/models/gplvm.py
+++ b/GPy/models/gplvm.py
@@ -13,6 +13,7 @@ from ..core import GP
 from ..likelihoods import Gaussian
 from .. import util
 from GPy.util import plot_latent
+from GPy.core.parameter import Param

 class GPLVM(GP):
@@ -34,7 +35,9 @@ class GPLVM(GP):
             kernel = kern.rbf(input_dim, ARD=input_dim > 1) + kern.bias(input_dim, np.exp(-2))
         likelihood = Gaussian(Y, normalize=normalize_Y, variance=np.exp(-2.))
         GP.__init__(self, X, likelihood, kernel, normalize_X=False)
-        self.set_prior('.*X', Gaussian_prior(0, 1))
+        self.X = Param('q_mean', self.X)
+        self.add_parameter(self.X, self.dK_dX, 0)
+        #self.set_prior('.*X', Gaussian_prior(0, 1))
         self.ensure_default_constraints()

     def initialise_latent(self, init, input_dim, Y):
@@ -50,33 +53,35 @@ class GPLVM(GP):
     def setstate(self, state):
         GP.setstate(self, state)

-    def _get_param_names(self):
-        return sum([['X_%i_%i' % (n, q) for q in range(self.input_dim)] for n in range(self.num_data)], []) + GP._get_param_names(self)
+#     def _get_param_names(self):
+#         return sum([['X_%i_%i' % (n, q) for q in range(self.input_dim)] for n in range(self.num_data)], []) + GP._get_param_names(self)
+#
+#     def _get_params(self):
+#         return np.hstack((self.X.flatten(), GP._get_params(self)))
+#
+#     def _set_params(self, x):
+#         self.X = x[:self.num_data * self.input_dim].reshape(self.num_data, self.input_dim).copy()
+#         GP._set_params(self, x[self.X.size:])

-    def _get_params(self):
-        return np.hstack((self.X.flatten(), GP._get_params(self)))
-
-    def _set_params(self, x):
-        self.X = x[:self.num_data * self.input_dim].reshape(self.num_data, self.input_dim).copy()
-        GP._set_params(self, x[self.X.size:])
-
-    def _log_likelihood_gradients(self):
-        dL_dX = self.kern.dK_dX(self.dL_dK, self.X)
-
-        return np.hstack((dL_dX.flatten(), GP._log_likelihood_gradients(self)))
+    def dK_dX(self):
+        return self.kern.dK_dX(self.dL_dK, self.X)
+#     def _log_likelihood_gradients(self):
+#         dL_dX = self.kern.dK_dX(self.dL_dK, self.X)
+#
+#         return np.hstack((dL_dX.flatten(), GP._log_likelihood_gradients(self)))

     def jacobian(self,X):
         target = np.zeros((X.shape[0],X.shape[1],self.output_dim))
         for i in range(self.output_dim):
-            target[:,:,i]=self.kern.dK_dX(np.dot(self.Ki,self.likelihood.Y[:,i])[None, :],X,self.X)
+            target[:,:,i]=self.kern.dK_dX(np.dot(self.Ki,self.likelihood.Y[:,i])[None, :],X,self.X)
         return target

     def magnification(self,X):
         target=np.zeros(X.shape[0])
-        J = np.zeros((X.shape[0],X.shape[1],self.output_dim))
-        J=self.jacobian(X)
+        #J = np.zeros((X.shape[0],X.shape[1],self.output_dim))
+        J = self.jacobian(X)
         for i in range(X.shape[0]):
-            target[i]=np.sqrt(pb.det(np.dot(J[i,:,:],np.transpose(J[i,:,:]))))
+            target[i]=np.sqrt(pb.det(np.dot(J[i,:,:],np.transpose(J[i,:,:]))))
         return target

     def plot(self):
diff --git a/GPy/util/visualize.py b/GPy/util/visualize.py
index ecdf78ce..683c6c67 100644
--- a/GPy/util/visualize.py
+++ b/GPy/util/visualize.py
@@ -4,7 +4,7 @@ import GPy
 import numpy as np
 import matplotlib as mpl
 import time
-import Image
+from PIL import Image
 try:
     import visual
     visual_available = True
diff --git a/doc/tuto_creating_new_kernels.rst b/doc/tuto_creating_new_kernels.rst
index 6d30fe05..52ac20f7 100644
--- a/doc/tuto_creating_new_kernels.rst
+++ b/doc/tuto_creating_new_kernels.rst
@@ -35,45 +35,72 @@
 The implementation of this function in mandatory. For all kernparts the first parameter ``input_dim`` corresponds to the
 dimension of the input space, and the following parameters stand for the parameterization of the kernel.

-The following attributes are compulsory: ``self.input_dim`` (the dimension, integer), ``self.name`` (name of the kernel, string), ``self.num_params`` (number of parameters, integer). ::
+You have to call ``super(<class_name>, self).__init__(input_dim,
+name)`` to make sure the input dimension and name of the kernel are
+stored in the right place. These attributes are available as
+``self.input_dim`` and ``self.name`` at runtime.
+
+.. The following attributes are compulsory: ``self.input_dim`` (the dimension, integer), ``self.name`` (name of the kernel, string), ``self.num_params`` (number of parameters, integer). ::
+
+Parameterization is done by adding
+:py:class:`GPy.core.parameter.Param` objects to ``self`` and using
+them as normal numpy arrays in your code. The parameters have
+to be added by calling
+:py:meth:`GPy.core.parameterized.Parameterized.add_parameters`
+with the :py:class:`GPy.core.parameter.Param` objects as arguments. ::

     def __init__(self,input_dim,variance=1.,lengthscale=1.,power=1.):
-        assert input_dim == 1, "For this kernel we assume input_dim=1"
-        self.input_dim = input_dim
-        self.num_params = 3
-        self.name = 'rat_quad'
-        self.variance = variance
-        self.lengthscale = lengthscale
-        self.power = power
+        super(RationalQuadratic, self).__init__(input_dim, 'rat_quad')
+        assert input_dim == 1, "For this kernel we assume input_dim=1"
+        self.variance = Param('variance', variance)
+        self.lengthscale = Param('lengthscale', lengthscale)
+        self.power = Param('power', power)
+        self.add_parameters(self.variance, self.lengthscale, self.power)

-**_get_params(self)**
+From now on you can use the parameters ``self.variance``,
+``self.lengthscale`` and ``self.power`` as normal numpy arrays in your
+code. Updates from the optimization routine will be applied
+automatically.

-The implementation of this function in mandatory.
+**parameters_changed(self)**

-This function returns a one dimensional array of length ``self.num_params`` containing the value of the parameters. ::
+The implementation of this function is optional.

-    def _get_params(self):
-        return np.hstack((self.variance,self.lengthscale,self.power))
+This function serves as a callback for each optimization iteration:
+whenever an optimization step has successfully changed the parameters
+(added via :py:meth:`GPy.core.parameterized.Parameterized.add_parameters`),
+this callback is invoked so that any precomputations for the kernel
+can be updated. ::

-**_set_params(self,x)**
+    def parameters_changed(self):
+        pass # nothing to do here

-The implementation of this function in mandatory.
+.. **_get_params(self)**

-The input is a one dimensional array of length ``self.num_params`` containing the value of the parameters. The function has no output but it updates the values of the attribute associated to the parameters (such as ``self.variance``, ``self.lengthscale``, ...). ::
+.. The implementation of this function in mandatory.

-    def _set_params(self,x):
-        self.variance = x[0]
-        self.lengthscale = x[1]
-        self.power = x[2]
+.. This function returns a one dimensional array of length ``self.num_params`` containing the value of the parameters. ::

-**_get_param_names(self)**
+.. def _get_params(self):
+..     return np.hstack((self.variance,self.lengthscale,self.power))

-The implementation of this function in mandatory.
+.. **_set_params(self,x)**

-It returns a list of strings of length ``self.num_params`` corresponding to the parameter names. ::
+.. The implementation of this function in mandatory.
-    def _get_param_names(self):
-        return ['variance','lengthscale','power']
+.. The input is a one dimensional array of length ``self.num_params`` containing the value of the parameters. The function has no output but it updates the values of the attribute associated to the parameters (such as ``self.variance``, ``self.lengthscale``, ...). ::
+
+.. def _set_params(self,x):
+..     self.variance = x[0]
+..     self.lengthscale = x[1]
+..     self.power = x[2]
+
+.. **_get_param_names(self)**
+
+.. The implementation of this function in mandatory.
+
+.. It returns a list of strings of length ``self.num_params`` corresponding to the parameter names. ::
+
+.. def _get_param_names(self):
+..     return ['variance','lengthscale','power']

 **K(self,X,X2,target)**
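+
+This function computes the covariance matrix between ``X`` and ``X2``
+and adds it to the preallocated output array ``target``; when ``X2``
+is ``None`` the kernel is evaluated between ``X`` and itself. A
+minimal sketch for the rational quadratic kernel above (using the
+``self.variance``, ``self.lengthscale`` and ``self.power`` parameters
+from ``__init__``; the exact formula is only an illustration)::
+
+    def K(self, X, X2, target):
+        if X2 is None:
+            X2 = X
+        # squared distances, scaled by the lengthscale (input_dim == 1)
+        dist2 = np.square(X - X2.T) / np.square(self.lengthscale)
+        target += self.variance * np.power(1. + dist2 / (2. * self.power), -self.power)
+
+**Observing parameter and input changes**
+
+Besides overriding ``parameters_changed``, you can register a callback
+on a single parameter with
+:py:meth:`GPy.core.parameter.ObservableArray.add_observer`; the
+callback receives the changed array as its only argument. A minimal
+sketch of both observer hooks (the class and attribute names are
+hypothetical, the API calls are the ones introduced above)::
+
+    import numpy as np
+    from GPy.core.parameter import Param
+    from GPy.kern.parts.kernpart import Kernpart
+
+    class MyKern(Kernpart):
+        def __init__(self, input_dim, lengthscale=1.):
+            super(MyKern, self).__init__(input_dim, 'my_kern')
+            self.lengthscale = Param('lengthscale', np.asarray(lengthscale))
+            self.add_parameters(self.lengthscale)
+            # recompute the cached square whenever the optimizer writes
+            # new values into this parameter array
+            self.lengthscale.add_observer(self, self.update_lengthscale)
+            self.update_lengthscale(self.lengthscale)
+
+        def update_lengthscale(self, l):
+            # l is the changed array itself
+            self.lengthscale2 = np.square(self.lengthscale)
+
+        def on_input_change(self, X):
+            # called when inputs connected via connect_input(X) change;
+            # use this to refresh caches that depend on X
+            pass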