mirror of
https://github.com/SheffieldML/GPy.git
synced 2026-05-09 12:02:38 +02:00
small tweak to the gradients in sparse GP
This commit is contained in:
parent
e6d08348c7
commit
ab56f3f9b7
3 changed files with 5 additions and 10 deletions
|
|
@@ -1,7 +1,6 @@
|
|||
# Copyright (c) 2012, GPy authors (see AUTHORS.txt).
|
||||
# Licensed under the BSD 3-clause license (see LICENSE.txt)
|
||||
|
||||
|
||||
import pylab as pb
|
||||
import numpy as np
|
||||
import GPy
|
||||
|
|
@@ -9,18 +8,15 @@ pb.ion()
|
|||
pb.close('all')
|
||||
|
||||
|
||||
######################################
|
||||
## 1 dimensional example
|
||||
|
||||
# sample inputs and outputs
|
||||
S = np.ones((20,1))
|
||||
X = np.random.uniform(-3.,3.,(20,1))
|
||||
Y = np.sin(X)+np.random.randn(20,1)*0.05
|
||||
|
||||
k = GPy.kern.bias(1) + GPy.kern.white(1)
|
||||
k = GPy.kern.rbf(1) + GPy.kern.white(1)
|
||||
|
||||
# create simple GP model
|
||||
m = GPy.models.uncertain_input_GP_regression(X,Y,S,kernel=k)
|
||||
m = GPy.models.sparse_GP_regression(X,Y,X_uncertainty=S,kernel=k)
|
||||
|
||||
# constrain all parameters to be positive
|
||||
m.constrain_positive('(variance|prec)')
|
||||
|
|
|
|||
|
|
@@ -54,5 +54,6 @@ class sparse_GPLVM(sparse_GP_regression, GPLVM):
|
|||
|
||||
def plot(self):
|
||||
GPLVM.plot(self)
|
||||
# passing Z without a small amount of jitter will induce the white kernel where we don't want it!
|
||||
mu, var = sparse_GP_regression.predict(self, self.Z+np.random.randn(*self.Z.shape)*0.0001)
|
||||
pb.plot(mu[:, 0] , mu[:, 1], 'ko')
|
||||
|
|
|
|||
|
|
@@ -1,7 +1,6 @@
|
|||
# Copyright (c) 2012, GPy authors (see AUTHORS.txt).
|
||||
# Licensed under the BSD 3-clause license (see LICENSE.txt)
|
||||
|
||||
|
||||
import numpy as np
|
||||
import pylab as pb
|
||||
from ..util.linalg import mdot, jitchol, chol_inv, pdinv
|
||||
|
|
@@ -50,7 +49,7 @@ class sparse_GP_regression(GP_regression):
|
|||
self.has_uncertain_inputs=False
|
||||
else:
|
||||
assert X_uncertainty.shape==X.shape
|
||||
self.has_uncertain_inputs=False
|
||||
self.has_uncertain_inputs=True
|
||||
self.X_uncertainty = X_uncertainty
|
||||
|
||||
GP_regression.__init__(self, X, Y, kernel=kernel, normalize_X=normalize_X, normalize_Y=normalize_Y)
|
||||
|
|
@@ -99,8 +98,7 @@ class sparse_GP_regression(GP_regression):
|
|||
|
||||
# Compute dL_dpsi
|
||||
self.dL_dpsi0 = - 0.5 * self.D * self.beta * np.ones(self.N)
|
||||
dC_dpsi1 = (self.LLambdai.T[:,:, None, None] * self.V)
|
||||
self.dL_dpsi1 = (dC_dpsi1*self.C[None,:,None,:]).sum(1).sum(-1)
|
||||
self.dL_dpsi1 = (self.LLambdai.T[:,:,None,None]*self.V*self.C[None,:,None,:]).sum(1).sum(-1)
|
||||
self.dL_dpsi2 = - 0.5 * self.beta * (self.D*(self.LBL_inv - self.Kmmi) + self.G)
|
||||
|
||||
# Compute dL_dKmm
|
||||
|
|
|
|||
Loading…
Add table
Add a link
Reference in a new issue