more copyrighting

James Hensman 2014-11-21 11:52:28 +00:00
parent 6aae3a37c8
commit 384b6c70c5
8 changed files with 30 additions and 21 deletions

View file

@@ -1,3 +1,6 @@
# Copyright (c) 2012, James Hensman
# Licensed under the BSD 3-clause license (see LICENSE.txt)
__doc__ = """
Inference over Gaussian process latent functions

View file

@@ -1,4 +1,4 @@
# Copyright (c) 2012, James Hensman
# Copyright (c) 2012-2014, James Hensman
# Licensed under the BSD 3-clause license (see LICENSE.txt)
from posterior import Posterior

View file

@@ -1,4 +1,4 @@
# Copyright (c) 2012, GPy authors (see AUTHORS.txt).
# Copyright (c) 2012-2014, GPy authors (see AUTHORS.txt).
# Licensed under the BSD 3-clause license (see LICENSE.txt)
from posterior import Posterior

View file

@@ -1,3 +1,5 @@
# Copyright (c) 2012-2014, GPy authors (see AUTHORS.txt).
# Licensed under the BSD 3-clause license (see LICENSE.txt)
import numpy as np
from ...util.linalg import pdinv,jitchol,DSYR,tdot,dtrtrs, dpotrs
from posterior import Posterior

View file

@@ -1,3 +1,6 @@
# Copyright (c) 2012-2014, GPy authors (see AUTHORS.txt).
# Licensed under the BSD 3-clause license (see LICENSE.txt)
import numpy as np
from ...util import diag
from ...util.linalg import mdot, jitchol, backsub_both_sides, tdot, dtrtrs, dtrtri, dpotri, dpotrs, symmetrify, DSYR

View file

@@ -1,5 +1,6 @@
"""
"""
# Copyright (c) 2014, Zhenwen Dai
# Licensed under the BSD 3-clause license (see LICENSE.txt)
import numpy as np
from ...core import Model
from ...core.parameterization import variational
@@ -7,27 +8,27 @@ from ...core.parameterization import variational
def infer_newX(model, Y_new, optimize=True, init='L2'):
"""
Infer the distribution of X for the new observed data *Y_new*.
:param model: the GPy model used in inference
:type model: GPy.core.Model
:param Y_new: the new observed data for inference
:type Y_new: numpy.ndarray
:param optimize: whether to optimize the location of new X (True by default)
:type optimize: boolean
:return: a tuple containing the estimated posterior distribution of X and the model that optimizes X
:rtype: (GPy.core.parameterization.variational.VariationalPosterior, GPy.core.Model)
"""
infr_m = InferenceX(model, Y_new, init=init)
if optimize:
infr_m.optimize()
return infr_m.X, infr_m
class InferenceX(Model):
"""
The class for inference of new X with given new Y. (do_test_latent)
:param model: the GPy model used in inference
:type model: GPy.core.Model
:param Y: the new observed data for inference
@@ -67,12 +68,12 @@ class InferenceX(Model):
self.Y = Y
self.X = self._init_X(model, Y, init=init)
self.compute_dL()
self.link_parameter(self.X)
def _init_X(self, model, Y_new, init='L2'):
# Initialize the new X by finding the nearest point in Y space.
Y = model.Y
if self.missing_data:
Y = Y[:,self.valid_dim]
@@ -86,7 +87,7 @@ class InferenceX(Model):
elif init=='rand':
dist = np.random.rand(Y_new.shape[0],Y.shape[0])
idx = dist.argmin(axis=1)
from ...models import SSGPLVM
from ...util.misc import param_to_array
if isinstance(model, SSGPLVM):
@@ -99,9 +100,9 @@ class InferenceX(Model):
else:
from ...core import Param
X = Param('latent mean',param_to_array(model.X[idx]).copy())
return X
def compute_dL(self):
# Common computation
beta = 1./np.fmax(self.likelihood.variance, 1e-6)
@@ -120,7 +121,7 @@ class InferenceX(Model):
self.dL_dpsi2 = beta*(output_dim*self.posterior.woodbury_inv - np.einsum('md,od->mo',wv, wv))/2.
self.dL_dpsi1 = beta*np.dot(self.Y, wv.T)
self.dL_dpsi0 = -beta/2.*output_dim* np.ones(self.Y.shape[0])
def parameters_changed(self):
if self.uncertain_input:
psi0 = self.kern.psi0(self.Z, self.X)
@@ -132,7 +133,7 @@ class InferenceX(Model):
psi2 = np.dot(psi1.T,psi1)
self._log_marginal_likelihood = (self.dL_dpsi2*psi2).sum()+(self.dL_dpsi1*psi1).sum()+(self.dL_dpsi0*psi0).sum()
if self.uncertain_input:
X_grad = self.kern.gradients_qX_expectations(variational_posterior=self.X, Z=self.Z, dL_dpsi0=self.dL_dpsi0, dL_dpsi1=self.dL_dpsi1, dL_dpsi2=self.dL_dpsi2)
self.X.set_gradients(X_grad)
@@ -141,7 +142,7 @@ class InferenceX(Model):
X_grad = self.kern.gradients_X_diag(self.dL_dpsi0, self.X)
X_grad += self.kern.gradients_X(dL_dpsi1, self.X, self.Z)
self.X.gradient = X_grad
if self.uncertain_input:
from ...core.parameterization.variational import SpikeAndSlabPrior
if isinstance(self.variational_prior, SpikeAndSlabPrior):
@@ -155,7 +156,7 @@ class InferenceX(Model):
# update for the KL divergence
self.variational_prior.update_gradients_KL(self.X)
self._log_marginal_likelihood += -KL_div
def log_likelihood(self):
return self._log_marginal_likelihood
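
The infer_newX docstring in the file above describes the intended call pattern: pass a trained GPy model together with new observations Y_new and get back the inferred latent X and the optimized inference model. Below is a minimal usage sketch; the BayesianGPLVM setup, the data dimensions, and the import path are illustrative assumptions rather than part of this commit.

import numpy as np
import GPy
from GPy.inference.latent_function_inference.inferenceX import infer_newX  # assumed module path

# Fit a Bayesian GP-LVM on some training data (setup assumed for illustration).
Y_train = np.random.randn(100, 12)
m = GPy.models.BayesianGPLVM(Y_train, input_dim=3, num_inducing=20)
m.optimize(messages=False)

# Infer the posterior over the latent X for previously unseen observations Y_new.
# init='L2' starts each new X at the training X whose Y is nearest in Euclidean
# distance; init='rand' starts from a randomly chosen training point instead.
Y_new = np.random.randn(10, 12)
X_new, inference_model = infer_newX(m, Y_new, optimize=True, init='L2')

# With uncertain inputs (e.g. BayesianGPLVM) X_new is a variational posterior with
# a mean and variance; for a plain GPLVM it is a Param holding the latent locations.
print(X_new.mean.shape if hasattr(X_new, 'mean') else X_new.shape)  # expected (10, 3)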

View file

@@ -1,4 +1,4 @@
# Copyright (c) 2013, 2014 GPy authors (see AUTHORS.txt).
# Copyright (c) 2013, 2014 Alan Saul
# Licensed under the BSD 3-clause license (see LICENSE.txt)
#
#Parts of this file were influenced by the Matlab GPML framework written by

View file

@@ -1,4 +1,4 @@
# Copyright (c) 2012, GPy authors (see AUTHORS.txt).
# Copyright (c) 2014, GPy authors (see AUTHORS.txt).
# Licensed under the BSD 3-clause license (see LICENSE.txt)
from posterior import Posterior