mirror of
https://github.com/SheffieldML/GPy.git
synced 2026-05-10 20:42:39 +02:00
starting varDTC with uncertain inputs [not working]
This commit is contained in:
parent
70e7d72bf2
commit
b2328c4f47
5 changed files with 29 additions and 12 deletions
|
|
@ -185,7 +185,7 @@ class GP(Model):
|
|||
from ..plotting.matplot_dep import models_plots
|
||||
models_plots.plot_fit_f(self,*args,**kwargs)
|
||||
|
||||
def plot(self, *args):
|
||||
def plot(self, *args, **kwargs):
|
||||
"""
|
||||
Plot the posterior of the GP.
|
||||
- In one dimension, the function is plotted with a shaded region
|
||||
|
|
@ -204,7 +204,7 @@ class GP(Model):
|
|||
"""
|
||||
assert "matplotlib" in sys.modules, "matplotlib package has not been imported."
|
||||
from ..plotting.matplot_dep import models_plots
|
||||
models_plots.plot_fit(self,*args)
|
||||
models_plots.plot_fit(self,*args,**kwargs)
|
||||
|
||||
def _getstate(self):
|
||||
"""
|
||||
|
|
|
|||
|
|
@ -4,6 +4,7 @@
|
|||
from posterior import Posterior
|
||||
from ...util.linalg import jitchol, backsub_both_sides, tdot, dtrtrs
|
||||
import numpy as np
|
||||
from GPy.util.linalg import dtrtri
|
||||
log_2_pi = np.log(2*np.pi)
|
||||
|
||||
class VarDTC(object):
|
||||
|
|
@ -69,12 +70,17 @@ class VarDTC(object):
|
|||
psi2_beta = (psi2 * (beta.flatten().reshape(num_data, 1, 1))).sum(0)
|
||||
else:
|
||||
psi2_beta = psi2.sum(0) * beta
|
||||
if 0:
|
||||
evals, evecs = linalg.eigh(psi2_beta)
|
||||
clipped_evals = np.clip(evals, 0., 1e6) # TODO: make clipping configurable
|
||||
if not np.array_equal(evals, clipped_evals):
|
||||
pass # print evals
|
||||
tmp = evecs * np.sqrt(clipped_evals)
|
||||
tmp = tmp.T
|
||||
# no backsubstitution because of bound explosion on tr(A) if not...
|
||||
LmInv, _ = dtrtri(Lm, lower=1)
|
||||
A = LmInv.T.dot(psi2_beta.dot(LmInv))
|
||||
print A.sum()
|
||||
else:
|
||||
if het_noise:
|
||||
tmp = psi1 * (np.sqrt(beta.reshape(num_data, 1)))
|
||||
|
|
|
|||
|
|
@ -159,7 +159,7 @@ class RBF(Kernpart):
|
|||
if self.ARD:
|
||||
self.lengthscales.gradient += self._dL_dlengthscales_via_K(dL_dKmm, Z, None)
|
||||
else:
|
||||
self.lengthscale.gradient += (self.variance / self.lengthscale) * np.sum(self._K_dvar * self._K_dist2 * dL_dK)
|
||||
self.lengthscale.gradient += (self.variance / self.lengthscale) * np.sum(self._K_dvar * self._K_dist2 * dL_dKmm)
|
||||
|
||||
def gradients_X(self, dL_dK, X, X2, target):
|
||||
#if self._X is None or X.base is not self._X.base or X2 is not None:
|
||||
|
|
|
|||
|
|
@ -43,7 +43,7 @@ class SparseGPRegression(SparseGP):
|
|||
|
||||
likelihood = likelihoods.Gaussian()
|
||||
|
||||
SparseGP.__init__(self, X, Y, Z, kernel, likelihood)
|
||||
SparseGP.__init__(self, X, Y, Z, kernel, likelihood, X_variance=X_variance)
|
||||
self.ensure_default_constraints()
|
||||
|
||||
def _getstate(self):
|
||||
|
|
|
|||
|
|
@ -41,6 +41,17 @@ else:
|
|||
_blas_available = False
|
||||
warnings.warn("warning: caught this exception:" + str(e))
|
||||
|
||||
def dtrtri(L, lower=0):
    """
    Wrapper for the LAPACK dtrtri function.

    Computes the inverse of the triangular matrix L.

    :param L: triangular matrix L
    :param lower: 1 if L is lower triangular, 0 (default) if upper triangular
    :returns: tuple (Li, info) — Li is the inverse of L, info is the LAPACK
        status code (0 on success); callers unpack both, e.g.
        ``LmInv, _ = dtrtri(Lm, lower=1)``
    """
    return lapack.dtrtri(L, lower=lower)
|
||||
|
||||
def dtrtrs(A, B, lower=0, trans=0, unitdiag=0):
|
||||
"""
|
||||
Wrapper for lapack dtrtrs function
|
||||
|
|
|
|||
Loading…
Add table
Add a link
Reference in a new issue