mirror of
https://github.com/SheffieldML/GPy.git
synced 2026-05-03 16:52:39 +02:00
Merge branch 'params' of github.com:SheffieldML/GPy into params
This commit is contained in:
commit
3b82bc8f28
6 changed files with 53 additions and 7 deletions
|
|
@ -70,7 +70,7 @@ class GP(Model):
|
|||
|
||||
def parameters_changed(self):
    """
    Recompute the posterior and the log marginal likelihood after a
    parameter change, then push the kernel gradients.

    Delegates to ``self.inference_method.inference`` and caches its three
    results on the model; the gradient w.r.t. K is forwarded to the kernel.
    """
    posterior, log_marginal, grad_dict = self.inference_method.inference(
        self.kern, self.X, self.likelihood, self.Y, Y_metadata=self.Y_metadata)
    self.posterior = posterior
    self._log_marginal_likelihood = log_marginal
    self._dL_dK = grad_dict['dL_dK']
    self.kern.update_gradients_full(self._dL_dK)
|
||||
|
||||
def log_likelihood(self):
    """Return the log marginal likelihood cached by the last inference run."""
    return self._log_marginal_likelihood
|
||||
|
|
|
|||
|
|
@ -23,11 +23,26 @@ class EP(object):
|
|||
self.old_mutilde, self.old_vtilde = None, None
|
||||
|
||||
def inference(self, kern, X, likelihood, Y, Y_metadata=None):
    """
    EP inference: build the Gaussian approximation from the site
    parameters (mu_tilde, tau_tilde) and return the posterior, the
    log marginal likelihood and the gradient w.r.t. K.

    :param kern: kernel object providing K(X)
    :param X: (num_data, 1) inputs -- only 1D output supported for now
    :param likelihood: likelihood object (currently unused here)
    :param Y: observed outputs (currently unused here)
    :param Y_metadata: optional metadata (currently unused here)
    :returns: (Posterior, log_marginal, {'dL_dK': dL_dK})
    """
    num_data, output_dim = X.shape
    assert output_dim ==1, "ep in 1D only (for now!)"

    K = kern.K(X)

    # NOTE(review): expectation_propagation is declared elsewhere as taking
    # (K, Y, Y_metadata, likelihood); this zero-argument call looks out of
    # sync with that signature -- confirm the intended arguments.
    mu_tilde, tau_tilde = self.expectation_propagation()

    # BUG FIX: the original line was missing the closing parenthesis,
    # which is a syntax error.
    Wi, LW, LWi, W_logdet = pdinv(K + np.diag(1./tau_tilde))

    alpha, _ = dpotrs(LW, mu_tilde, lower=1)

    log_marginal = 0.5*(-num_data * log_2_pi - W_logdet - np.sum(alpha * mu_tilde))

    dL_dK = 0.5 * (tdot(alpha[:,None]) - Wi)

    #TODO: what about derivatives of the likelihood parameters?

    return Posterior(woodbury_inv=Wi, woodbury_vector=alpha, K=K), log_marginal, {'dL_dK':dL_dK}
|
||||
|
||||
|
||||
|
||||
def expectation_propagation(self, K, Y, Y_metadata, likelihood)
|
||||
|
||||
|
|
|
|||
|
|
@ -49,8 +49,7 @@ class ExactGaussianInference(object):
|
|||
|
||||
dL_dK = 0.5 * (tdot(alpha) - Y.shape[1] * Wi)
|
||||
|
||||
kern.update_gradients_full(dL_dK, X)
|
||||
|
||||
#TODO: does this really live here?
|
||||
likelihood.update_gradients(np.diag(dL_dK))
|
||||
|
||||
return Posterior(woodbury_chol=LW, woodbury_vector=alpha, K=K), log_marginal, {'dL_dK':dL_dK}
|
||||
|
|
|
|||
|
|
@ -14,3 +14,4 @@ import visualize
|
|||
import latent_space_visualizations
|
||||
import netpbmfile
|
||||
import inference_plots
|
||||
import maps
|
||||
|
|
|
|||
|
|
@ -119,7 +119,7 @@ def plot_bbox(sf,bbox,inside_only=True):
|
|||
A,B,C,D = bbox
|
||||
plot(shape_records,xlims=[bbox[0],bbox[2]],ylims=[bbox[1],bbox[3]])
|
||||
|
||||
def plot_string_match(sf,regex,field):
|
||||
def plot_string_match(sf,regex,field,**kwargs):
|
||||
"""
|
||||
Plot the geometry of a shapefile whose fields match a regular expression given
|
||||
|
||||
|
|
@ -131,7 +131,7 @@ def plot_string_match(sf,regex,field):
|
|||
:type field: integer
|
||||
"""
|
||||
index,shape_records = string_match(sf,regex,field)
|
||||
plot(shape_records)
|
||||
plot(shape_records,**kwargs)
|
||||
|
||||
|
||||
def new_shape_string(sf,name,regex,field=2,type=shapefile.POINT):
|
||||
|
|
@ -159,3 +159,13 @@ def new_shape_string(sf,name,regex,field=2,type=shapefile.POINT):
|
|||
|
||||
newshp.save(name)
|
||||
print index
|
||||
|
||||
def apply_bbox(sf, ax):
    """
    Set the x/y limits of *ax* from the shapefile's bounding box
    (xmin, ymin, xmax, ymax).
    """
    box = sf.bbox
    ax.set_xlim((box[0], box[2]))
    ax.set_ylim((box[1], box[3]))
|
||||
|
|
|
|||
|
|
@ -2,6 +2,27 @@ import numpy as np
|
|||
import warnings
|
||||
from .. import kern
|
||||
|
||||
def build_XY(input_list, output_list=None, index=None):
    """
    Stack per-output input blocks into one design matrix with an extra
    column identifying which output each row belongs to.

    :param input_list: list of 2D input arrays, one block per output
    :param output_list: optional list of matching output arrays; stacked into Y
    :param index: optional sequence of labels, one per block, used for the
        index column (defaults to 0..len(input_list)-1)
    :returns: (X, Y, slices) where X is the stacked inputs with the index
        column appended, Y is the stacked outputs (or None) and slices[i]
        selects the rows of block i.
    """
    num_outputs = len(input_list)

    # Row offsets of each block within the stacked matrix.
    offsets = np.cumsum([0] + [block.shape[0] for block in input_list])
    slices = [slice(start, stop) for start, stop in zip(offsets[:-1], offsets[1:])]

    if output_list is not None:
        assert num_outputs == len(output_list)
        Y = np.vstack(output_list)
    else:
        Y = None

    if index is not None:
        assert len(index) == num_outputs
        labels = index
    else:
        labels = range(num_outputs)
    # One constant-label column per block, stacked to align with the rows.
    I = np.vstack([lab * np.ones((block.shape[0], 1)) for block, lab in zip(input_list, labels)])

    X = np.hstack([np.vstack(input_list), I])
    return X, Y, slices
|
||||
|
||||
def build_lcm(input_dim, num_outputs, CK = [], NC = [], W_columns=1,W=None,kappa=None):
|
||||
#TODO build_icm or build_lcm
|
||||
"""
|
||||
|
|
@ -25,9 +46,9 @@ def build_lcm(input_dim, num_outputs, CK = [], NC = [], W_columns=1,W=None,kappa
|
|||
k.input_dim = input_dim + 1
|
||||
warnings.warn("kernel's input dimension overwritten to fit input_dim parameter.")
|
||||
|
||||
kernel = CK[0].prod(kern.coregionalize(num_outputs,W_columns,W,kappa),tensor=True)
|
||||
kernel = CK[0].prod(kern.Coregionalize(num_outputs,W_columns,W,kappa),tensor=True)
|
||||
for k in CK[1:]:
|
||||
k_coreg = kern.coregionalize(num_outputs,W_columns,W,kappa)
|
||||
k_coreg = kern.Coregionalize(num_outputs,W_columns,W,kappa)
|
||||
kernel += k.prod(k_coreg,tensor=True)
|
||||
for k in NC:
|
||||
kernel += k
|
||||
|
|
|
|||
Loading…
Add table
Add a link
Reference in a new issue