Mirror of https://github.com/SheffieldML/GPy.git (synced 2026-04-30 23:36:23 +02:00)
[logging] more on logging

commit a9443417d7 (parent eb9fb180fb)

3 changed files with 19 additions and 8 deletions
@@ -46,7 +46,7 @@ class SparseGP(GP):
         self.num_inducing = Z.shape[0]

         GP.__init__(self, X, Y, kernel, likelihood, inference_method=inference_method, name=name, Y_metadata=Y_metadata)
-
+        self.logger.info("Adding Z as parameter")
         self.add_parameter(self.Z, index=0)

     def has_uncertain_inputs(self):
@@ -66,10 +66,10 @@ class SparseGP(GP):
             #gradients wrt Z
             self.Z.gradient = self.kern.gradients_X(dL_dKmm, self.Z)
             self.Z.gradient += self.kern.gradients_Z_expectations(
-                                self.grad_dict['dL_dpsi0'],
-                                self.grad_dict['dL_dpsi1'],
-                                self.grad_dict['dL_dpsi2'],
-                                Z=self.Z,
+                                self.grad_dict['dL_dpsi0'],
+                                self.grad_dict['dL_dpsi1'],
+                                self.grad_dict['dL_dpsi2'],
+                                Z=self.Z,
                                 variational_posterior=self.X)
         else:
             #gradients wrt kernel
@@ -9,7 +9,7 @@ import numpy as np
 from ...util.misc import param_to_array
 from . import LatentFunctionInference
 log_2_pi = np.log(2*np.pi)
-import logging
+import logging, itertools
 logger = logging.getLogger('vardtc')

 class VarDTC(LatentFunctionInference):
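Note: the hunk above only imports the stdlib logging package and grabs a named logger; the INFO messages stay silent unless the application configures a handler. A minimal sketch (not part of this commit; the format string is an arbitrary choice) for surfacing the 'vardtc' messages:

    import logging

    # Route log records to stderr with a simple format.
    logging.basicConfig(format='%(name)s %(levelname)s: %(message)s')
    # Lower the threshold of the logger created above via logging.getLogger('vardtc').
    logging.getLogger('vardtc').setLevel(logging.INFO)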
@@ -228,18 +228,28 @@ class VarDTCMissingData(LatentFunctionInference):
         self._subarray_indices = []
         csa = common_subarrays(inan, 1)
         size = len(csa)
+        next_ten = 0
         for i, (v,ind) in enumerate(csa.iteritems()):
             if not np.all(v):
-                logger.info('preparing subarrays {:3.3%}'.format((i+1.)/size))
+                if ((i+1.)/size) >= next_ten:
+                    logger.info('preparing subarrays {:3%}'.format((i+1.)/size))
+                    next_ten += max(.1, 1./size)
                 v = ~np.array(v, dtype=bool)
                 ind = np.array(ind, dtype=int)
                 if ind.size == Y.shape[1]:
                     ind = slice(None)
                 self._subarray_indices.append([v,ind])
-        logger.info('preparing subarrays Y')
+        #logger.info('preparing subarrays {:3.3%}'.format((i+1.)/size))
         #Ys = [Y[v, :][:, ind] for v, ind in self._subarray_indices]
         logger.info('preparing traces Y')
+
+        next_ten = [0.]
+        count = itertools.count()
         def trace(y, v, ind):
+            i = count.next()
+            if ((i+1.)/size) >= next_ten[0]:
+                logger.info('preparing traces {:3%}'.format((i+1.)/size))
+                next_ten[0] += .1
             y = y[v,:][:,ind]
             return np.einsum('ij,ij->', y,y)
         traces = [trace(Y, v, ind) for v, ind in self._subarray_indices]
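The hunk above swaps per-item logger.info calls for throttled progress reporting: a message is emitted only when the loop crosses the next ~10% boundary, with max(.1, 1./size) keeping the threshold advancing when there are fewer than ten items. The one-element list next_ten = [0.] together with itertools.count() lets the nested trace function update that state without nonlocal, which Python 2 lacks (csa.iteritems() and count.next() are likewise Python 2 spellings). A standalone sketch of the same idiom, with hypothetical names (log_progress_every_tenth, work):

    import itertools
    import logging

    logger = logging.getLogger('vardtc')

    def log_progress_every_tenth(n_items, work):
        # Call work(i) for each item, logging roughly every 10% of the way.
        next_ten = 0.
        for i in range(n_items):
            frac = (i + 1.) / n_items
            if frac >= next_ten:
                logger.info('progress {:3.0%}'.format(frac))
                next_ten += max(.1, 1. / n_items)
            work(i)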
@@ -67,6 +67,7 @@ class BayesianGPLVM(SparseGP):
             inference_method = VarDTC()

         SparseGP.__init__(self, X, Y, Z, kernel, likelihood, inference_method, name, **kwargs)
+        self.logger.info("Adding X as parameter")
         self.add_parameter(self.X, index=0)

     def set_X_gradients(self, X, X_grad):
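A rough usage sketch, not from the commit: with INFO logging enabled, constructing a model should now emit the "Adding Z as parameter" and "Adding X as parameter" messages from the hunks above. The BayesianGPLVM constructor signature and the toy data are assumptions, not shown in this diff.

    import logging
    import numpy as np
    import GPy

    logging.basicConfig(level=logging.INFO)

    # Toy data: 40 points in 5 dimensions, mapped to a 2-dimensional latent space.
    Y = np.random.randn(40, 5)
    m = GPy.models.BayesianGPLVM(Y, 2, num_inducing=10)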