mirror of https://github.com/SheffieldML/GPy.git (synced 2026-05-10 12:32:40 +02:00)

Commit: 57eda0b77c
Parent: 549f64892e

added print m and print m.all differentiation

5 changed files with 47 additions and 13 deletions
@@ -397,13 +397,17 @@ class Model(Parameterized):
             return np.nan
         return 0.5 * self._get_params().size * np.log(2 * np.pi) + self.log_likelihood() - hld

-    def __str__(self):
-        s = Parameterized.__str__(self).split('\n')
+    def __str__(self, names=None):
+        if names is None:
+            names = self._get_print_names()
+        s = Parameterized.__str__(self, names=names).split('\n')
         # add priors to the string
         if self.priors is not None:
             strs = [str(p) if p is not None else '' for p in self.priors]
         else:
-            strs = [''] * len(self._get_params())
+            strs = [''] * len(self._get_param_names())
+        name_indices = self.grep_param_names("|".join(names))
+        strs = np.array(strs)[name_indices]
         width = np.array(max([len(p) for p in strs] + [5])) + 4

         log_like = self.log_likelihood()
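Together with the Parameterized changes below, this gives Model a two-level printout: print m renders only the names from _get_print_names(), and the prior strings are filtered down to those names via grep_param_names. A standalone sketch of that filtering pattern (plain Python, not GPy itself; names and values are invented):

    import re
    import numpy as np

    param_names = ['iip_0_0', 'iip_0_1', 'rbf_variance', 'noise_variance']
    params = np.array([0.1, 0.2, 1.5, 0.01])
    print_names = ['rbf_variance', 'noise_variance']  # what _get_print_names() returns

    # mirrors grep_param_names("|".join(names)): regex-match the chosen names
    # back to their positions in the full parameter vector
    regex = re.compile("|".join(print_names))
    name_indices = [i for i, n in enumerate(param_names) if regex.search(n)]
    print(np.array(params)[name_indices])  # [ 1.5   0.01] -- only the printed subset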
@@ -27,6 +27,9 @@ class Parameterized(object):

     def _get_param_names(self):
         raise NotImplementedError, "this needs to be implemented to use the Parameterized class"
+    def _get_print_names(self):
+        """ Override for which names to print out, when using print m """
+        return self._get_param_names()

     def pickle(self, filename, protocol=None):
         if protocol is None:
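_get_print_names() is the new hook: by default it returns everything from _get_param_names(), and subclasses override it to hide bookkeeping parameters from the default printout. A minimal sketch of the override pattern (stub classes, invented names):

    class Base(object):  # stand-in for GPy's Parameterized
        def _get_param_names(self):
            raise NotImplementedError("implement in subclass")

        def _get_print_names(self):
            # default: print every parameter
            return self._get_param_names()

    class LatentModel(Base):  # hypothetical subclass
        def _get_param_names(self):
            return ['X_0_0', 'X_0_1', 'rbf_variance']

        def _get_print_names(self):
            # hide the latent coordinates from print m
            return ['rbf_variance']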
@@ -333,19 +336,26 @@ class Parameterized(object):
         n = [nn for i, nn in enumerate(n) if not i in remove]
         return n

-    def __str__(self, nw=30):
+    @property
+    def all(self):
+        return self.__str__(self._get_param_names())
+
+
+    def __str__(self, names=None, nw=30):
         """
         Return a string describing the parameter names and their ties and constraints
         """
-        names = self._get_param_names()
+        if names is None:
+            names = self._get_print_names()
+        name_indices = self.grep_param_names("|".join(names))
         N = len(names)

         if not N:
             return "This object has no free parameters."
         header = ['Name', 'Value', 'Constraints', 'Ties']
-        values = self._get_params() # map(str,self._get_params())
+        values = self._get_params()[name_indices] # map(str,self._get_params())
         # sort out the constraints
-        constraints = [''] * len(names)
+        constraints = [''] * len(self._get_param_names())
         for i, t in zip(self.constrained_indices, self.constraints):
             for ii in i:
                 constraints[ii] = t.__str__()
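This is where the commit message's distinction lives: print m goes through __str__ with names=None and so shows only _get_print_names(), while the new all property re-renders the same table over the full name list, so print m.all recovers the old behaviour. A runnable toy version of the split (not GPy's actual classes):

    class Tiny(object):
        def _get_param_names(self):
            return ['X_0_0', 'X_0_1', 'rbf_variance']

        def _get_print_names(self):
            return ['rbf_variance']

        def __str__(self, names=None):
            if names is None:
                names = self._get_print_names()
            return '\n'.join(names)

        @property
        def all(self):
            # same renderer, full name list
            return self.__str__(self._get_param_names())

    m = Tiny()
    print(m)      # rbf_variance
    print(m.all)  # X_0_0, X_0_1 and rbf_variance, one per line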
@@ -194,6 +194,9 @@ class SparseGP(GPBase):
         return sum([['iip_%i_%i' % (i, j) for j in range(self.Z.shape[1])] for i in range(self.Z.shape[0])], [])\
             + self.kern._get_param_names_transformed() + self.likelihood._get_param_names()

+    def _get_print_names(self):
+        return self.kern._get_param_names_transformed() + self.likelihood._get_param_names()
+
     def update_likelihood_approximation(self):
         """
         Approximates a non-gaussian likelihood using Expectation Propagation
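For sparse GPs the full parameter vector starts with one iip_i_j entry per inducing-input coordinate, which swamps the printout for any realistic number of inducing points; the override drops that block and keeps only the kernel and likelihood parameters. Illustrated with invented shapes and names:

    num_inducing, input_dim = 3, 2
    iip_names = ['iip_%i_%i' % (i, j) for i in range(num_inducing) for j in range(input_dim)]
    kern_names = ['rbf_variance', 'rbf_lengthscale']  # placeholder kernel names
    lik_names = ['noise_variance']

    param_names = iip_names + kern_names + lik_names  # what the optimizer sees
    print_names = kern_names + lik_names              # what print m now shows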
@@ -57,6 +57,7 @@ class BayesianGPLVM(SparseGP, GPLVM):
         return SparseGP.getstate(self) + [self.init]

     def setstate(self, state):
+        self._const_jitter = None
         self.init = state.pop()
         SparseGP.setstate(self, state)

@@ -65,6 +66,9 @@ class BayesianGPLVM(SparseGP, GPLVM):
         S_names = sum([['X_variance_%i_%i' % (n, q) for q in range(self.input_dim)] for n in range(self.num_data)], [])
         return (X_names + S_names + SparseGP._get_param_names(self))

+    def _get_print_names(self):
+        return SparseGP._get_print_names(self)
+
     def _get_params(self):
         """
         Horizontally stacks the parameters in order to present them to the optimizer.
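Two separate fixes here: setstate re-creates the transient _const_jitter cache before restoring anything, and _get_print_names delegates to SparseGP, so the per-point X and X_variance entries stay out of the default printout. The setstate ordering matters because getstate appends the child's state last, so it must be popped first; a standalone sketch (stub classes):

    class Parent(object):
        def setstate(self, state):
            self.base = state.pop()

    class Child(Parent):
        def setstate(self, state):
            self._const_jitter = None     # transient cache, never pickled
            self.init = state.pop()       # child state was appended last in getstate
            Parent.setstate(self, state)  # parent restores the rest

    c = Child()
    c.setstate(['base-value', 'init-value'])
    assert (c.init, c.base) == ('init-value', 'base-value')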
@@ -163,17 +163,28 @@ class MRD(Model):
         self._init_X(initx, self.likelihood_list)
         self._init_Z(initz, self.X)

-    def _get_param_names(self):
-        # X_names = sum([['X_%i_%i' % (n, q) for q in range(self.input_dim)] for n in range(self.num_data)], [])
-        # S_names = sum([['X_variance_%i_%i' % (n, q) for q in range(self.input_dim)] for n in range(self.num_data)], [])
+    def _get_latent_param_names(self):
         n1 = self.gref._get_param_names()
         n1var = n1[:self.NQ * 2 + self.MQ]
+        return n1var
+
+
+    def _get_kernel_names(self):
         map_names = lambda ns, name: map(lambda x: "{1}_{0}".format(*x),
                                          itertools.izip(ns,
                                                         itertools.repeat(name)))
-        return list(itertools.chain(n1var, *(map_names(\
-            SparseGP._get_param_names(g)[self.MQ:], n) \
-            for g, n in zip(self.bgplvms, self.names))))
+        kernel_names = (map_names(SparseGP._get_param_names(g)[self.MQ:], n) for g, n in zip(self.bgplvms, self.names))
+        return kernel_names
+
+    def _get_param_names(self):
+        # X_names = sum([['X_%i_%i' % (n, q) for q in range(self.input_dim)] for n in range(self.num_data)], [])
+        # S_names = sum([['X_variance_%i_%i' % (n, q) for q in range(self.input_dim)] for n in range(self.num_data)], [])
+        n1var = self._get_latent_param_names()
+        kernel_names = self._get_kernel_names()
+        return list(itertools.chain(n1var, *kernel_names))
+
+    def _get_print_names(self):
+        return list(itertools.chain(*self._get_kernel_names()))

     def _get_params(self):
         """
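The old monolithic _get_param_names is split into _get_latent_param_names (shared latent-space names, taken from the reference model gref) and _get_kernel_names (per-model kernel and likelihood names, each prefixed with its model's name via map_names); print m then shows only the chained kernel names. What map_names produces, in standalone form (model and parameter names invented; the original uses Python 2's itertools.izip):

    import itertools

    def map_names(ns, name):
        # prefix each parameter name with its model's name
        return ['{1}_{0}'.format(n, name) for n in ns]

    model_names = ['Y1', 'Y2']
    per_model = [['rbf_variance', 'noise_variance']] * 2
    kernel_names = (map_names(ns, n) for ns, n in zip(per_model, model_names))
    print(list(itertools.chain(*kernel_names)))
    # ['Y1_rbf_variance', 'Y1_noise_variance', 'Y2_rbf_variance', 'Y2_noise_variance']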
@@ -329,7 +340,9 @@ class MRD(Model):
         """
         if titles is None:
             titles = [r'${}$'.format(name) for name in self.names]
+        ymax = reduce(max, [numpy.ceil(max(g.input_sensitivity())) for g in self.bgplvms])
         def plotf(i, g, ax):
+            ax.set_ylim([0,ymax])
             g.kern.plot_ARD(ax=ax, title=titles[i], *args, **kwargs)
         fig = self._handle_plotting(fignum, ax, plotf, sharex=sharex, sharey=sharey)
         return fig
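The ARD plot now computes one shared ymax across all sub-models' input sensitivities and pins every panel to it, so relevance weights are visually comparable between views. The same idea with plain matplotlib (data invented):

    import numpy as np
    import matplotlib.pyplot as plt

    sensitivities = [np.array([0.2, 1.7, 0.4]), np.array([2.3, 0.1, 0.9])]
    ymax = max(np.ceil(s.max()) for s in sensitivities)  # common ceiling, as above

    fig, axes = plt.subplots(1, len(sensitivities), sharey=True)
    for ax, s in zip(axes, sensitivities):
        ax.bar(np.arange(len(s)), s)
        ax.set_ylim([0, ymax])  # same scale on every panel
    plt.show()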