# Copyright (c) 2012-2014, GPy authors (see AUTHORS.txt).
# Licensed under the BSD 3-clause license (see LICENSE.txt)

from .parameterization.priorizable import Priorizable
from paramz import Model as ParamzModel

class Model(ParamzModel, Priorizable):

    def __init__(self, name):
        super(Model, self).__init__(name)  # Parameterized.__init__(self)

    def log_likelihood(self):
        """
        The log-likelihood of the model; subclasses must implement this.
        """
        raise NotImplementedError("this needs to be implemented to use the model class")

    def _log_likelihood_gradients(self):
        """
        The gradient of the log-likelihood w.r.t. the parameters, read from
        self.gradient, where the paramz framework stores it.
        """
        return self.gradient#.copy()

    def objective_function(self):
        """
        The objective function for the given algorithm.

        This function is the true objective to be minimized.
        Note that all parameters are already set and in place, so you just
        need to return the objective function here.

        For probabilistic models this is the negative log-likelihood
        (including the MAP prior), so we return that here. If your model is
        not probabilistic, just return your objective to minimize here!
        """
        return -float(self.log_likelihood()) - self.log_prior()

    def objective_function_gradients(self):
        """
        The gradients of the objective function for the given algorithm.
        The returned gradients are those of the *negative* log-likelihood
        w.r.t. the parameters, as this framework works with *negative*
        log-likelihoods by default.

        You can find the gradients for the parameters in self.gradient at
        all times; this is where gradients get stored for the parameters.

        Note that all parameters are already set and in place, so you just
        need to return the gradients here.

        For probabilistic models this is the gradient of the negative
        log-likelihood (including the MAP prior), so we return that here.
        If your model is not probabilistic, just return your *negative*
        gradient here!
        """
        return -(self._log_likelihood_gradients() + self._log_prior_gradients())
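
# Example (a minimal, illustrative sketch, not part of the GPy API): to use
# this class, subclass it, link your parameters, implement log_likelihood(),
# and write the log-likelihood gradients into the parameters' .gradient
# attributes inside parameters_changed(), which paramz calls after every
# parameter update. The names GaussianMean, Y and mu below are hypothetical.
#
#     import numpy as np
#     from paramz import Param
#
#     class GaussianMean(Model):
#         """Unit-variance Gaussian with unknown mean mu."""
#         def __init__(self, Y):
#             super(GaussianMean, self).__init__(name='gaussian_mean')
#             self.Y = Y
#             self.mu = Param('mu', 0.0)
#             self.link_parameter(self.mu)
#
#         def log_likelihood(self):
#             # log p(Y | mu), dropping constant terms
#             return -0.5 * np.sum((self.Y - self.mu) ** 2)
#
#         def parameters_changed(self):
#             # gradient of the log-likelihood w.r.t. mu
#             self.mu.gradient = np.sum(self.Y - self.mu)
#
#     m = GaussianMean(np.random.randn(10))
#     m.optimize()  # minimizes objective_function() using its gradients
#     print(m.mu)   # close to the sample mean of Y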