diff --git a/GPy/examples/classification.py b/GPy/examples/classification.py index 77d1982c..b06b92e8 100644 --- a/GPy/examples/classification.py +++ b/GPy/examples/classification.py @@ -166,3 +166,35 @@ def FITC_crescent_data(num_inducing=10, seed=default_seed): print(m) m.plot() return m + + +def toy_heavyside(seed=default_seed): + """ + Simple 1D classification example using a Heaviside GP transformation + :param seed: seed value for data generation (default is 4). + :type seed: int + """ + + data = GPy.util.datasets.toy_linear_1d_classification(seed=seed) + Y = data['Y'][:, 0:1] + Y[Y.flatten() == -1] = 0 + + # Model definition + noise_model = GPy.likelihoods.binomial(GPy.likelihoods.noise_models.gp_transformations.Heaviside()) + likelihood = GPy.likelihoods.EP(Y,noise_model) + m = GPy.models.GPClassification(data['X'], likelihood=likelihood) + + # Optimize + m.update_likelihood_approximation() + # Parameters optimization: + m.optimize() + #m.pseudo_EM() + + # Plot + fig, axes = pb.subplots(2,1) + m.plot_f(ax=axes[0]) + m.plot(ax=axes[1]) + print(m) + + return m + diff --git a/GPy/likelihoods/noise_models/binomial_noise.py b/GPy/likelihoods/noise_models/binomial_noise.py index abad4822..256eaa3c 100644 --- a/GPy/likelihoods/noise_models/binomial_noise.py +++ b/GPy/likelihoods/noise_models/binomial_noise.py @@ -64,7 +64,7 @@ class Binomial(NoiseDistribution): if isinstance(self.gp_link,gp_transformations.Probit): return stats.norm.cdf(mu/np.sqrt(1+sigma**2)) elif isinstance(self.gp_link,gp_transformations.Heaviside): - return stats.norm.cdf(mu/sigma) + return stats.norm.cdf(mu/sigma) else: raise NotImplementedError @@ -74,8 +74,6 @@ class Binomial(NoiseDistribution): else: raise NotImplementedError - - def _mass(self,gp,obs): #NOTE obs must be in {0,1} p = self.gp_link.transf(gp) diff --git a/GPy/likelihoods/noise_models/gp_transformations.py b/GPy/likelihoods/noise_models/gp_transformations.py index 0f8cf18c..ccf965d9 100644 ---
a/GPy/likelihoods/noise_models/gp_transformations.py +++ b/GPy/likelihoods/noise_models/gp_transformations.py @@ -116,10 +116,10 @@ class Heaviside(GPTransformation): """ def transf(self,f): #transformation goes here - return np.where(f>0, 1, -1) + return np.where(f>0, 1, 0) def dtransf_df(self,f): - raise NotImplementedError, "this function is not differentiable!" + raise NotImplementedError, "This function is not differentiable!" def d2transf_df2(self,f): - raise NotImplementedError, "this function is not differentiable!" + raise NotImplementedError, "This function is not differentiable!" diff --git a/GPy/models/gp_classification.py b/GPy/models/gp_classification.py index 73d492fe..fce51cfa 100644 --- a/GPy/models/gp_classification.py +++ b/GPy/models/gp_classification.py @@ -14,7 +14,7 @@ class GPClassification(GP): This is a thin wrapper around the models.GP class, with a set of sensible defaults :param X: input observations - :param Y: observed values + :param Y: observed values, can be None if likelihood is not None :param likelihood: a GPy likelihood, defaults to Binomial with probit link_function :param kernel: a GPy kernel, defaults to rbf :param normalize_X: whether to normalize the input data before computing (predictions will be in original scales)