diff --git a/CHANGELOG.md b/CHANGELOG.md
index 7d56ed1f..8b0fc679 100644
--- a/CHANGELOG.md
+++ b/CHANGELOG.md
@@ -2,7 +2,9 @@
 
 ## Unreleased
 
-+ update import in `.plotting.matplot_dep.defaults` due to change in matplotlib
+* update import in `.plotting.matplot_dep.defaults` due to change in matplotlib
+
+* Correct `dL_dm` term in Student-t inference #1065
 
 ## v1.13.1 (2024-01-14)
 
diff --git a/GPy/inference/latent_function_inference/exact_studentt_inference.py b/GPy/inference/latent_function_inference/exact_studentt_inference.py
index 161dd289..440989e1 100644
--- a/GPy/inference/latent_function_inference/exact_studentt_inference.py
+++ b/GPy/inference/latent_function_inference/exact_studentt_inference.py
@@ -35,15 +35,20 @@ class ExactStudentTInference(LatentFunctionInference):
         # Log marginal
         N = Y.shape[0]
         D = Y.shape[1]
-        log_marginal = 0.5 * (-N * np.log((nu - 2) * np.pi) - W_logdet - (nu + N) * np.log(1 + beta / (nu - 2)))
+        log_marginal = 0.5 * (
+            -N * np.log((nu - 2) * np.pi)
+            - W_logdet
+            - (nu + N) * np.log(1 + beta / (nu - 2))
+        )
         log_marginal += gammaln((nu + N) / 2) - gammaln(nu / 2)
 
         # Gradients
         dL_dK = 0.5 * ((nu + N) / (nu + beta - 2) * tdot(alpha) - D * Wi)
-        dL_dnu = -N / (nu - 2.) + digamma(0.5 * (nu + N)) - digamma(0.5 * nu)
-        dL_dnu -= np.log(1 + beta / (nu - 2.))
+        dL_dnu = -N / (nu - 2.0) + digamma(0.5 * (nu + N)) - digamma(0.5 * nu)
+        dL_dnu -= np.log(1 + beta / (nu - 2.0))
         dL_dnu += ((nu + N) * beta) / ((nu - 2) * (beta + nu - 2))
         dL_dnu *= 0.5
-        gradients = {'dL_dK': dL_dK, 'dL_dnu': dL_dnu, 'dL_dm': alpha}
+        dL_dm = (nu + N) / (nu + beta - 2) * alpha
+        gradients = {"dL_dK": dL_dK, "dL_dnu": dL_dnu, "dL_dm": dL_dm}
 
         return posterior, log_marginal, gradients
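
For reviewers who want to sanity-check the corrected `dL_dm` expression, here is a minimal finite-difference sketch. It re-implements the log marginal from the patched code in isolation, assuming `alpha = K^{-1}(Y - m)` and `beta = (Y - m)^T K^{-1} (Y - m)` (which is how the surrounding code appears to use these quantities), with `D = 1` and a random positive-definite `K`; it is an illustrative check under those assumptions, not part of GPy's test suite.

```python
# Sketch: numerically verify dL_dm = (nu + N) / (nu + beta - 2) * alpha.
# Assumes alpha = K^{-1}(Y - m) and beta = (Y - m)^T K^{-1} (Y - m);
# D = 1 for simplicity. Illustrative only, not GPy's code path.
import numpy as np
from scipy.special import gammaln

rng = np.random.default_rng(0)
N = 5
Y = rng.normal(size=(N, 1))
A = rng.normal(size=(N, N))
K = A @ A.T + N * np.eye(N)  # arbitrary positive-definite covariance
nu = 7.0  # degrees of freedom, nu > 2

def log_marginal(m):
    # Student-t log marginal, mirroring the patched expression.
    r = Y - m
    alpha = np.linalg.solve(K, r)
    beta = (r.T @ alpha).item()
    W_logdet = np.linalg.slogdet(K)[1]
    lm = 0.5 * (
        -N * np.log((nu - 2) * np.pi)
        - W_logdet
        - (nu + N) * np.log(1 + beta / (nu - 2))
    )
    return lm + gammaln((nu + N) / 2) - gammaln(nu / 2)

def dL_dm(m):
    # Analytic gradient with the corrected prefactor from this PR.
    r = Y - m
    alpha = np.linalg.solve(K, r)
    beta = (r.T @ alpha).item()
    return (nu + N) / (nu + beta - 2) * alpha

# Central finite differences at a random mean vector.
m0 = rng.normal(size=(N, 1))
eps = 1e-6
numeric = np.zeros_like(m0)
for i in range(N):
    e = np.zeros_like(m0)
    e[i] = eps
    numeric[i] = (log_marginal(m0 + e) - log_marginal(m0 - e)) / (2 * eps)

print(np.allclose(numeric, dL_dm(m0), atol=1e-6))  # expected: True
```

Under the same assumptions, returning bare `alpha` for `dL_dm` (as the old code did) drops the `(nu + N) / (nu + beta - 2)` factor that already appears in `dL_dK`, and the check above would print `False`.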