<!-- File-viewer/scraper residue preserved as a comment so the document begins with the DOCTYPE:
GPy/doc/_build/html/GPy.core.html
889 lines
62 KiB
HTML
Raw Normal View History
2015-07-19 14:30:27 -07:00
-->
<!DOCTYPE html PUBLIC "-//W3C//DTD XHTML 1.0 Transitional//EN"
"http://www.w3.org/TR/xhtml1/DTD/xhtml1-transitional.dtd">
<html xmlns="http://www.w3.org/1999/xhtml">
<head>
<meta http-equiv="Content-Type" content="text/html; charset=utf-8" />
<title>GPy.core package &mdash; GPy documentation</title>
<link rel="stylesheet" href="_static/default.css" type="text/css" />
<link rel="stylesheet" href="_static/pygments.css" type="text/css" />
<script type="text/javascript">
var DOCUMENTATION_OPTIONS = {
URL_ROOT: './',
VERSION: '',
COLLAPSE_INDEX: false,
FILE_SUFFIX: '.html',
HAS_SOURCE: true
};
</script>
<script type="text/javascript" src="_static/jquery.js"></script>
<script type="text/javascript" src="_static/underscore.js"></script>
<script type="text/javascript" src="_static/doctools.js"></script>
<link rel="top" title="GPy documentation" href="index.html" />
<link rel="up" title="GPy package" href="GPy.html" />
<link rel="next" title="GPy.core.parameterization package" href="GPy.core.parameterization.html" />
<link rel="prev" title="GPy package" href="GPy.html" />
</head>
<body role="document">
<div class="related" role="navigation" aria-label="related navigation">
<h3>Navigation</h3>
<ul>
<li class="right" style="margin-right: 10px">
<a href="genindex.html" title="General Index"
accesskey="I">index</a></li>
<li class="right" >
<a href="py-modindex.html" title="Python Module Index"
>modules</a> |</li>
<li class="right" >
<a href="GPy.core.parameterization.html" title="GPy.core.parameterization package"
accesskey="N">next</a> |</li>
<li class="right" >
<a href="GPy.html" title="GPy package"
accesskey="P">previous</a> |</li>
<li class="nav-item nav-item-0"><a href="index.html">GPy documentation</a> &raquo;</li>
<li class="nav-item nav-item-1"><a href="GPy.html" accesskey="U">GPy package</a> &raquo;</li>
</ul>
</div>
<div class="document">
<div class="documentwrapper">
<div class="bodywrapper">
<div class="body" role="main">
<div class="section" id="gpy-core-package">
<h1>GPy.core package<a class="headerlink" href="#gpy-core-package" title="Permalink to this headline"></a></h1>
<div class="section" id="subpackages">
<h2>Subpackages<a class="headerlink" href="#subpackages" title="Permalink to this headline"></a></h2>
<div class="toctree-wrapper compound">
<ul>
<li class="toctree-l1"><a class="reference internal" href="GPy.core.parameterization.html">GPy.core.parameterization package</a><ul>
<li class="toctree-l2"><a class="reference internal" href="GPy.core.parameterization.html#submodules">Submodules</a></li>
<li class="toctree-l2"><a class="reference internal" href="GPy.core.parameterization.html#module-GPy.core.parameterization.domains">GPy.core.parameterization.domains module</a></li>
<li class="toctree-l2"><a class="reference internal" href="GPy.core.parameterization.html#module-GPy.core.parameterization.index_operations">GPy.core.parameterization.index_operations module</a></li>
<li class="toctree-l2"><a class="reference internal" href="GPy.core.parameterization.html#module-GPy.core.parameterization.lists_and_dicts">GPy.core.parameterization.lists_and_dicts module</a></li>
<li class="toctree-l2"><a class="reference internal" href="GPy.core.parameterization.html#module-GPy.core.parameterization.observable_array">GPy.core.parameterization.observable_array module</a></li>
<li class="toctree-l2"><a class="reference internal" href="GPy.core.parameterization.html#module-GPy.core.parameterization.param">GPy.core.parameterization.param module</a></li>
<li class="toctree-l2"><a class="reference internal" href="GPy.core.parameterization.html#module-GPy.core.parameterization.parameter_core">GPy.core.parameterization.parameter_core module</a></li>
<li class="toctree-l2"><a class="reference internal" href="GPy.core.parameterization.html#module-GPy.core.parameterization.parameterized">GPy.core.parameterization.parameterized module</a></li>
<li class="toctree-l2"><a class="reference internal" href="GPy.core.parameterization.html#module-GPy.core.parameterization.priors">GPy.core.parameterization.priors module</a></li>
<li class="toctree-l2"><a class="reference internal" href="GPy.core.parameterization.html#module-GPy.core.parameterization.ties_and_remappings">GPy.core.parameterization.ties_and_remappings module</a></li>
<li class="toctree-l2"><a class="reference internal" href="GPy.core.parameterization.html#module-GPy.core.parameterization.transformations">GPy.core.parameterization.transformations module</a></li>
<li class="toctree-l2"><a class="reference internal" href="GPy.core.parameterization.html#module-GPy.core.parameterization.variational">GPy.core.parameterization.variational module</a></li>
<li class="toctree-l2"><a class="reference internal" href="GPy.core.parameterization.html#module-GPy.core.parameterization">Module contents</a></li>
</ul>
</li>
</ul>
</div>
</div>
<div class="section" id="submodules">
<h2>Submodules<a class="headerlink" href="#submodules" title="Permalink to this headline"></a></h2>
</div>
<div class="section" id="module-GPy.core.gp">
<span id="gpy-core-gp-module"></span><h2>GPy.core.gp module<a class="headerlink" href="#module-GPy.core.gp" title="Permalink to this headline"></a></h2>
<dl class="class">
<dt id="GPy.core.gp.GP">
<em class="property">class </em><code class="descclassname">GPy.core.gp.</code><code class="descname">GP</code><span class="sig-paren">(</span><em>X</em>, <em>Y</em>, <em>kernel</em>, <em>likelihood</em>, <em>inference_method=None</em>, <em>name='gp'</em>, <em>Y_metadata=None</em>, <em>normalizer=False</em><span class="sig-paren">)</span><a class="reference internal" href="_modules/GPy/core/gp.html#GP"><span class="viewcode-link">[source]</span></a><a class="headerlink" href="#GPy.core.gp.GP" title="Permalink to this definition"></a></dt>
<dd><p>Bases: <a class="reference internal" href="#GPy.core.model.Model" title="GPy.core.model.Model"><code class="xref py py-class docutils literal"><span class="pre">GPy.core.model.Model</span></code></a></p>
<p>General purpose Gaussian process model</p>
<table class="docutils field-list" frame="void" rules="none">
<col class="field-name" />
<col class="field-body" />
<tbody valign="top">
<tr class="field-odd field"><th class="field-name">Parameters:</th><td class="field-body"><ul class="first simple">
<li><strong>X</strong> &#8211; input observations</li>
<li><strong>Y</strong> &#8211; output observations</li>
<li><strong>kernel</strong> &#8211; a GPy kernel, defaults to rbf+white</li>
<li><strong>likelihood</strong> &#8211; a GPy likelihood</li>
<li><strong>inference_method</strong> &#8211; The <a class="reference internal" href="GPy.inference.latent_function_inference.html#GPy.inference.latent_function_inference.LatentFunctionInference" title="GPy.inference.latent_function_inference.LatentFunctionInference"><code class="xref py py-class docutils literal"><span class="pre">LatentFunctionInference</span></code></a> inference method to use for this GP</li>
<li><strong>normalizer</strong> (<a class="reference internal" href="GPy.util.html#GPy.util.normalizer.Norm" title="GPy.util.normalizer.Norm"><em>Norm</em></a>) &#8211; normalize the outputs Y.
Prediction will be un-normalized using this normalizer.
If normalizer is None, we will normalize using MeanNorm.
If normalizer is False, no normalization will be done.</li>
</ul>
</td>
</tr>
<tr class="field-even field"><th class="field-name">Return type:</th><td class="field-body"><p class="first last">model object</p>
</td>
</tr>
</tbody>
</table>
<div class="admonition note">
<p class="first admonition-title">Note</p>
<p class="last">Multiple independent outputs are allowed using columns of Y</p>
</div>
<dl class="method">
<dt id="GPy.core.gp.GP.infer_newX">
<code class="descname">infer_newX</code><span class="sig-paren">(</span><em>Y_new</em>, <em>optimize=True</em><span class="sig-paren">)</span><a class="reference internal" href="_modules/GPy/core/gp.html#GP.infer_newX"><span class="viewcode-link">[source]</span></a><a class="headerlink" href="#GPy.core.gp.GP.infer_newX" title="Permalink to this definition"></a></dt>
<dd><p>Infer the distribution of X for the new observed data <em>Y_new</em>.</p>
<table class="docutils field-list" frame="void" rules="none">
<col class="field-name" />
<col class="field-body" />
<tbody valign="top">
<tr class="field-odd field"><th class="field-name">Parameters:</th><td class="field-body"><ul class="first simple">
<li><strong>Y_new</strong> (<em>numpy.ndarray</em>) &#8211; the new observed data for inference</li>
<li><strong>optimize</strong> (<em>boolean</em>) &#8211; whether to optimize the location of new X (True by default)</li>
</ul>
</td>
</tr>
<tr class="field-even field"><th class="field-name">Returns:</th><td class="field-body"><p class="first">a tuple containing the posterior estimation of X and the model that optimize X</p>
</td>
</tr>
<tr class="field-odd field"><th class="field-name">Return type:</th><td class="field-body"><p class="first last">(<a class="reference internal" href="GPy.core.parameterization.html#GPy.core.parameterization.variational.VariationalPosterior" title="GPy.core.parameterization.variational.VariationalPosterior"><code class="xref py py-class docutils literal"><span class="pre">VariationalPosterior</span></code></a> or numpy.ndarray, <a class="reference internal" href="#GPy.core.model.Model" title="GPy.core.model.Model"><code class="xref py py-class docutils literal"><span class="pre">Model</span></code></a>)</p>
</td>
</tr>
</tbody>
</table>
</dd></dl>
<dl class="method">
<dt id="GPy.core.gp.GP.input_sensitivity">
<code class="descname">input_sensitivity</code><span class="sig-paren">(</span><em>summarize=True</em><span class="sig-paren">)</span><a class="reference internal" href="_modules/GPy/core/gp.html#GP.input_sensitivity"><span class="viewcode-link">[source]</span></a><a class="headerlink" href="#GPy.core.gp.GP.input_sensitivity" title="Permalink to this definition"></a></dt>
<dd><p>Returns the sensitivity for each dimension of this model</p>
</dd></dl>
<dl class="method">
<dt id="GPy.core.gp.GP.log_likelihood">
<code class="descname">log_likelihood</code><span class="sig-paren">(</span><span class="sig-paren">)</span><a class="reference internal" href="_modules/GPy/core/gp.html#GP.log_likelihood"><span class="viewcode-link">[source]</span></a><a class="headerlink" href="#GPy.core.gp.GP.log_likelihood" title="Permalink to this definition"></a></dt>
<dd><p>The log marginal likelihood of the model, <img class="math" src="_images/math/836d46125c08b336c780001fd9b6cfa2ecd6f6d6.png" alt="p(\mathbf{y})"/>, this is the objective function of the model being optimised</p>
</dd></dl>
<dl class="method">
<dt id="GPy.core.gp.GP.optimize">
<code class="descname">optimize</code><span class="sig-paren">(</span><em>optimizer=None</em>, <em>start=None</em>, <em>**kwargs</em><span class="sig-paren">)</span><a class="reference internal" href="_modules/GPy/core/gp.html#GP.optimize"><span class="viewcode-link">[source]</span></a><a class="headerlink" href="#GPy.core.gp.GP.optimize" title="Permalink to this definition"></a></dt>
<dd><p>Optimize the model using self.log_likelihood and self.log_likelihood_gradient, as well as self.priors.
kwargs are passed to the optimizer. They can be:</p>
<table class="docutils field-list" frame="void" rules="none">
<col class="field-name" />
<col class="field-body" />
<tbody valign="top">
<tr class="field-odd field"><th class="field-name">Parameters:</th><td class="field-body"><ul class="first simple">
<li><strong>max_f_eval</strong> (<em>int</em>) &#8211; maximum number of function evaluations</li>
<li><strong>optimizer</strong> (<em>string</em>) &#8211; which optimizer to use (defaults to self.preferred_optimizer), a range of optimisers can be found in <a href="#id1"><span class="problematic" id="id2">:module:`~GPy.inference.optimization`</span></a>, they include &#8216;scg&#8217;, &#8216;lbfgs&#8217;, &#8216;tnc&#8217;.</li>
</ul>
</td>
</tr>
<tr class="field-even field"><th class="field-name">Messages:</th><td class="field-body"><p class="first last">whether to display during optimisation</p>
</td>
</tr>
</tbody>
</table>
</dd></dl>
<dl class="method">
<dt id="GPy.core.gp.GP.parameters_changed">
<code class="descname">parameters_changed</code><span class="sig-paren">(</span><span class="sig-paren">)</span><a class="reference internal" href="_modules/GPy/core/gp.html#GP.parameters_changed"><span class="viewcode-link">[source]</span></a><a class="headerlink" href="#GPy.core.gp.GP.parameters_changed" title="Permalink to this definition"></a></dt>
<dd><p>Method that is called upon any changes to <a class="reference internal" href="GPy.core.parameterization.html#GPy.core.parameterization.param.Param" title="GPy.core.parameterization.param.Param"><code class="xref py py-class docutils literal"><span class="pre">Param</span></code></a> variables within the model.
In particular in the GP class this method reperforms inference, recalculating the posterior and log marginal likelihood and gradients of the model</p>
<div class="admonition warning">
<p class="first admonition-title">Warning</p>
<p class="last">This method is not designed to be called manually, the framework is set up to automatically call this method upon changes to parameters, if you call
this method yourself, there may be unexpected consequences.</p>
</div>
</dd></dl>
<dl class="method">
<dt id="GPy.core.gp.GP.plot">
<code class="descname">plot</code><span class="sig-paren">(</span><em>plot_limits=None</em>, <em>which_data_rows='all'</em>, <em>which_data_ycols='all'</em>, <em>fixed_inputs=[]</em>, <em>levels=20</em>, <em>samples=0</em>, <em>fignum=None</em>, <em>ax=None</em>, <em>resolution=None</em>, <em>plot_raw=False</em>, <em>linecol=None</em>, <em>fillcol=None</em>, <em>Y_metadata=None</em>, <em>data_symbol='kx'</em><span class="sig-paren">)</span><a class="reference internal" href="_modules/GPy/core/gp.html#GP.plot"><span class="viewcode-link">[source]</span></a><a class="headerlink" href="#GPy.core.gp.GP.plot" title="Permalink to this definition"></a></dt>
<dd><dl class="docutils">
<dt>Plot the posterior of the GP.</dt>
<dd><ul class="first last simple">
<li>In one dimension, the function is plotted with a shaded region identifying two standard deviations.</li>
<li>In two dimensions, a contour-plot shows the mean predicted function</li>
<li>In higher dimensions, use fixed_inputs to plot the GP with some of the inputs fixed.</li>
</ul>
</dd>
</dl>
<p>Can plot only part of the data and part of the posterior functions
using which_data_rows, which_data_ycols.</p>
<table class="docutils field-list" frame="void" rules="none">
<col class="field-name" />
<col class="field-body" />
<tbody valign="top">
<tr class="field-odd field"><th class="field-name">Parameters:</th><td class="field-body"><ul class="first last simple">
<li><strong>plot_limits</strong> (<em>np.array</em>) &#8211; The limits of the plot. If 1D [xmin,xmax], if 2D [[xmin,ymin],[xmax,ymax]]. Defaults to data limits</li>
<li><strong>which_data_rows</strong> (<em>&#8216;all&#8217; or a slice object to slice model.X, model.Y</em>) &#8211; which of the training data to plot (default all)</li>
<li><strong>which_data_ycols</strong> (<em>&#8216;all&#8217; or a list of integers</em>) &#8211; when the data has several columns (independent outputs), only plot these</li>
<li><strong>fixed_inputs</strong> (<em>a list of tuples</em>) &#8211; a list of tuple [(i,v), (i,v)...], specifying that input index i should be set to value v.</li>
<li><strong>resolution</strong> (<em>int</em>) &#8211; the number of intervals to sample the GP on. Defaults to 200 in 1D and 50 (a 50x50 grid) in 2D</li>
<li><strong>levels</strong> (<em>int</em>) &#8211; number of levels to plot in a contour plot.</li>
<li><strong>levels</strong> &#8211; for 2D plotting, the number of contour levels to use; if ax is None, create a new figure</li>
<li><strong>samples</strong> (<em>int</em>) &#8211; the number of a posteriori samples to plot</li>
<li><strong>fignum</strong> (<em>figure number</em>) &#8211; figure to plot on.</li>
<li><strong>ax</strong> (<em>axes handle</em>) &#8211; axes to plot on.</li>
<li><strong>linecol</strong> (<em>color either as Tango.colorsHex object or character (&#8216;r&#8217; is red, &#8216;g&#8217; is green) as is standard in matplotlib</em>) &#8211; color of line to plot [Tango.colorsHex[&#8216;darkBlue&#8217;]]</li>
<li><strong>fillcol</strong> (<em>color either as Tango.colorsHex object or character (&#8216;r&#8217; is red, &#8216;g&#8217; is green) as is standard in matplotlib</em>) &#8211; color of fill [Tango.colorsHex[&#8216;lightBlue&#8217;]]</li>
<li><strong>Y_metadata</strong> (<em>dict</em>) &#8211; additional data associated with Y which may be needed</li>
<li><strong>data_symbol</strong> (<em>color either as Tango.colorsHex object or character (&#8216;r&#8217; is red, &#8216;g&#8217; is green) alongside marker type, as is standard in matplotlib.</em>) &#8211; symbol as used matplotlib, by default this is a black cross (&#8216;kx&#8217;)</li>
</ul>
</td>
</tr>
</tbody>
</table>
</dd></dl>
<dl class="method">
<dt id="GPy.core.gp.GP.plot_f">
<code class="descname">plot_f</code><span class="sig-paren">(</span><em>plot_limits=None</em>, <em>which_data_rows='all'</em>, <em>which_data_ycols='all'</em>, <em>fixed_inputs=[]</em>, <em>levels=20</em>, <em>samples=0</em>, <em>fignum=None</em>, <em>ax=None</em>, <em>resolution=None</em>, <em>plot_raw=True</em>, <em>linecol=None</em>, <em>fillcol=None</em>, <em>Y_metadata=None</em>, <em>data_symbol='kx'</em><span class="sig-paren">)</span><a class="reference internal" href="_modules/GPy/core/gp.html#GP.plot_f"><span class="viewcode-link">[source]</span></a><a class="headerlink" href="#GPy.core.gp.GP.plot_f" title="Permalink to this definition"></a></dt>
<dd><p>Plot the GP&#8217;s view of the world, where the data is normalized and before applying a likelihood.
This is a call to plot with plot_raw=True.
Data will not be plotted in this, as the GP&#8217;s view of the world
may live in another space, or units than the data.</p>
<p>Can plot only part of the data and part of the posterior functions
using which_data_rows, which_data_ycols.</p>
<table class="docutils field-list" frame="void" rules="none">
<col class="field-name" />
<col class="field-body" />
<tbody valign="top">
<tr class="field-odd field"><th class="field-name">Parameters:</th><td class="field-body"><ul class="first last simple">
<li><strong>plot_limits</strong> (<em>np.array</em>) &#8211; The limits of the plot. If 1D [xmin,xmax], if 2D [[xmin,ymin],[xmax,ymax]]. Defaults to data limits</li>
<li><strong>which_data_rows</strong> (<em>&#8216;all&#8217; or a slice object to slice model.X, model.Y</em>) &#8211; which of the training data to plot (default all)</li>
<li><strong>which_data_ycols</strong> (<em>&#8216;all&#8217; or a list of integers</em>) &#8211; when the data has several columns (independent outputs), only plot these</li>
<li><strong>fixed_inputs</strong> (<em>a list of tuples</em>) &#8211; a list of tuple [(i,v), (i,v)...], specifying that input index i should be set to value v.</li>
<li><strong>resolution</strong> (<em>int</em>) &#8211; the number of intervals to sample the GP on. Defaults to 200 in 1D and 50 (a 50x50 grid) in 2D</li>
<li><strong>levels</strong> (<em>int</em>) &#8211; number of levels to plot in a contour plot.</li>
<li><strong>levels</strong> &#8211; for 2D plotting, the number of contour levels to use; if ax is None, create a new figure</li>
<li><strong>samples</strong> (<em>int</em>) &#8211; the number of a posteriori samples to plot</li>
<li><strong>fignum</strong> (<em>figure number</em>) &#8211; figure to plot on.</li>
<li><strong>ax</strong> (<em>axes handle</em>) &#8211; axes to plot on.</li>
<li><strong>linecol</strong> (<em>color either as Tango.colorsHex object or character (&#8216;r&#8217; is red, &#8216;g&#8217; is green) as is standard in matplotlib</em>) &#8211; color of line to plot [Tango.colorsHex[&#8216;darkBlue&#8217;]]</li>
<li><strong>fillcol</strong> (<em>color either as Tango.colorsHex object or character (&#8216;r&#8217; is red, &#8216;g&#8217; is green) as is standard in matplotlib</em>) &#8211; color of fill [Tango.colorsHex[&#8216;lightBlue&#8217;]]</li>
<li><strong>Y_metadata</strong> (<em>dict</em>) &#8211; additional data associated with Y which may be needed</li>
<li><strong>data_symbol</strong> (<em>color either as Tango.colorsHex object or character (&#8216;r&#8217; is red, &#8216;g&#8217; is green) alongside marker type, as is standard in matplotlib.</em>) &#8211; symbol as used matplotlib, by default this is a black cross (&#8216;kx&#8217;)</li>
</ul>
</td>
</tr>
</tbody>
</table>
</dd></dl>
<dl class="method">
<dt id="GPy.core.gp.GP.posterior_samples">
<code class="descname">posterior_samples</code><span class="sig-paren">(</span><em>X</em>, <em>size=10</em>, <em>full_cov=False</em>, <em>Y_metadata=None</em><span class="sig-paren">)</span><a class="reference internal" href="_modules/GPy/core/gp.html#GP.posterior_samples"><span class="viewcode-link">[source]</span></a><a class="headerlink" href="#GPy.core.gp.GP.posterior_samples" title="Permalink to this definition"></a></dt>
<dd><p>Samples the posterior GP at the points X.</p>
<table class="docutils field-list" frame="void" rules="none">
<col class="field-name" />
<col class="field-body" />
<tbody valign="top">
<tr class="field-odd field"><th class="field-name">Parameters:</th><td class="field-body"><ul class="first simple">
<li><strong>X</strong> (<em>np.ndarray (Nnew x self.input_dim.)</em>) &#8211; the points at which to take the samples.</li>
<li><strong>size</strong> (<em>int.</em>) &#8211; the number of a posteriori samples.</li>
<li><strong>full_cov</strong> (<em>bool.</em>) &#8211; whether to return the full covariance matrix, or just the diagonal.</li>
<li><strong>noise_model</strong> (<em>integer.</em>) &#8211; for mixed noise likelihood, the noise model to use in the samples.</li>
</ul>
</td>
</tr>
<tr class="field-even field"><th class="field-name">Returns:</th><td class="field-body"><p class="first last">Ysim: set of simulations, a Numpy array (N x samples).</p>
</td>
</tr>
</tbody>
</table>
</dd></dl>
<dl class="method">
<dt id="GPy.core.gp.GP.posterior_samples_f">
<code class="descname">posterior_samples_f</code><span class="sig-paren">(</span><em>X</em>, <em>size=10</em>, <em>full_cov=True</em><span class="sig-paren">)</span><a class="reference internal" href="_modules/GPy/core/gp.html#GP.posterior_samples_f"><span class="viewcode-link">[source]</span></a><a class="headerlink" href="#GPy.core.gp.GP.posterior_samples_f" title="Permalink to this definition"></a></dt>
<dd><p>Samples the posterior GP at the points X.</p>
<table class="docutils field-list" frame="void" rules="none">
<col class="field-name" />
<col class="field-body" />
<tbody valign="top">
<tr class="field-odd field"><th class="field-name">Parameters:</th><td class="field-body"><ul class="first simple">
<li><strong>X</strong> (<em>np.ndarray (Nnew x self.input_dim)</em>) &#8211; The points at which to take the samples.</li>
<li><strong>size</strong> (<em>int.</em>) &#8211; the number of a posteriori samples.</li>
<li><strong>full_cov</strong> (<em>bool.</em>) &#8211; whether to return the full covariance matrix, or just the diagonal.</li>
</ul>
</td>
</tr>
<tr class="field-even field"><th class="field-name">Returns:</th><td class="field-body"><p class="first">Ysim: set of simulations</p>
</td>
</tr>
<tr class="field-odd field"><th class="field-name">Return type:</th><td class="field-body"><p class="first last">np.ndarray (N x samples)</p>
</td>
</tr>
</tbody>
</table>
</dd></dl>
<dl class="method">
<dt id="GPy.core.gp.GP.predict">
<code class="descname">predict</code><span class="sig-paren">(</span><em>Xnew</em>, <em>full_cov=False</em>, <em>Y_metadata=None</em>, <em>kern=None</em><span class="sig-paren">)</span><a class="reference internal" href="_modules/GPy/core/gp.html#GP.predict"><span class="viewcode-link">[source]</span></a><a class="headerlink" href="#GPy.core.gp.GP.predict" title="Permalink to this definition"></a></dt>
<dd><p>Predict the function(s) at the new point(s) Xnew.</p>
<table class="docutils field-list" frame="void" rules="none">
<col class="field-name" />
<col class="field-body" />
<tbody valign="top">
<tr class="field-odd field"><th class="field-name">Parameters:</th><td class="field-body"><ul class="first simple">
<li><strong>Xnew</strong> (<em>np.ndarray (Nnew x self.input_dim)</em>) &#8211; The points at which to make a prediction</li>
<li><strong>full_cov</strong> (<em>bool</em>) &#8211; whether to return the full covariance matrix, or just
the diagonal</li>
<li><strong>Y_metadata</strong> &#8211; metadata about the predicting point to pass to the likelihood</li>
<li><strong>kern</strong> &#8211; The kernel to use for prediction (defaults to the model
kern). this is useful for examining e.g. subprocesses.</li>
</ul>
</td>
</tr>
<tr class="field-even field"><th class="field-name">Returns:</th><td class="field-body"><p class="first last"><dl class="docutils">
<dt>(mean, var, lower_upper):</dt>
<dd><p class="first last">mean: posterior mean, a Numpy array, Nnew x self.input_dim
var: posterior variance, a Numpy array, Nnew x 1 if full_cov=False, Nnew x Nnew otherwise
lower_upper: lower and upper boundaries of the 95% confidence intervals, Numpy arrays, Nnew x self.input_dim</p>
</dd>
</dl>
<p>If full_cov and self.input_dim &gt; 1, the return shape of var is Nnew x Nnew x self.input_dim. If self.input_dim == 1, the return shape is Nnew x Nnew.
This is to allow for different normalizations of the output dimensions.</p>
</p>
</td>
</tr>
</tbody>
</table>
</dd></dl>
<dl class="method">
<dt id="GPy.core.gp.GP.predict_quantiles">
<code class="descname">predict_quantiles</code><span class="sig-paren">(</span><em>X</em>, <em>quantiles=(2.5</em>, <em>97.5)</em>, <em>Y_metadata=None</em><span class="sig-paren">)</span><a class="reference internal" href="_modules/GPy/core/gp.html#GP.predict_quantiles"><span class="viewcode-link">[source]</span></a><a class="headerlink" href="#GPy.core.gp.GP.predict_quantiles" title="Permalink to this definition"></a></dt>
<dd><p>Get the predictive quantiles around the prediction at X</p>
<table class="docutils field-list" frame="void" rules="none">
<col class="field-name" />
<col class="field-body" />
<tbody valign="top">
<tr class="field-odd field"><th class="field-name">Parameters:</th><td class="field-body"><ul class="first simple">
<li><strong>X</strong> (<em>np.ndarray (Xnew x self.input_dim)</em>) &#8211; The points at which to make a prediction</li>
<li><strong>quantiles</strong> (<em>tuple</em>) &#8211; tuple of quantiles, default is (2.5, 97.5) which is the 95% interval</li>
</ul>
</td>
</tr>
<tr class="field-even field"><th class="field-name">Returns:</th><td class="field-body"><p class="first">list of quantiles for each X and predictive quantiles for interval combination</p>
</td>
</tr>
<tr class="field-odd field"><th class="field-name">Return type:</th><td class="field-body"><p class="first last">[np.ndarray (Xnew x self.input_dim), np.ndarray (Xnew x self.input_dim)]</p>
</td>
</tr>
</tbody>
</table>
</dd></dl>
<dl class="method">
<dt id="GPy.core.gp.GP.predictive_gradients">
<code class="descname">predictive_gradients</code><span class="sig-paren">(</span><em>Xnew</em><span class="sig-paren">)</span><a class="reference internal" href="_modules/GPy/core/gp.html#GP.predictive_gradients"><span class="viewcode-link">[source]</span></a><a class="headerlink" href="#GPy.core.gp.GP.predictive_gradients" title="Permalink to this definition"></a></dt>
<dd><p>Compute the derivatives of the latent function with respect to X*</p>
<p>Given a set of points at which to predict X* (size [N*,Q]), compute the
derivatives of the mean and variance. Resulting arrays are sized:</p>
<blockquote>
<div><p>dmu_dX* &#8211; [N*, Q ,D], where D is the number of outputs in this GP (usually one).</p>
<p>dv_dX* &#8211; [N*, Q], (since all outputs have the same variance)</p>
</div></blockquote>
<table class="docutils field-list" frame="void" rules="none">
<col class="field-name" />
<col class="field-body" />
<tbody valign="top">
<tr class="field-odd field"><th class="field-name">Parameters:</th><td class="field-body"><strong>X</strong> (<em>np.ndarray (Xnew x self.input_dim)</em>) &#8211; The points at which to get the predictive gradients</td>
</tr>
<tr class="field-even field"><th class="field-name">Returns:</th><td class="field-body">dmu_dX, dv_dX</td>
</tr>
<tr class="field-odd field"><th class="field-name">Return type:</th><td class="field-body">[np.ndarray (N*, Q ,D), np.ndarray (N*,Q) ]</td>
</tr>
</tbody>
</table>
</dd></dl>
<dl class="method">
<dt id="GPy.core.gp.GP.set_X">
<code class="descname">set_X</code><span class="sig-paren">(</span><em>X</em><span class="sig-paren">)</span><a class="reference internal" href="_modules/GPy/core/gp.html#GP.set_X"><span class="viewcode-link">[source]</span></a><a class="headerlink" href="#GPy.core.gp.GP.set_X" title="Permalink to this definition"></a></dt>
<dd><p>Set the input data of the model</p>
<table class="docutils field-list" frame="void" rules="none">
<col class="field-name" />
<col class="field-body" />
<tbody valign="top">
<tr class="field-odd field"><th class="field-name">Parameters:</th><td class="field-body"><strong>X</strong> (<em>np.ndarray</em>) &#8211; input observations</td>
</tr>
</tbody>
</table>
</dd></dl>
<dl class="method">
<dt id="GPy.core.gp.GP.set_XY">
<code class="descname">set_XY</code><span class="sig-paren">(</span><em>X=None</em>, <em>Y=None</em><span class="sig-paren">)</span><a class="reference internal" href="_modules/GPy/core/gp.html#GP.set_XY"><span class="viewcode-link">[source]</span></a><a class="headerlink" href="#GPy.core.gp.GP.set_XY" title="Permalink to this definition"></a></dt>
<dd><p>Set the input / output data of the model
This is useful if we wish to change our existing data but maintain the same model</p>
<table class="docutils field-list" frame="void" rules="none">
<col class="field-name" />
<col class="field-body" />
<tbody valign="top">
<tr class="field-odd field"><th class="field-name">Parameters:</th><td class="field-body"><ul class="first last simple">
<li><strong>X</strong> (<em>np.ndarray</em>) &#8211; input observations</li>
<li><strong>Y</strong> (<em>np.ndarray</em>) &#8211; output observations</li>
</ul>
</td>
</tr>
</tbody>
</table>
</dd></dl>
<dl class="method">
<dt id="GPy.core.gp.GP.set_Y">
<code class="descname">set_Y</code><span class="sig-paren">(</span><em>Y</em><span class="sig-paren">)</span><a class="reference internal" href="_modules/GPy/core/gp.html#GP.set_Y"><span class="viewcode-link">[source]</span></a><a class="headerlink" href="#GPy.core.gp.GP.set_Y" title="Permalink to this definition"></a></dt>
<dd><p>Set the output data of the model</p>
<table class="docutils field-list" frame="void" rules="none">
<col class="field-name" />
<col class="field-body" />
<tbody valign="top">
<tr class="field-odd field"><th class="field-name">Parameters:</th><td class="field-body"><strong>Y</strong> (<em>np.ndarray</em>) &#8211; output observations</td>
</tr>
</tbody>
</table>
</dd></dl>
</dd></dl>
</div>
<div class="section" id="module-GPy.core.mapping">
<span id="gpy-core-mapping-module"></span><h2>GPy.core.mapping module<a class="headerlink" href="#module-GPy.core.mapping" title="Permalink to this headline"></a></h2>
<dl class="class">
<dt id="GPy.core.mapping.Bijective_mapping">
<em class="property">class </em><code class="descclassname">GPy.core.mapping.</code><code class="descname">Bijective_mapping</code><span class="sig-paren">(</span><em>input_dim</em>, <em>output_dim</em>, <em>name='bijective_mapping'</em><span class="sig-paren">)</span><a class="reference internal" href="_modules/GPy/core/mapping.html#Bijective_mapping"><span class="viewcode-link">[source]</span></a><a class="headerlink" href="#GPy.core.mapping.Bijective_mapping" title="Permalink to this definition"></a></dt>
<dd><p>Bases: <a class="reference internal" href="#GPy.core.mapping.Mapping" title="GPy.core.mapping.Mapping"><code class="xref py py-class docutils literal"><span class="pre">GPy.core.mapping.Mapping</span></code></a></p>
<p>This is a mapping that is bijective, i.e. you can go from X to f and
also back from f to X. The inverse mapping is called g().</p>
<dl class="method">
<dt id="GPy.core.mapping.Bijective_mapping.g">
<code class="descname">g</code><span class="sig-paren">(</span><em>f</em><span class="sig-paren">)</span><a class="reference internal" href="_modules/GPy/core/mapping.html#Bijective_mapping.g"><span class="viewcode-link">[source]</span></a><a class="headerlink" href="#GPy.core.mapping.Bijective_mapping.g" title="Permalink to this definition"></a></dt>
<dd><p>Inverse mapping from output domain of the function to the inputs.</p>
</dd></dl>
</dd></dl>
<dl class="class">
<dt id="GPy.core.mapping.Mapping">
<em class="property">class </em><code class="descclassname">GPy.core.mapping.</code><code class="descname">Mapping</code><span class="sig-paren">(</span><em>input_dim</em>, <em>output_dim</em>, <em>name='mapping'</em><span class="sig-paren">)</span><a class="reference internal" href="_modules/GPy/core/mapping.html#Mapping"><span class="viewcode-link">[source]</span></a><a class="headerlink" href="#GPy.core.mapping.Mapping" title="Permalink to this definition"></a></dt>
<dd><p>Bases: <a class="reference internal" href="GPy.core.parameterization.html#GPy.core.parameterization.parameterized.Parameterized" title="GPy.core.parameterization.parameterized.Parameterized"><code class="xref py py-class docutils literal"><span class="pre">GPy.core.parameterization.parameterized.Parameterized</span></code></a></p>
<p>Base model for shared behavior between models that can act like a mapping.</p>
<dl class="method">
<dt id="GPy.core.mapping.Mapping.df_dX">
<code class="descname">df_dX</code><span class="sig-paren">(</span><em>dL_df</em>, <em>X</em><span class="sig-paren">)</span><a class="reference internal" href="_modules/GPy/core/mapping.html#Mapping.df_dX"><span class="viewcode-link">[source]</span></a><a class="headerlink" href="#GPy.core.mapping.Mapping.df_dX" title="Permalink to this definition"></a></dt>
<dd><p>Evaluate derivatives of mapping outputs with respect to inputs.</p>
<table class="docutils field-list" frame="void" rules="none">
<col class="field-name" />
<col class="field-body" />
<tbody valign="top">
<tr class="field-odd field"><th class="field-name">Parameters:</th><td class="field-body"><ul class="first simple">
<li><strong>dL_df</strong> (<em>ndarray (num_data x output_dim)</em>) &#8211; gradient of the objective with respect to the function.</li>
<li><strong>X</strong> (<em>ndarray (num_data x input_dim)</em>) &#8211; the input locations where derivatives are to be evaluated.</li>
</ul>
</td>
</tr>
<tr class="field-even field"><th class="field-name">Returns:</th><td class="field-body"><p class="first last">matrix containing gradients of the function with respect to the inputs.</p>
</td>
</tr>
</tbody>
</table>
</dd></dl>
<dl class="method">
<dt id="GPy.core.mapping.Mapping.df_dtheta">
<code class="descname">df_dtheta</code><span class="sig-paren">(</span><em>dL_df</em>, <em>X</em><span class="sig-paren">)</span><a class="reference internal" href="_modules/GPy/core/mapping.html#Mapping.df_dtheta"><span class="viewcode-link">[source]</span></a><a class="headerlink" href="#GPy.core.mapping.Mapping.df_dtheta" title="Permalink to this definition"></a></dt>
<dd><p>The gradient of the outputs of the mapping with respect to each of the parameters.</p>
<table class="docutils field-list" frame="void" rules="none">
<col class="field-name" />
<col class="field-body" />
<tbody valign="top">
<tr class="field-odd field"><th class="field-name">Parameters:</th><td class="field-body"><ul class="first simple">
<li><strong>dL_df</strong> (<em>ndarray (num_data x output_dim)</em>) &#8211; gradient of the objective with respect to the function.</li>
<li><strong>X</strong> (<em>ndarray (num_data x input_dim)</em>) &#8211; input locations where the function is evaluated.</li>
</ul>
</td>
</tr>
<tr class="field-even field"><th class="field-name">Returns:</th><td class="field-body"><p class="first">Matrix containing gradients with respect to parameters of each output for each input data.</p>
</td>
</tr>
<tr class="field-odd field"><th class="field-name">Return type:</th><td class="field-body"><p class="first last">ndarray (num_params length)</p>
</td>
</tr>
</tbody>
</table>
</dd></dl>
<dl class="method">
<dt id="GPy.core.mapping.Mapping.f">
<code class="descname">f</code><span class="sig-paren">(</span><em>X</em><span class="sig-paren">)</span><a class="reference internal" href="_modules/GPy/core/mapping.html#Mapping.f"><span class="viewcode-link">[source]</span></a><a class="headerlink" href="#GPy.core.mapping.Mapping.f" title="Permalink to this definition"></a></dt>
<dd></dd></dl>
<dl class="method">
<dt id="GPy.core.mapping.Mapping.plot">
<code class="descname">plot</code><span class="sig-paren">(</span><em>*args</em><span class="sig-paren">)</span><a class="reference internal" href="_modules/GPy/core/mapping.html#Mapping.plot"><span class="viewcode-link">[source]</span></a><a class="headerlink" href="#GPy.core.mapping.Mapping.plot" title="Permalink to this definition"></a></dt>
<dd><dl class="docutils">
<dt>Plots the mapping associated with the model.</dt>
<dd><ul class="first last simple">
<li>In one dimension, the function is plotted.</li>
<li>In two dimensions, a contour-plot shows the function</li>
<li>In higher dimensions, we&#8217;ve not implemented this yet !TODO!</li>
</ul>
</dd>
</dl>
<p>Can plot only part of the data and part of the posterior functions
using which_data and which_functions</p>
<p>This is a convenience function: arguments are passed to
GPy.plotting.matplot_dep.models_plots.plot_mapping</p>
</dd></dl>
</dd></dl>
<dl class="class">
<dt id="GPy.core.mapping.Mapping_check_df_dX">
<em class="property">class </em><code class="descclassname">GPy.core.mapping.</code><code class="descname">Mapping_check_df_dX</code><span class="sig-paren">(</span><em>mapping=None</em>, <em>dL_df=None</em>, <em>X=None</em><span class="sig-paren">)</span><a class="reference internal" href="_modules/GPy/core/mapping.html#Mapping_check_df_dX"><span class="viewcode-link">[source]</span></a><a class="headerlink" href="#GPy.core.mapping.Mapping_check_df_dX" title="Permalink to this definition"></a></dt>
<dd><p>Bases: <a class="reference internal" href="#GPy.core.mapping.Mapping_check_model" title="GPy.core.mapping.Mapping_check_model"><code class="xref py py-class docutils literal"><span class="pre">GPy.core.mapping.Mapping_check_model</span></code></a></p>
<p>This class allows gradient checks for the gradient of a mapping with respect to X.</p>
</dd></dl>
<dl class="class">
<dt id="GPy.core.mapping.Mapping_check_df_dtheta">
<em class="property">class </em><code class="descclassname">GPy.core.mapping.</code><code class="descname">Mapping_check_df_dtheta</code><span class="sig-paren">(</span><em>mapping=None</em>, <em>dL_df=None</em>, <em>X=None</em><span class="sig-paren">)</span><a class="reference internal" href="_modules/GPy/core/mapping.html#Mapping_check_df_dtheta"><span class="viewcode-link">[source]</span></a><a class="headerlink" href="#GPy.core.mapping.Mapping_check_df_dtheta" title="Permalink to this definition"></a></dt>
<dd><p>Bases: <a class="reference internal" href="#GPy.core.mapping.Mapping_check_model" title="GPy.core.mapping.Mapping_check_model"><code class="xref py py-class docutils literal"><span class="pre">GPy.core.mapping.Mapping_check_model</span></code></a></p>
<p>This class allows gradient checks for the gradient of a mapping with respect to parameters.</p>
</dd></dl>
<dl class="class">
<dt id="GPy.core.mapping.Mapping_check_model">
<em class="property">class </em><code class="descclassname">GPy.core.mapping.</code><code class="descname">Mapping_check_model</code><span class="sig-paren">(</span><em>mapping=None</em>, <em>dL_df=None</em>, <em>X=None</em><span class="sig-paren">)</span><a class="reference internal" href="_modules/GPy/core/mapping.html#Mapping_check_model"><span class="viewcode-link">[source]</span></a><a class="headerlink" href="#GPy.core.mapping.Mapping_check_model" title="Permalink to this definition"></a></dt>
<dd><p>Bases: <a class="reference internal" href="#GPy.core.model.Model" title="GPy.core.model.Model"><code class="xref py py-class docutils literal"><span class="pre">GPy.core.model.Model</span></code></a></p>
<p>This is a dummy model class used as a base class for checking that the
gradients of a given mapping are implemented correctly. It enables
checkgradient() to be called independently on each mapping.</p>
<dl class="method">
<dt id="GPy.core.mapping.Mapping_check_model.log_likelihood">
<code class="descname">log_likelihood</code><span class="sig-paren">(</span><span class="sig-paren">)</span><a class="reference internal" href="_modules/GPy/core/mapping.html#Mapping_check_model.log_likelihood"><span class="viewcode-link">[source]</span></a><a class="headerlink" href="#GPy.core.mapping.Mapping_check_model.log_likelihood" title="Permalink to this definition"></a></dt>
<dd></dd></dl>
</dd></dl>
</div>
<div class="section" id="module-GPy.core.model">
<span id="gpy-core-model-module"></span><h2>GPy.core.model module<a class="headerlink" href="#module-GPy.core.model" title="Permalink to this headline"></a></h2>
<dl class="class">
<dt id="GPy.core.model.Model">
<em class="property">class </em><code class="descclassname">GPy.core.model.</code><code class="descname">Model</code><span class="sig-paren">(</span><em>name</em><span class="sig-paren">)</span><a class="reference internal" href="_modules/GPy/core/model.html#Model"><span class="viewcode-link">[source]</span></a><a class="headerlink" href="#GPy.core.model.Model" title="Permalink to this definition"></a></dt>
<dd><p>Bases: <a class="reference internal" href="GPy.core.parameterization.html#GPy.core.parameterization.parameterized.Parameterized" title="GPy.core.parameterization.parameterized.Parameterized"><code class="xref py py-class docutils literal"><span class="pre">GPy.core.parameterization.parameterized.Parameterized</span></code></a></p>
<dl class="method">
<dt id="GPy.core.model.Model.ensure_default_constraints">
<code class="descname">ensure_default_constraints</code><span class="sig-paren">(</span><em>warning=True</em><span class="sig-paren">)</span><a class="reference internal" href="_modules/GPy/core/model.html#Model.ensure_default_constraints"><span class="viewcode-link">[source]</span></a><a class="headerlink" href="#GPy.core.model.Model.ensure_default_constraints" title="Permalink to this definition"></a></dt>
<dd><p>Ensure that any variables which should clearly be positive
have been constrained somehow. The method performs a regular
expression search on parameter names looking for the terms
&#8216;variance&#8217;, &#8216;lengthscale&#8217;, &#8216;precision&#8217; and &#8216;kappa&#8217;. If any of
these terms are present in the name the parameter is
constrained positive.</p>
<p>DEPRECATED.</p>
</dd></dl>
<dl class="method">
<dt id="GPy.core.model.Model.log_likelihood">
<code class="descname">log_likelihood</code><span class="sig-paren">(</span><span class="sig-paren">)</span><a class="reference internal" href="_modules/GPy/core/model.html#Model.log_likelihood"><span class="viewcode-link">[source]</span></a><a class="headerlink" href="#GPy.core.model.Model.log_likelihood" title="Permalink to this definition"></a></dt>
<dd></dd></dl>
<dl class="method">
<dt id="GPy.core.model.Model.objective_function">
<code class="descname">objective_function</code><span class="sig-paren">(</span><span class="sig-paren">)</span><a class="reference internal" href="_modules/GPy/core/model.html#Model.objective_function"><span class="viewcode-link">[source]</span></a><a class="headerlink" href="#GPy.core.model.Model.objective_function" title="Permalink to this definition"></a></dt>
<dd><p>The objective function for the given algorithm.</p>
<p>This function is the true objective, which wants to be minimized.
Note that all parameters are already set and in place, so you just need
to return the objective function here.</p>
<p>For probabilistic models this is the negative log_likelihood
(including the MAP prior), so we return it here. If your model is not
probabilistic, just return your objective to minimize here!</p>
</dd></dl>
<dl class="method">
<dt id="GPy.core.model.Model.objective_function_gradients">
<code class="descname">objective_function_gradients</code><span class="sig-paren">(</span><span class="sig-paren">)</span><a class="reference internal" href="_modules/GPy/core/model.html#Model.objective_function_gradients"><span class="viewcode-link">[source]</span></a><a class="headerlink" href="#GPy.core.model.Model.objective_function_gradients" title="Permalink to this definition"></a></dt>
<dd><p>The gradients for the objective function for the given algorithm.
The gradients are w.r.t. the <em>negative</em> objective function, as
this framework works with <em>negative</em> log-likelihoods as a default.</p>
<p>You can find the gradient for the parameters in self.gradient at all times.
This is the place, where gradients get stored for parameters.</p>
<p>This function is the true objective, which wants to be minimized.
Note that all parameters are already set and in place, so you just need
to return the gradient here.</p>
<p>For probabilistic models this is the gradient of the negative log_likelihood
(including the MAP prior), so we return it here. If your model is not
probabilistic, just return your <em>negative</em> gradient here!</p>
</dd></dl>
<dl class="method">
<dt id="GPy.core.model.Model.optimize">
<code class="descname">optimize</code><span class="sig-paren">(</span><em>optimizer=None</em>, <em>start=None</em>, <em>**kwargs</em><span class="sig-paren">)</span><a class="reference internal" href="_modules/GPy/core/model.html#Model.optimize"><span class="viewcode-link">[source]</span></a><a class="headerlink" href="#GPy.core.model.Model.optimize" title="Permalink to this definition"></a></dt>
<dd><p>Optimize the model using self.log_likelihood and self.log_likelihood_gradient, as well as self.priors.</p>
<p>kwargs are passed to the optimizer. They can be:</p>
<table class="docutils field-list" frame="void" rules="none">
<col class="field-name" />
<col class="field-body" />
<tbody valign="top">
<tr class="field-odd field"><th class="field-name">Parameters:</th><td class="field-body"><ul class="first simple">
<li><strong>max_f_eval</strong> (<em>int</em>) &#8211; maximum number of function evaluations</li>
<li><strong>optimizer</strong> (<em>string</em>) &#8211; which optimizer to use (defaults to self.preferred_optimizer)</li>
</ul>
</td>
</tr>
<tr class="field-even field"><th class="field-name">Messages:</th><td class="field-body"><p class="first last">whether to display during optimisation</p>
</td>
</tr>
</tbody>
</table>
<dl class="docutils">
<dt>Valid optimizers are:</dt>
<dd><ul class="first last">
<li><dl class="first docutils">
<dt>&#8216;scg&#8217;: scaled conjugate gradient method, recommended for stability.</dt>
<dd><p class="first last">See also GPy.inference.optimization.scg</p>
</dd>
</dl>
</li>
<li><p class="first">&#8216;fmin_tnc&#8217;: truncated Newton method (see scipy.optimize.fmin_tnc)</p>
</li>
<li><p class="first">&#8216;simplex&#8217;: the Nelder-Mead simplex method (see scipy.optimize.fmin),</p>
</li>
<li><p class="first">&#8216;lbfgsb&#8217;: the l-bfgs-b method (see scipy.optimize.fmin_l_bfgs_b),</p>
</li>
<li><p class="first">&#8216;sgd&#8217;: stochastic gradient decsent (see scipy.optimize.sgd). For experts only!</p>
</li>
</ul>
</dd>
</dl>
</dd></dl>
<dl class="method">
<dt id="GPy.core.model.Model.optimize_SGD">
<code class="descname">optimize_SGD</code><span class="sig-paren">(</span><em>momentum=0.1</em>, <em>learning_rate=0.01</em>, <em>iterations=20</em>, <em>**kwargs</em><span class="sig-paren">)</span><a class="reference internal" href="_modules/GPy/core/model.html#Model.optimize_SGD"><span class="viewcode-link">[source]</span></a><a class="headerlink" href="#GPy.core.model.Model.optimize_SGD" title="Permalink to this definition"></a></dt>
<dd></dd></dl>
<dl class="method">
<dt id="GPy.core.model.Model.optimize_restarts">
<code class="descname">optimize_restarts</code><span class="sig-paren">(</span><em>num_restarts=10</em>, <em>robust=False</em>, <em>verbose=True</em>, <em>parallel=False</em>, <em>num_processes=None</em>, <em>**kwargs</em><span class="sig-paren">)</span><a class="reference internal" href="_modules/GPy/core/model.html#Model.optimize_restarts"><span class="viewcode-link">[source]</span></a><a class="headerlink" href="#GPy.core.model.Model.optimize_restarts" title="Permalink to this definition"></a></dt>
<dd><p>Perform random restarts of the model, and set the model to the best
seen solution.</p>
<p>If the robust flag is set, exceptions raised during optimizations will
be handled silently. If _all_ runs fail, the model is reset to the
existing parameter values.</p>
<p><strong>Notes</strong></p>
<table class="docutils field-list" frame="void" rules="none">
<col class="field-name" />
<col class="field-body" />
<tbody valign="top">
<tr class="field-odd field"><th class="field-name">Parameters:</th><td class="field-body"><ul class="first last simple">
<li><strong>num_restarts</strong> (<em>int</em>) &#8211; number of restarts to use (default 10)</li>
<li><strong>robust</strong> (<em>bool</em>) &#8211; whether to handle exceptions silently or not (default False)</li>
<li><strong>parallel</strong> (<em>bool</em>) &#8211; whether to run each restart as a separate process. It relies on the multiprocessing module.</li>
<li><strong>num_processes</strong> &#8211; number of workers in the multiprocessing pool</li>
</ul>
</td>
</tr>
</tbody>
</table>
<p>**kwargs are passed to the optimizer. They can be:</p>
<table class="docutils field-list" frame="void" rules="none">
<col class="field-name" />
<col class="field-body" />
<tbody valign="top">
<tr class="field-odd field"><th class="field-name">Parameters:</th><td class="field-body"><ul class="first last simple">
<li><strong>max_f_eval</strong> (<em>int</em>) &#8211; maximum number of function evaluations</li>
<li><strong>max_iters</strong> (<em>int</em>) &#8211; maximum number of iterations</li>
<li><strong>messages</strong> (<em>bool</em>) &#8211; whether to display during optimisation</li>
</ul>
</td>
</tr>
</tbody>
</table>
<div class="admonition note">
<p class="first admonition-title">Note</p>
<p class="last">If num_processes is None, the number of workers in the</p>
</div>
<p>multiprocessing pool is automatically set to the number of processors
on the current machine.</p>
</dd></dl>
</dd></dl>
</div>
<div class="section" id="module-GPy.core.sparse_gp">
<span id="gpy-core-sparse-gp-module"></span><h2>GPy.core.sparse_gp module<a class="headerlink" href="#module-GPy.core.sparse_gp" title="Permalink to this headline"></a></h2>
<dl class="class">
<dt id="GPy.core.sparse_gp.SparseGP">
<em class="property">class </em><code class="descclassname">GPy.core.sparse_gp.</code><code class="descname">SparseGP</code><span class="sig-paren">(</span><em>X</em>, <em>Y</em>, <em>Z</em>, <em>kernel</em>, <em>likelihood</em>, <em>inference_method=None</em>, <em>name='sparse gp'</em>, <em>Y_metadata=None</em>, <em>normalizer=False</em><span class="sig-paren">)</span><a class="reference internal" href="_modules/GPy/core/sparse_gp.html#SparseGP"><span class="viewcode-link">[source]</span></a><a class="headerlink" href="#GPy.core.sparse_gp.SparseGP" title="Permalink to this definition"></a></dt>
<dd><p>Bases: <a class="reference internal" href="#GPy.core.gp.GP" title="GPy.core.gp.GP"><code class="xref py py-class docutils literal"><span class="pre">GPy.core.gp.GP</span></code></a></p>
<p>A general purpose Sparse GP model</p>
<p>This model allows (approximate) inference using variational DTC or FITC
(Gaussian likelihoods) as well as non-conjugate sparse methods based on
these.</p>
<table class="docutils field-list" frame="void" rules="none">
<col class="field-name" />
<col class="field-body" />
<tbody valign="top">
<tr class="field-odd field"><th class="field-name">Parameters:</th><td class="field-body"><ul class="first last simple">
<li><strong>X</strong> (<em>np.ndarray (num_data x input_dim)</em>) &#8211; inputs</li>
<li><strong>likelihood</strong> (<em>GPy.likelihood.(Gaussian | EP | Laplace)</em>) &#8211; a likelihood instance, containing the observed data</li>
<li><strong>kernel</strong> (<em>a GPy.kern.kern instance</em>) &#8211; the kernel (covariance function). See link kernels</li>
<li><strong>X_variance</strong> (<em>np.ndarray (num_data x input_dim) | None</em>) &#8211; The uncertainty in the measurements of X (Gaussian variance)</li>
<li><strong>Z</strong> (<em>np.ndarray (num_inducing x input_dim)</em>) &#8211; inducing inputs</li>
<li><strong>num_inducing</strong> (<em>int</em>) &#8211; Number of inducing points (optional, default 10. Ignored if Z is not None)</li>
</ul>
</td>
</tr>
</tbody>
</table>
<dl class="method">
<dt id="GPy.core.sparse_gp.SparseGP.has_uncertain_inputs">
<code class="descname">has_uncertain_inputs</code><span class="sig-paren">(</span><span class="sig-paren">)</span><a class="reference internal" href="_modules/GPy/core/sparse_gp.html#SparseGP.has_uncertain_inputs"><span class="viewcode-link">[source]</span></a><a class="headerlink" href="#GPy.core.sparse_gp.SparseGP.has_uncertain_inputs" title="Permalink to this definition"></a></dt>
<dd></dd></dl>
<dl class="method">
<dt id="GPy.core.sparse_gp.SparseGP.parameters_changed">
<code class="descname">parameters_changed</code><span class="sig-paren">(</span><span class="sig-paren">)</span><a class="reference internal" href="_modules/GPy/core/sparse_gp.html#SparseGP.parameters_changed"><span class="viewcode-link">[source]</span></a><a class="headerlink" href="#GPy.core.sparse_gp.SparseGP.parameters_changed" title="Permalink to this definition"></a></dt>
<dd></dd></dl>
</dd></dl>
</div>
<div class="section" id="module-GPy.core.sparse_gp_mpi">
<span id="gpy-core-sparse-gp-mpi-module"></span><h2>GPy.core.sparse_gp_mpi module<a class="headerlink" href="#module-GPy.core.sparse_gp_mpi" title="Permalink to this headline"></a></h2>
<dl class="class">
<dt id="GPy.core.sparse_gp_mpi.SparseGP_MPI">
<em class="property">class </em><code class="descclassname">GPy.core.sparse_gp_mpi.</code><code class="descname">SparseGP_MPI</code><span class="sig-paren">(</span><em>X</em>, <em>Y</em>, <em>Z</em>, <em>kernel</em>, <em>likelihood</em>, <em>variational_prior=None</em>, <em>inference_method=None</em>, <em>name='sparse gp mpi'</em>, <em>Y_metadata=None</em>, <em>mpi_comm=None</em>, <em>normalizer=False</em><span class="sig-paren">)</span><a class="reference internal" href="_modules/GPy/core/sparse_gp_mpi.html#SparseGP_MPI"><span class="viewcode-link">[source]</span></a><a class="headerlink" href="#GPy.core.sparse_gp_mpi.SparseGP_MPI" title="Permalink to this definition"></a></dt>
<dd><p>Bases: <a class="reference internal" href="#GPy.core.sparse_gp.SparseGP" title="GPy.core.sparse_gp.SparseGP"><code class="xref py py-class docutils literal"><span class="pre">GPy.core.sparse_gp.SparseGP</span></code></a></p>
<p>A general purpose Sparse GP model with MPI parallelization support</p>
<p>This model allows (approximate) inference using variational DTC or FITC
(Gaussian likelihoods) as well as non-conjugate sparse methods based on
these.</p>
<table class="docutils field-list" frame="void" rules="none">
<col class="field-name" />
<col class="field-body" />
<tbody valign="top">
<tr class="field-odd field"><th class="field-name">Parameters:</th><td class="field-body"><ul class="first last simple">
<li><strong>X</strong> (<em>np.ndarray (num_data x input_dim)</em>) &#8211; inputs</li>
<li><strong>likelihood</strong> (<em>GPy.likelihood.(Gaussian | EP | Laplace)</em>) &#8211; a likelihood instance, containing the observed data</li>
<li><strong>kernel</strong> (<em>a GPy.kern.kern instance</em>) &#8211; the kernel (covariance function). See link kernels</li>
<li><strong>X_variance</strong> (<em>np.ndarray (num_data x input_dim) | None</em>) &#8211; The uncertainty in the measurements of X (Gaussian variance)</li>
<li><strong>Z</strong> (<em>np.ndarray (num_inducing x input_dim)</em>) &#8211; inducing inputs</li>
<li><strong>num_inducing</strong> (<em>int</em>) &#8211; Number of inducing points (optional, default 10. Ignored if Z is not None)</li>
<li><strong>mpi_comm</strong> (<em>mpi4py.MPI.Intracomm</em>) &#8211; The communication group of MPI, e.g. mpi4py.MPI.COMM_WORLD</li>
</ul>
</td>
</tr>
</tbody>
</table>
<dl class="method">
<dt id="GPy.core.sparse_gp_mpi.SparseGP_MPI.optimize">
<code class="descname">optimize</code><span class="sig-paren">(</span><em>optimizer=None</em>, <em>start=None</em>, <em>**kwargs</em><span class="sig-paren">)</span><a class="reference internal" href="_modules/GPy/core/sparse_gp_mpi.html#SparseGP_MPI.optimize"><span class="viewcode-link">[source]</span></a><a class="headerlink" href="#GPy.core.sparse_gp_mpi.SparseGP_MPI.optimize" title="Permalink to this definition"></a></dt>
<dd></dd></dl>
<dl class="attribute">
<dt id="GPy.core.sparse_gp_mpi.SparseGP_MPI.optimizer_array">
<code class="descname">optimizer_array</code><a class="headerlink" href="#GPy.core.sparse_gp_mpi.SparseGP_MPI.optimizer_array" title="Permalink to this definition"></a></dt>
<dd><p>Array for the optimizer to work on.
This array always lives in the space for the optimizer.
Thus, it is untransformed, going from Transformations.</p>
<p>Setting this array, will make sure the transformed parameters for this model
will be set accordingly. It has to be set with an array, retrieved from
this method, as e.g. fixing will resize the array.</p>
<p>The optimizer should only interfere with this array, such that transformations
are secured.</p>
</dd></dl>
<dl class="method">
<dt id="GPy.core.sparse_gp_mpi.SparseGP_MPI.parameters_changed">
<code class="descname">parameters_changed</code><span class="sig-paren">(</span><span class="sig-paren">)</span><a class="reference internal" href="_modules/GPy/core/sparse_gp_mpi.html#SparseGP_MPI.parameters_changed"><span class="viewcode-link">[source]</span></a><a class="headerlink" href="#GPy.core.sparse_gp_mpi.SparseGP_MPI.parameters_changed" title="Permalink to this definition"></a></dt>
<dd></dd></dl>
</dd></dl>
</div>
<div class="section" id="gpy-core-svigp-module">
<h2>GPy.core.svigp module<a class="headerlink" href="#gpy-core-svigp-module" title="Permalink to this headline"></a></h2>
</div>
<div class="section" id="gpy-core-symbolic-module">
<h2>GPy.core.symbolic module<a class="headerlink" href="#gpy-core-symbolic-module" title="Permalink to this headline"></a></h2>
</div>
<div class="section" id="module-GPy.core">
<span id="module-contents"></span><h2>Module contents<a class="headerlink" href="#module-GPy.core" title="Permalink to this headline"></a></h2>
</div>
</div>
</div>
</div>
</div>
<div class="sphinxsidebar" role="navigation" aria-label="main navigation">
<div class="sphinxsidebarwrapper">
<h3><a href="index.html">Table Of Contents</a></h3>
<ul>
<li><a class="reference internal" href="#">GPy.core package</a><ul>
<li><a class="reference internal" href="#subpackages">Subpackages</a></li>
<li><a class="reference internal" href="#submodules">Submodules</a></li>
<li><a class="reference internal" href="#module-GPy.core.gp">GPy.core.gp module</a></li>
<li><a class="reference internal" href="#module-GPy.core.mapping">GPy.core.mapping module</a></li>
<li><a class="reference internal" href="#module-GPy.core.model">GPy.core.model module</a></li>
<li><a class="reference internal" href="#module-GPy.core.sparse_gp">GPy.core.sparse_gp module</a></li>
<li><a class="reference internal" href="#module-GPy.core.sparse_gp_mpi">GPy.core.sparse_gp_mpi module</a></li>
<li><a class="reference internal" href="#gpy-core-svigp-module">GPy.core.svigp module</a></li>
<li><a class="reference internal" href="#gpy-core-symbolic-module">GPy.core.symbolic module</a></li>
<li><a class="reference internal" href="#module-GPy.core">Module contents</a></li>
</ul>
</li>
</ul>
<h4>Previous topic</h4>
<p class="topless"><a href="GPy.html"
title="previous chapter">GPy package</a></p>
<h4>Next topic</h4>
<p class="topless"><a href="GPy.core.parameterization.html"
title="next chapter">GPy.core.parameterization package</a></p>
<div role="note" aria-label="source link">
<h3>This Page</h3>
<ul class="this-page-menu">
<li><a href="_sources/GPy.core.txt"
rel="nofollow">Show Source</a></li>
</ul>
</div>
<div id="searchbox" style="display: none" role="search">
<h3>Quick search</h3>
<form class="search" action="search.html" method="get">
<input type="text" name="q" />
<input type="submit" value="Go" />
<input type="hidden" name="check_keywords" value="yes" />
<input type="hidden" name="area" value="default" />
</form>
<p class="searchtip" style="font-size: 90%">
Enter search terms or a module, class or function name.
</p>
</div>
<script type="text/javascript">$('#searchbox').show(0);</script>
</div>
</div>
<div class="clearer"></div>
</div>
<div class="related" role="navigation" aria-label="related navigation">
<h3>Navigation</h3>
<ul>
<li class="right" style="margin-right: 10px">
<a href="genindex.html" title="General Index"
>index</a></li>
<li class="right" >
<a href="py-modindex.html" title="Python Module Index"
>modules</a> |</li>
<li class="right" >
<a href="GPy.core.parameterization.html" title="GPy.core.parameterization package"
>next</a> |</li>
<li class="right" >
<a href="GPy.html" title="GPy package"
>previous</a> |</li>
<li class="nav-item nav-item-0"><a href="index.html">GPy documentation</a> &raquo;</li>
<li class="nav-item nav-item-1"><a href="GPy.html" >GPy package</a> &raquo;</li>
</ul>
</div>
<div class="footer" role="contentinfo">
&copy; Copyright 2013, Author.
Created using <a href="http://sphinx-doc.org/">Sphinx</a> 1.3.1.
</div>
</body>
</html>