Merge remote-tracking branch 'origin/devel' into integral

This commit is contained in:
Michael T Smith 2016-06-13 14:31:32 +01:00
commit d96e71c958
12 changed files with 57 additions and 49 deletions

View file

@@ -773,7 +773,7 @@ class DGPLVM_Lamda(Prior, Parameterized):
def compute_cls(self, x):
cls = {}
# Appending each data point to its proper class
for j in xrange(self.datanum):
for j in range(self.datanum):
class_label = self.get_class_label(self.lbl[j])
if class_label not in cls:
cls[class_label] = []
@@ -792,7 +792,7 @@ class DGPLVM_Lamda(Prior, Parameterized):
# Adding data points as tuple to the dictionary so that we can access indices
def compute_indices(self, x):
data_idx = {}
for j in xrange(self.datanum):
for j in range(self.datanum):
class_label = self.get_class_label(self.lbl[j])
if class_label not in data_idx:
data_idx[class_label] = []
@@ -811,7 +811,7 @@ class DGPLVM_Lamda(Prior, Parameterized):
else:
lst_idx = []
# Here we put indices of each class in to the list called lst_idx_all
for m in xrange(len(data_idx[i])):
for m in range(len(data_idx[i])):
lst_idx.append(data_idx[i][m][0])
lst_idx_all.append(lst_idx)
return lst_idx_all
@@ -847,7 +847,7 @@ class DGPLVM_Lamda(Prior, Parameterized):
# pdb.set_trace()
# Calculating Bi
B_i[i] = (M_i[i] - M_0).reshape(1, self.dim)
for k in xrange(self.datanum):
for k in range(self.datanum):
for i in data_idx:
N_i = float(len(data_idx[i]))
if k in lst_idx_all[i]:

View file

@@ -111,8 +111,8 @@ class Symbolic_core():
# rows = func['function'].shape[0]
# cols = func['function'].shape[1]
# self.expressions[key]['derivative'] = sym.zeros(rows, cols)
# for i in xrange(rows):
# for j in xrange(cols):
# for i in range(rows):
# for j in range(cols):
# self.expressions[key]['derivative'][i, j] = extract_derivative(func['function'][i, j], derivative_arguments)
# else:
self.expressions[key]['derivative'] = extract_derivative(func['function'], derivative_arguments)
@@ -123,7 +123,7 @@ class Symbolic_core():
val = 1.0
# TODO: improve approach for initializing parameters.
if parameters is not None:
if parameters.has_key(theta.name):
if theta.name in parameters:
val = parameters[theta.name]
# Add parameter.
@@ -176,7 +176,7 @@ class Symbolic_core():
return gradient
def eval_gradients_X(self, function, partial, **kwargs):
if kwargs.has_key('X'):
if 'X' in kwargs:
gradients_X = np.zeros_like(kwargs['X'])
self.eval_update_cache(**kwargs)
for i, theta in enumerate(self.variables['X']):
@@ -405,7 +405,7 @@ class Symbolic_core():
if var_name == var.name:
expr = expr.subs(var, sub)
break
for m, r in function_substitutes.iteritems():
for m, r in function_substitutes.items():
expr = expr.replace(m, r)#normcdfln, lambda arg : sym.log(normcdf(arg)))
return expr.simplify()
@@ -417,4 +417,4 @@ class Symbolic_core():
else:
return x[0]
return sorted(var_dict.iteritems(), key=sort_key, reverse=reverse)
return sorted(var_dict.items(), key=sort_key, reverse=reverse)

View file

@@ -184,7 +184,7 @@ def bgplvm_oil(optimize=True, verbose=1, plot=True, N=200, Q=7, num_inducing=40,
data_show = GPy.plotting.matplot_dep.visualize.vector_show((m.Y[0, :]))
lvm_visualizer = GPy.plotting.matplot_dep.visualize.lvm_dimselect(m.X.mean.values[0:1, :], # @UnusedVariable
m, data_show, latent_axes=latent_axes, sense_axes=sense_axes, labels=m.data_labels)
raw_input('Press enter to finish')
input('Press enter to finish')
plt.close(fig)
return m
@@ -210,7 +210,7 @@ def ssgplvm_oil(optimize=True, verbose=1, plot=True, N=200, Q=7, num_inducing=40
data_show = GPy.plotting.matplot_dep.visualize.vector_show((m.Y[0, :]))
lvm_visualizer = GPy.plotting.matplot_dep.visualize.lvm_dimselect(m.X.mean.values[0:1, :], # @UnusedVariable
m, data_show, latent_axes=latent_axes, sense_axes=sense_axes, labels=m.data_labels)
raw_input('Press enter to finish')
input('Press enter to finish')
plt.close(fig)
return m
@@ -242,7 +242,7 @@ def _simulate_matern(D1, D2, D3, N, num_inducing, plot_sim=False):
fig.clf()
ax = fig.add_subplot(2, 1, 1)
labls = slist_names
for S, lab in itertools.izip(slist, labls):
for S, lab in zip(slist, labls):
ax.plot(S, label=lab)
ax.legend()
for i, Y in enumerate(Ylist):
@@ -288,7 +288,7 @@ def _simulate_sincos(D1, D2, D3, N, num_inducing, plot_sim=False):
fig.clf()
ax = fig.add_subplot(2, 1, 1)
labls = slist_names
for S, lab in itertools.izip(slist, labls):
for S, lab in zip(slist, labls):
ax.plot(S, label=lab)
ax.legend()
for i, Y in enumerate(Ylist):
@@ -520,7 +520,7 @@ def brendan_faces(optimize=True, verbose=True, plot=True):
y = m.Y[0, :]
data_show = GPy.plotting.matplot_dep.visualize.image_show(y[None, :], dimensions=(20, 28), transpose=True, order='F', invert=False, scale=False)
lvm = GPy.plotting.matplot_dep.visualize.lvm(m.X.mean[0, :].copy(), m, data_show, ax)
raw_input('Press enter to finish')
input('Press enter to finish')
return m
@@ -542,7 +542,7 @@ def olivetti_faces(optimize=True, verbose=True, plot=True):
y = m.Y[0, :]
data_show = GPy.plotting.matplot_dep.visualize.image_show(y[None, :], dimensions=(112, 92), transpose=False, invert=False, scale=False)
lvm = GPy.plotting.matplot_dep.visualize.lvm(m.X.mean[0, :].copy(), m, data_show, ax)
raw_input('Press enter to finish')
input('Press enter to finish')
return m
@@ -577,7 +577,7 @@ def stick(kernel=None, optimize=True, verbose=True, plot=True):
y = m.Y[0, :]
data_show = GPy.plotting.matplot_dep.visualize.stick_show(y[None, :], connect=data['connect'])
lvm_visualizer = GPy.plotting.matplot_dep.visualize.lvm(m.X[:1, :].copy(), m, data_show, latent_axes=ax)
raw_input('Press enter to finish')
input('Press enter to finish')
lvm_visualizer.close()
data_show.close()
return m
@@ -598,7 +598,7 @@ def bcgplvm_linear_stick(kernel=None, optimize=True, verbose=True, plot=True):
y = m.likelihood.Y[0, :]
data_show = GPy.plotting.matplot_dep.visualize.stick_show(y[None, :], connect=data['connect'])
GPy.plotting.matplot_dep.visualize.lvm(m.X[0, :].copy(), m, data_show, ax)
raw_input('Press enter to finish')
input('Press enter to finish')
return m
@@ -619,7 +619,7 @@ def bcgplvm_stick(kernel=None, optimize=True, verbose=True, plot=True):
y = m.likelihood.Y[0, :]
data_show = GPy.plotting.matplot_dep.visualize.stick_show(y[None, :], connect=data['connect'])
GPy.plotting.matplot_dep.visualize.lvm(m.X[0, :].copy(), m, data_show, ax)
# raw_input('Press enter to finish')
# input('Press enter to finish')
return m
@@ -669,7 +669,7 @@ def stick_bgplvm(model=None, optimize=True, verbose=True, plot=True):
fig.canvas.draw()
# Canvas.show doesn't work on OSX.
#fig.canvas.show()
raw_input('Press enter to finish')
input('Press enter to finish')
return m
@@ -693,7 +693,7 @@ def cmu_mocap(subject='35', motion=['01'], in_place=True, optimize=True, verbose
y = m.Y[0, :]
data_show = GPy.plotting.matplot_dep.visualize.skeleton_show(y[None, :], data['skel'])
lvm_visualizer = GPy.plotting.matplot_dep.visualize.lvm(m.X[0].copy(), m, data_show, latent_axes=ax)
raw_input('Press enter to finish')
input('Press enter to finish')
lvm_visualizer.close()
data_show.close()

View file

@@ -10,17 +10,17 @@ Y = np.sin(X) + np.random.randn(*X.shape)*0.1
kernel1 = GPy.kern.Matern32(X.shape[1])
m1 = GPy.models.GPRegression(X,Y, kernel1)
print m1
print(m1)
m1.optimize(optimizer='bfgs',messages=True)
print m1
print(m1)
kernel2 = GPy.kern.sde_Matern32(X.shape[1])
#m2 = SS_model.StateSpace(X,Y, kernel2)
m2 = GPy.models.StateSpace(X,Y, kernel2)
print m2
print(m2)
m2.optimize(optimizer='bfgs',messages=True)
print m2
print(m2)

View file

@@ -24,9 +24,6 @@ from .src.ODE_st import ODE_st
from .src.ODE_t import ODE_t
from .src.poly import Poly
from .src.eq_ode2 import EQ_ODE2
from .src.integral import Integral
from .src.integral_limits import Integral_Limits
from .src.multidimensional_integral_limits import Multidimensional_Integral_Limits
from .src.eq_ode1 import EQ_ODE1
from .src.trunclinear import TruncLinear,TruncLinear_inf
from .src.splitKern import SplitKern,DEtime

View file

@@ -678,7 +678,7 @@ class Likelihood(Parameterized):
burnin_cache = np.zeros(par_chains)
burnin_cache[:] = starting_loc.flatten()
burning_in = True
for i in xrange(burn_in+num_samples):
for i in range(burn_in+num_samples):
next_ind = i-burn_in
if burning_in:
old_y = burnin_cache

View file

@@ -291,12 +291,12 @@ class SSGPLVM(SparseGP_MPI):
Xs[b>self.X.gamma.values] = 0
invcov = (Xs[:,:,:,None]*Xs[:,:,None,:]).sum(1)/noise_var+np.eye(Q)
cov = np.array([pdinv(invcov[s_idx])[0] for s_idx in xrange(invcov.shape[0])])
cov = np.array([pdinv(invcov[s_idx])[0] for s_idx in range(invcov.shape[0])])
Ws = np.empty((nSamples, Q, D))
tmp = (np.transpose(Xs, (0,2,1)).reshape(nSamples*Q,N).dot(self.Y)).reshape(nSamples,Q,D)
mean = (cov[:,:,:,None]*tmp[:,None,:,:]).sum(2)/noise_var
zeros = np.zeros((Q,))
for s_idx in xrange(Xs.shape[0]):
for s_idx in range(Xs.shape[0]):
Ws[s_idx] = (np.random.multivariate_normal(mean=zeros,cov=cov[s_idx],size=(D,))).T+mean[s_idx]
if raw_samples:

View file

@@ -25,7 +25,7 @@ class SSMRD(Model):
self.X = NormalPosterior(means=X, variances=X_variance)
if kernels is None:
kernels = [RBF(input_dim, lengthscale=1./fracs, ARD=True) for i in xrange(len(Ylist))]
kernels = [RBF(input_dim, lengthscale=1./fracs, ARD=True) for i in range(len(Ylist))]
if Zs is None:
Zs = [None]* len(Ylist)
if likelihoods is None:
@@ -34,9 +34,9 @@ class SSMRD(Model):
inference_methods = [None]* len(Ylist)
if IBP:
self.var_priors = [IBPPrior_SSMRD(len(Ylist),input_dim,alpha=alpha) for i in xrange(len(Ylist))]
self.var_priors = [IBPPrior_SSMRD(len(Ylist),input_dim,alpha=alpha) for i in range(len(Ylist))]
else:
self.var_priors = [SpikeAndSlabPrior_SSMRD(nModels=len(Ylist),pi=pi,learnPi=False, group_spike=group_spike) for i in xrange(len(Ylist))]
self.var_priors = [SpikeAndSlabPrior_SSMRD(nModels=len(Ylist),pi=pi,learnPi=False, group_spike=group_spike) for i in range(len(Ylist))]
self.models = [SSGPLVM(y, input_dim, X=X.copy(), X_variance=X_variance.copy(), Gamma=Gammas[i], num_inducing=num_inducing,Z=Zs[i], learnPi=False, group_spike=group_spike,
kernel=kernels[i],inference_method=inference_methods[i],likelihood=likelihoods[i], variational_prior=self.var_priors[i], IBP=IBP, tau=None if taus is None else taus[i],
name='model_'+str(i), mpi_comm=mpi_comm, sharedX=True) for i,y in enumerate(Ylist)]
@@ -73,7 +73,7 @@ class SSMRD(Model):
# Divide latent dimensions
idx = np.empty((input_dim,),dtype=np.int)
residue = (input_dim)%(len(Ylist))
for i in xrange(len(Ylist)):
for i in range(len(Ylist)):
if i < residue:
size = input_dim/len(Ylist)+1
idx[i*size:(i+1)*size] = i
@@ -86,7 +86,7 @@ class SSMRD(Model):
X = np.empty((Ylist[0].shape[0],input_dim))
fracs = np.empty((input_dim,))
from ..util.initialization import initialize_latent
for i in xrange(len(Ylist)):
for i in range(len(Ylist)):
Y = Ylist[i]
dim = (idx==i).sum()
if dim>0:

View file

@@ -13,7 +13,7 @@ import scipy as sp
import scipy.linalg as linalg
try:
import state_space_setup
from . import state_space_setup
setup_available = True
except ImportError as e:
setup_available = False

View file

@@ -96,3 +96,14 @@ class TestDebug(unittest.TestCase):
self.assertTrue((2, np.median(X.mean.values[:,2])) in fixed)
self.assertTrue(len([t for t in fixed if t[0] == 1]) == 0) # Unfixed input should not be in fixed
def test_subarray(self):
import GPy
X = np.zeros((3,6), dtype=bool)
X[[1,1,1],[0,4,5]] = 1
X[1:,[2,3]] = 1
d = GPy.util.subarray_and_sorting.common_subarrays(X,axis=1)
self.assertTrue(len(d) == 3)
X[:, d[tuple(X[:,0])]]
self.assertTrue(d[tuple(X[:,4])] == d[tuple(X[:,0])] == [0, 4, 5])
self.assertTrue(d[tuple(X[:,1])] == [1])

View file

@@ -73,7 +73,7 @@ def prompt_user(prompt):
try:
print(prompt)
choice = raw_input().lower()
choice = input().lower()
# would like to test for exception here, but not sure if we can do that without importing IPython
except:
print('Stdin is not implemented.')
@@ -96,16 +96,16 @@ def prompt_user(prompt):
def data_available(dataset_name=None):
"""Check if the data set is available on the local machine already."""
try:
from itertools import izip_longest
from itertools import zip_longest
except ImportError:
from itertools import zip_longest as izip_longest
from itertools import zip_longest as zip_longest
dr = data_resources[dataset_name]
zip_urls = (dr['files'], )
if 'save_names' in dr: zip_urls += (dr['save_names'], )
else: zip_urls += ([],)
for file_list, save_list in izip_longest(*zip_urls, fillvalue=[]):
for f, s in izip_longest(file_list, save_list, fillvalue=None):
for file_list, save_list in zip_longest(*zip_urls, fillvalue=[]):
for f, s in zip_longest(file_list, save_list, fillvalue=None):
if s is not None: f=s # If there is a save_name given, use that one
if not os.path.exists(os.path.join(data_path, dataset_name, f)):
return False
@@ -138,7 +138,7 @@ def download_url(url, store_directory, save_name=None, messages=True, suffix='')
raise ValueError('Tried url ' + url + suffix + ' and received server error ' + str(response.code))
with open(save_name, 'wb') as f:
meta = response.info()
content_length_str = meta.getheaders("Content-Length")
content_length_str = meta.get("Content-Length")
if content_length_str:
file_size = int(content_length_str[0])
else:
@@ -214,14 +214,14 @@ def download_data(dataset_name=None):
zip_urls = (dr['urls'], dr['files'])
if dr.has_key('save_names'): zip_urls += (dr['save_names'], )
if 'save_names' in dr: zip_urls += (dr['save_names'], )
else: zip_urls += ([],)
if dr.has_key('suffices'): zip_urls += (dr['suffices'], )
if 'suffices' in dr: zip_urls += (dr['suffices'], )
else: zip_urls += ([],)
for url, files, save_names, suffices in itertools.izip_longest(*zip_urls, fillvalue=[]):
for f, save_name, suffix in itertools.izip_longest(files, save_names, suffices, fillvalue=None):
for url, files, save_names, suffices in itertools.zip_longest(*zip_urls, fillvalue=[]):
for f, save_name, suffix in itertools.zip_longest(files, save_names, suffices, fillvalue=None):
download_url(os.path.join(url,f), dataset_name, save_name, suffix=suffix)
return True
@@ -361,7 +361,7 @@ def football_data(season='1314', data_set='football_data'):
return league_dict[string]
def football2num(string):
if football_dict.has_key(string):
if string in football_dict:
return football_dict[string]
else:
football_dict[string] = len(football_dict)+1

View file

@@ -50,7 +50,7 @@ def common_subarrays(X, axis=0):
cnt = count()
def accumulate(x, s, c):
t = tuple(x)
col = c.next()
col = next(c)
iadd(s[t], [col])
return None
if axis == 0: [accumulate(x, subarrays, cnt) for x in X]