[kernel] fix #218 and #325

This commit is contained in:
mzwiessele 2016-03-10 10:21:17 +00:00
parent af76126ef1
commit 30c6fc90ff
3 changed files with 55 additions and 30 deletions

View file

@@ -19,8 +19,8 @@ class Add(CombinationKernel):
if isinstance(kern, Add):
del subkerns[i]
for part in kern.parts[::-1]:
kern.unlink_parameter(part)
subkerns.insert(i, part)
#kern.unlink_parameter(part)
subkerns.insert(i, part.copy())
super(Add, self).__init__(subkerns, name)
self._exact_psicomp = self._check_exact_psicomp()
@@ -241,16 +241,20 @@ class Add(CombinationKernel):
[np.add(target_grads[i],grads[i],target_grads[i]) for i in range(len(grads))]
return target_grads
def add(self, other):
if isinstance(other, Add):
other_params = other.parameters[:]
for p in other_params:
other.unlink_parameter(p)
self.link_parameters(*other_params)
else:
self.link_parameter(other)
self.input_dim, self._all_dims_active = self.get_input_dim_active_dims(self.parts)
return self
#def add(self, other):
# parts = self.parts
# if 0:#isinstance(other, Add):
# #other_params = other.parameters[:]
# for p in other.parts[:]:
# other.unlink_parameter(p)
# parts.extend(other.parts)
# #self.link_parameters(*other_params)
#
# else:
# #self.link_parameter(other)
# parts.append(other)
# #self.input_dim, self._all_dims_active = self.get_input_dim_active_dims(parts)
# return Add([p for p in parts], self.name)
def input_sensitivity(self, summarize=True):
if summarize:

View file

@@ -48,11 +48,12 @@ class Kern(Parameterized):
if active_dims is None:
active_dims = np.arange(input_dim)
self.active_dims = active_dims
self._all_dims_active = np.atleast_1d(active_dims).astype(int)
assert self._all_dims_active.size == self.input_dim, "input_dim={} does not match len(active_dim)={}, _all_dims_active={}".format(self.input_dim, self._all_dims_active.size, self._all_dims_active)
self.active_dims = np.asarray(active_dims, np.int_)
self._all_dims_active = np.atleast_1d(self.active_dims).astype(int)
assert self.active_dims.size == self.input_dim, "input_dim={} does not match len(active_dim)={}".format(self.input_dim, self._all_dims_active.size)
self._sliced_X = 0
self.useGPU = self._support_GPU and useGPU
@@ -322,10 +323,20 @@ class CombinationKernel(Kern):
:param array-like extra_dims: if needed extra dimensions for the combination kernel to work on
"""
assert all([isinstance(k, Kern) for k in kernels])
extra_dims = np.array(extra_dims, dtype=int)
input_dim, active_dims = self.get_input_dim_active_dims(kernels, extra_dims)
extra_dims = np.asarray(extra_dims, dtype=int)
active_dims = reduce(np.union1d, (np.r_[x.active_dims] for x in kernels), np.array([], dtype=int))
input_dim = active_dims.size
if extra_dims is not None:
input_dim += extra_dims.size
# initialize the kernel with the full input_dim
super(CombinationKernel, self).__init__(input_dim, active_dims, name)
effective_input_dim = reduce(max, (k._all_dims_active.max() for k in kernels)) + 1
self._all_dims_active = np.array(np.concatenate((np.arange(effective_input_dim), extra_dims if extra_dims is not None else [])), dtype=int)
self.extra_dims = extra_dims
self.link_parameters(*kernels)
@@ -333,16 +344,8 @@ class CombinationKernel(Kern):
def parts(self):
return self.parameters
def get_input_dim_active_dims(self, kernels, extra_dims = None):
self.active_dims = reduce(np.union1d, (np.r_[x.active_dims] for x in kernels), np.array([], dtype=int))
#_all_dims_active = np.array(np.concatenate((_all_dims_active, extra_dims if extra_dims is not None else [])), dtype=int)
input_dim = reduce(max, (k._all_dims_active.max() for k in kernels)) + 1
if extra_dims is not None:
input_dim += extra_dims.size
_all_dims_active = np.arange(input_dim)
return input_dim, _all_dims_active
def _set_all_dims_ative(self):
self._all_dims_active = np.atleast_1d(self.active_dims).astype(int)
def input_sensitivity(self, summarize=True):
"""