Exception fixes for Python 3 compat

This commit is contained in:
Mike Croucher 2015-02-26 13:33:39 +00:00
parent 70c8f4a410
commit c4fb58176d
8 changed files with 25 additions and 25 deletions

View file

@@ -60,7 +60,7 @@ class Mapping(Parameterized):
from ..plotting.matplot_dep import models_plots
mapping_plots.plot_mapping(self,*args)
else:
raise NameError, "matplotlib package has not been imported."
raise NameError("matplotlib package has not been imported.")
class Bijective_mapping(Mapping):
"""
@@ -111,7 +111,7 @@ class Mapping_check_model(Model):
return (self.dL_df*self.mapping.f(self.X)).sum()
def _log_likelihood_gradients(self):
raise NotImplementedError, "This needs to be implemented to use the Mapping_check_model class."
raise NotImplementedError("This needs to be implemented to use the Mapping_check_model class.")
class Mapping_check_df_dtheta(Mapping_check_model):
"""This class allows gradient checks for the gradient of a mapping with respect to parameters. """

View file

@@ -30,7 +30,7 @@ class Model(Parameterized):
self.add_observer(self.tie, self.tie._parameters_changed_notification, priority=-500)
def log_likelihood(self):
raise NotImplementedError, "this needs to be implemented to use the model class"
raise NotImplementedError("this needs to be implemented to use the model class")
def _log_likelihood_gradients(self):
return self.gradient.copy()
@@ -119,7 +119,7 @@ class Model(Parameterized):
DEPRECATED.
"""
raise DeprecationWarning, 'parameters now have default constraints'
raise DeprecationWarning('parameters now have default constraints')
def objective_function(self):
"""

View file

@@ -32,7 +32,7 @@ class ArrayList(list):
if el is item:
return index
index += 1
raise ValueError, "{} is not in list".format(item)
raise ValueError("{} is not in list".format(item))
pass
class ObserverList(object):

View file

@@ -36,7 +36,7 @@ def adjust_name_for_printing(name):
name = name.replace("/", "_l_").replace("@", '_at_')
name = name.replace("(", "_of_").replace(")", "")
if re.match(r'^[a-zA-Z_][a-zA-Z0-9-_]*$', name) is None:
raise NameError, "name {} converted to {} cannot be further converted to valid python variable name!".format(name2, name)
raise NameError("name {} converted to {} cannot be further converted to valid python variable name!".format(name2, name))
return name
return ''
@@ -65,13 +65,13 @@ class Parentable(object):
Gets called, when the parent changed, so we can adjust our
inner attributes according to the new parent.
"""
raise NotImplementedError, "shouldnt happen, Parentable objects need to be able to change their parent"
raise NotImplementedError("shouldnt happen, Parentable objects need to be able to change their parent")
def _disconnect_parent(self, *args, **kw):
"""
Disconnect this object from its parent
"""
raise NotImplementedError, "Abstract superclass"
raise NotImplementedError("Abstract superclass")
@property
def _highest_parent_(self):
@@ -214,7 +214,7 @@ class Gradcheckable(Pickleable, Parentable):
Perform the checkgrad on the model.
TODO: this can be done more efficiently, when doing it inside here
"""
raise HierarchyError, "This parameter is not in a model with a likelihood, and, therefore, cannot be gradient checked!"
raise HierarchyError("This parameter is not in a model with a likelihood, and, therefore, cannot be gradient checked!")
class Nameable(Gradcheckable):
"""
@@ -652,10 +652,10 @@ class OptimizationHandlable(Indexable):
self.trigger_update()
def _get_params_transformed(self):
raise DeprecationWarning, "_get|set_params{_optimizer_copy_transformed} is deprecated, use self.optimizer array insetad!"
raise DeprecationWarning("_get|set_params{_optimizer_copy_transformed} is deprecated, use self.optimizer array instead!")
#
def _set_params_transformed(self, p):
raise DeprecationWarning, "_get|set_params{_optimizer_copy_transformed} is deprecated, use self.optimizer array insetad!"
raise DeprecationWarning("_get|set_params{_optimizer_copy_transformed} is deprecated, use self.optimizer array instead!")
def _trigger_params_changed(self, trigger_parent=True):
"""
@@ -701,7 +701,7 @@ class OptimizationHandlable(Indexable):
Return the number of parameters of this parameter_handle.
Param objects will always return 0.
"""
raise NotImplemented, "Abstract, please implement in respective classes"
raise NotImplementedError("Abstract, please implement in respective classes")
def parameter_names(self, add_self=False, adjust_for_printing=False, recursive=True):
"""

View file

@@ -131,7 +131,7 @@ class Parameterized(Parameterizable):
if param.has_parent():
def visit(parent, self):
if parent is self:
raise HierarchyError, "You cannot add a parameter twice into the hierarchy"
raise HierarchyError("You cannot add a parameter twice into the hierarchy")
param.traverse_parents(visit, self)
param._parent_.unlink_parameter(param)
# make sure the size is set
@@ -173,7 +173,7 @@ class Parameterized(Parameterizable):
self._highest_parent_._connect_fixes()
else:
raise HierarchyError, """Parameter exists already, try making a copy"""
raise HierarchyError("""Parameter exists already, try making a copy""")
def link_parameters(self, *parameters):
@@ -189,9 +189,9 @@ class Parameterized(Parameterizable):
"""
if not param in self.parameters:
try:
raise RuntimeError, "{} does not belong to this object {}, remove parameters directly from their respective parents".format(param._short(), self.name)
raise RuntimeError("{} does not belong to this object {}, remove parameters directly from their respective parents".format(param._short(), self.name))
except AttributeError:
raise RuntimeError, "{} does not seem to be a parameter, remove parameters directly from their respective parents".format(str(param))
raise RuntimeError("{} does not seem to be a parameter, remove parameters directly from their respective parents".format(str(param)))
start = sum([p.size for p in self.parameters[:param._parent_index_]])
self._remove_parameter_name(param)
@@ -215,9 +215,9 @@ class Parameterized(Parameterizable):
self._highest_parent_._notify_parent_change()
def add_parameter(self, *args, **kwargs):
raise DeprecationWarning, "add_parameter was renamed to link_parameter to avoid confusion of setting variables, use link_parameter instead"
raise DeprecationWarning("add_parameter was renamed to link_parameter to avoid confusion of setting variables, use link_parameter instead")
def remove_parameter(self, *args, **kwargs):
raise DeprecationWarning, "remove_parameter was renamed to unlink_parameter to avoid confusion of setting variables, use unlink_parameter instead"
raise DeprecationWarning("remove_parameter was renamed to unlink_parameter to avoid confusion of setting variables, use unlink_parameter instead")
def _connect_parameters(self, ignore_added_names=False):
# connect parameterlist to this parameterized object
@@ -237,7 +237,7 @@ class Parameterized(Parameterizable):
self._param_slices_ = []
for i, p in enumerate(self.parameters):
if not p.param_array.flags['C_CONTIGUOUS']:
raise ValueError, "This should not happen! Please write an email to the developers with the code, which reproduces this error. All parameter arrays must be C_CONTIGUOUS"
raise ValueError("This should not happen! Please write an email to the developers with the code, which reproduces this error. All parameter arrays must be C_CONTIGUOUS")
p._parent_ = self
p._parent_index_ = i
@@ -279,7 +279,7 @@ class Parameterized(Parameterizable):
else:
if paramlist is None:
paramlist = self.grep_param_names(name)
if len(paramlist) < 1: raise AttributeError, name
if len(paramlist) < 1: raise AttributeError(name)
if len(paramlist) == 1:
if isinstance(paramlist[-1], Parameterized):
paramlist = paramlist[-1].flattened_parameters
@@ -295,7 +295,7 @@ class Parameterized(Parameterizable):
try:
self.param_array[name] = value
except:
raise ValueError, "Setting by slice or index only allowed with array-like"
raise ValueError("Setting by slice or index only allowed with array-like")
self.trigger_update()
else:
try: param = self.__getitem__(name, paramlist)

View file

@@ -16,13 +16,13 @@ class VariationalPrior(Parameterized):
super(VariationalPrior, self).__init__(name=name, **kw)
def KL_divergence(self, variational_posterior):
raise NotImplementedError, "override this for variational inference of latent space"
raise NotImplementedError("override this for variational inference of latent space")
def update_gradients_KL(self, variational_posterior):
"""
updates the gradients for mean and variance **in place**
"""
raise NotImplementedError, "override this for variational inference of latent space"
raise NotImplementedError("override this for variational inference of latent space")
class NormalPrior(VariationalPrior):
def KL_divergence(self, variational_posterior):

View file

@@ -48,7 +48,7 @@ class SparseGP(GP):
inference_method = var_dtc.VarDTC(limit=1 if not self.missing_data else Y.shape[1])
else:
#inference_method = ??
raise NotImplementedError, "what to do what to do?"
raise NotImplementedError("what to do what to do?")
print("defaulting to ", inference_method, "for latent function inference")
self.Z = Param('inducing inputs', Z)

View file

@@ -223,7 +223,7 @@ class Symbolic_core():
def code_gradients_cacheable(self, function, variable):
if variable not in self.cacheable:
raise RuntimeError, variable + ' must be a cacheable.'
raise RuntimeError(variable + ' must be a cacheable.')
lcode = 'gradients_' + variable + ' = np.zeros_like(' + variable + ')\n'
lcode += 'self.update_cache(' + ', '.join(self.cacheable) + ')\n'
for i, theta in enumerate(self.variables[variable]):