diff --git a/GPy/examples/dimensionality_reduction.py b/GPy/examples/dimensionality_reduction.py
index 005b131f..8aaeb4ae 100644
--- a/GPy/examples/dimensionality_reduction.py
+++ b/GPy/examples/dimensionality_reduction.py
@@ -327,31 +327,52 @@ def mrd_simulation(optimize=True, plot=True, plot_sim=True, **kw):
m.plot_scales("MRD Scales")
return m
+
+
def brendan_faces():
from GPy import kern
data = GPy.util.datasets.brendan_faces()
Q = 2
- Y = data['Y'][0:-1:10, :]
- # Y = data['Y']
+ Y = data['Y']
Yn = Y - Y.mean()
Yn /= Yn.std()
m = GPy.models.GPLVM(Yn, Q)
- # m = GPy.models.BayesianGPLVM(Yn, Q, num_inducing=100)
# optimize
m.constrain('rbf|noise|white', GPy.core.transformations.logexp_clipped())
- m.optimize('scg', messages=1, max_f_eval=10000)
+ m.optimize('scg', messages=1, max_iters=10)
ax = m.plot_latent(which_indices=(0, 1))
y = m.likelihood.Y[0, :]
- data_show = GPy.util.visualize.image_show(y[None, :], dimensions=(20, 28), transpose=True, invert=False, scale=False)
+ data_show = GPy.util.visualize.image_show(y[None, :], dimensions=(20, 28), transpose=True, order='F', invert=False, scale=False)
lvm_visualizer = GPy.util.visualize.lvm(m.X[0, :].copy(), m, data_show, ax)
raw_input('Press enter to finish')
return m
+
+def olivetti_faces():
+ from GPy import kern
+ data = GPy.util.datasets.olivetti_faces()
+ Q = 2
+ Y = data['Y']
+ Yn = Y - Y.mean()
+ Yn /= Yn.std()
+
+ m = GPy.models.GPLVM(Yn, Q)
+ m.optimize('scg', messages=1, max_iters=1000)
+
+ ax = m.plot_latent(which_indices=(0, 1))
+ y = m.likelihood.Y[0, :]
+ data_show = GPy.util.visualize.image_show(y[None, :], dimensions=(112, 92), transpose=False, invert=False, scale=False)
+ lvm_visualizer = GPy.util.visualize.lvm(m.X[0, :].copy(), m, data_show, ax)
+ raw_input('Press enter to finish')
+
+ return m
+
def stick_play(range=None, frame_rate=15):
+
data = GPy.util.datasets.osu_run1()
# optimize
if range == None:
diff --git a/GPy/util/__init__.py b/GPy/util/__init__.py
index 99548268..db9b7362 100644
--- a/GPy/util/__init__.py
+++ b/GPy/util/__init__.py
@@ -14,3 +14,5 @@ import visualize
import decorators
import classification
import latent_space_visualizations
+
+import netpbmfile
diff --git a/GPy/util/datasets.py b/GPy/util/datasets.py
index 2ff168b3..45ed694c 100644
--- a/GPy/util/datasets.py
+++ b/GPy/util/datasets.py
@@ -8,17 +8,12 @@ import zipfile
import tarfile
import datetime
-ipython_notebook = False
-if ipython_notebook:
- import IPython.core.display
- def ipynb_input(varname, prompt=''):
- """Prompt user for input and assign string val to given variable name."""
- js_code = ("""
- var value = prompt("{prompt}","");
- var py_code = "{varname} = '" + value + "'";
- IPython.notebook.kernel.execute(py_code);
- """).format(prompt=prompt, varname=varname)
- return IPython.core.display.Javascript(js_code)
+ipython_available=True
+try:
+ import IPython
+except ImportError:
+ ipython_available=False
+
import sys, urllib
@@ -34,8 +29,11 @@ data_path = os.path.join(os.path.dirname(__file__), 'datasets')
default_seed = 10000
overide_manual_authorize=False
neil_url = 'http://staffwww.dcs.shef.ac.uk/people/N.Lawrence/dataset_mirror/'
+sam_url = 'http://www.cs.nyu.edu/~roweis/data/'
cmu_url = 'http://mocap.cs.cmu.edu/subjects/'
-# Note: there may be a better way of storing data resources. One of the pythonistas will need to take a look.
+
+# Note: there may be a better way of storing data resources, for the
+# moment we are storing them in a dictionary.
data_resources = {'ankur_pose_data' : {'urls' : [neil_url + 'ankur_pose_data/'],
'files' : [['ankurDataPoseSilhouette.mat']],
'license' : None,
@@ -49,7 +47,7 @@ data_resources = {'ankur_pose_data' : {'urls' : [neil_url + 'ankur_pose_data/'],
'license' : None,
'size' : 51276
},
- 'brendan_faces' : {'urls' : ['http://www.cs.nyu.edu/~roweis/data/'],
+ 'brendan_faces' : {'urls' : [sam_url],
'files': [['frey_rawface.mat']],
'citation' : 'Frey, B. J., Colmenarez, A and Huang, T. S. Mixtures of Local Linear Subspaces for Face Recognition. Proceedings of the IEEE Conference on Computer Vision and Pattern Recognition 1998, 32-37, June 1998. Computer Society Press, Los Alamitos, CA.',
'details' : """A video of Brendan Frey's face popularized as a benchmark for visualization by the Locally Linear Embedding.""",
@@ -93,6 +91,12 @@ The database was created with funding from NSF EIA-0196217.""",
'details' : """Data from the textbook 'A First Course in Machine Learning'. Available from http://www.dcs.gla.ac.uk/~srogers/firstcourseml/.""",
'license' : None,
'size' : 21949154},
+ 'olivetti_faces' : {'urls' : [neil_url + 'olivetti_faces/', sam_url],
+ 'files' : [['att_faces.zip'], ['olivettifaces.mat']],
+ 'citation' : 'Ferdinando Samaria and Andy Harter, Parameterisation of a Stochastic Model for Human Face Identification. Proceedings of 2nd IEEE Workshop on Applications of Computer Vision, Sarasota FL, December 1994',
+ 'details' : """Olivetti Research Labs Face data base, acquired between December 1992 and December 1994 in the Olivetti Research Lab, Cambridge (which later became AT&T Laboratories, Cambridge). When using these images please give credit to AT&T Laboratories, Cambridge. """,
+ 'license': None,
+ 'size' : 8561331},
'olympic_marathon_men' : {'urls' : [neil_url + 'olympic_marathon_men/'],
'files' : [['olympicMarathonTimes.csv']],
'citation' : None,
@@ -144,23 +148,32 @@ The database was created with funding from NSF EIA-0196217.""",
}
-def prompt_user():
+def prompt_user(prompt):
"""Ask user for agreeing to data set licenses."""
# raw_input returns the empty string for "enter"
yes = set(['yes', 'y'])
no = set(['no','n'])
- choice = ''
- if ipython_notebook:
- ipynb_input(choice, prompt='provide your answer here')
- else:
+
+ try:
+ print(prompt)
choice = raw_input().lower()
+ # would like to test for exception here, but not sure if we can do that without importing IPython
+ except:
+ print('Stdin is not implemented.')
+ print('You need to set')
+ print('overide_manual_authorize=True')
+ print('to proceed with the download. Please set that variable and continue.')
+ raise
+
+
if choice in yes:
return True
elif choice in no:
return False
else:
- sys.stdout.write("Please respond with 'yes', 'y' or 'no', 'n'")
- return prompt_user()
+ print("Your response was a " + choice)
+ print("Please respond with 'yes', 'y' or 'no', 'n'")
+        return prompt_user(prompt)
def data_available(dataset_name=None):
@@ -212,15 +225,14 @@ def authorize_download(dataset_name=None):
print('You must also agree to the following license:')
print(dr['license'])
print('')
- print('Do you wish to proceed with the download? [yes/no]')
- return prompt_user()
+ return prompt_user('Do you wish to proceed with the download? [yes/no]')
def download_data(dataset_name=None):
"""Check with the user that the are happy with terms and conditions for the data set, then download it."""
dr = data_resources[dataset_name]
if not authorize_download(dataset_name):
- return False
+ raise Exception("Permission to download data set denied.")
if dr.has_key('suffices'):
for url, files, suffices in zip(dr['urls'], dr['files'], dr['suffices']):
@@ -489,12 +501,12 @@ def ripley_synth(data_set='ripley_prnn_data'):
return data_details_return({'X': X, 'y': y, 'Xtest': Xtest, 'ytest': ytest, 'info': 'Synthetic data generated by Ripley for a two class classification problem.'}, data_set)
def osu_run1(data_set='osu_run1', sample_every=4):
+ path = os.path.join(data_path, data_set)
if not data_available(data_set):
download_data(data_set)
- zip = zipfile.ZipFile(os.path.join(data_path, data_set, 'run1TXT.ZIP'), 'r')
- path = os.path.join(data_path, data_set)
- for name in zip.namelist():
- zip.extract(name, path)
+ zip = zipfile.ZipFile(os.path.join(data_path, data_set, 'run1TXT.ZIP'), 'r')
+ for name in zip.namelist():
+ zip.extract(name, path)
Y, connect = GPy.util.mocap.load_text_data('Aug210106', path)
Y = Y[0:-1:sample_every, :]
return data_details_return({'Y': Y, 'connect' : connect}, data_set)
@@ -579,6 +591,24 @@ def toy_linear_1d_classification(seed=default_seed):
X = (np.r_[x1, x2])[:, None]
return {'X': X, 'Y': sample_class(2.*X), 'F': 2.*X, 'seed' : seed}
+def olivetti_faces(data_set='olivetti_faces'):
+ path = os.path.join(data_path, data_set)
+ if not data_available(data_set):
+ download_data(data_set)
+ zip = zipfile.ZipFile(os.path.join(path, 'att_faces.zip'), 'r')
+ for name in zip.namelist():
+ zip.extract(name, path)
+ Y = []
+ lbls = []
+ for subject in range(40):
+ for image in range(10):
+ image_path = os.path.join(path, 'orl_faces', 's'+str(subject+1), str(image+1) + '.pgm')
+ Y.append(GPy.util.netpbmfile.imread(image_path).flatten())
+ lbls.append(subject)
+ Y = np.asarray(Y)
+ lbls = np.asarray(lbls)[:, None]
+    return data_details_return({'Y': Y, 'lbls' : lbls, 'info': "ORL (Olivetti) Faces data set: 400 grayscale images of size 112x92, ten per subject for forty subjects."}, data_set)
+
def olympic_100m_men(data_set='rogers_girolami_data'):
if not data_available(data_set):
download_data(data_set)
@@ -586,7 +616,8 @@ def olympic_100m_men(data_set='rogers_girolami_data'):
tar_file = os.path.join(path, 'firstcoursemldata.tar.gz')
tar = tarfile.open(tar_file)
print('Extracting file.')
- tar.extractall(path=path)
+ tar.extractall(path=path)
+
tar.close()
olympic_data = scipy.io.loadmat(os.path.join(data_path, data_set, 'data', 'olympics.mat'))['male100']
diff --git a/GPy/util/netpbmfile.py b/GPy/util/netpbmfile.py
new file mode 100644
index 00000000..030bd574
--- /dev/null
+++ b/GPy/util/netpbmfile.py
@@ -0,0 +1,331 @@
+#!/usr/bin/env python
+# -*- coding: utf-8 -*-
+# netpbmfile.py
+
+# Copyright (c) 2011-2013, Christoph Gohlke
+# Copyright (c) 2011-2013, The Regents of the University of California
+# Produced at the Laboratory for Fluorescence Dynamics.
+# All rights reserved.
+#
+# Redistribution and use in source and binary forms, with or without
+# modification, are permitted provided that the following conditions are met:
+#
+# * Redistributions of source code must retain the above copyright
+# notice, this list of conditions and the following disclaimer.
+# * Redistributions in binary form must reproduce the above copyright
+# notice, this list of conditions and the following disclaimer in the
+# documentation and/or other materials provided with the distribution.
+# * Neither the name of the copyright holders nor the names of any
+# contributors may be used to endorse or promote products derived
+# from this software without specific prior written permission.
+#
+# THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS"
+# AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE
+# IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE
+# ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT OWNER OR CONTRIBUTORS BE
+# LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR
+# CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF
+# SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS
+# INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN
+# CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE)
+# ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE
+# POSSIBILITY OF SUCH DAMAGE.
+
+"""Read and write image data from respectively to Netpbm files.
+
+This implementation follows the Netpbm format specifications at
+http://netpbm.sourceforge.net/doc/. No gamma correction is performed.
+
+The following image formats are supported: PBM (bi-level), PGM (grayscale),
+PPM (color), PAM (arbitrary), XV thumbnail (RGB332, read-only).
+
+:Author:
+ `Christoph Gohlke `_
+
+:Organization:
+ Laboratory for Fluorescence Dynamics, University of California, Irvine
+
+:Version: 2013.01.18
+
+Requirements
+------------
+* `CPython 2.7, 3.2 or 3.3 `_
+* `Numpy 1.7 `_
+* `Matplotlib 1.2 `_ (optional for plotting)
+
+Examples
+--------
+>>> im1 = numpy.array([[0, 1],[65534, 65535]], dtype=numpy.uint16)
+>>> imsave('_tmp.pgm', im1)
+>>> im2 = imread('_tmp.pgm')
+>>> assert numpy.all(im1 == im2)
+
+"""
+
+from __future__ import division, print_function
+
+import sys
+import re
+import math
+from copy import deepcopy
+
+import numpy
+
+__version__ = '2013.01.18'
+__docformat__ = 'restructuredtext en'
+__all__ = ['imread', 'imsave', 'NetpbmFile']
+
+
+def imread(filename, *args, **kwargs):
+ """Return image data from Netpbm file as numpy array.
+
+ `args` and `kwargs` are arguments to NetpbmFile.asarray().
+
+ Examples
+ --------
+ >>> image = imread('_tmp.pgm')
+
+ """
+    netpbm = NetpbmFile(filename)
+    try:
+        image = netpbm.asarray()
+    finally:
+        netpbm.close()
+    return image
+
+
+def imsave(filename, data, maxval=None, pam=False):
+ """Write image data to Netpbm file.
+
+ Examples
+ --------
+ >>> image = numpy.array([[0, 1],[65534, 65535]], dtype=numpy.uint16)
+ >>> imsave('_tmp.pgm', image)
+
+ """
+    netpbm = NetpbmFile(data, maxval=maxval)
+    try:
+        netpbm.write(filename, pam=pam)
+    finally:
+        netpbm.close()
+
+
+class NetpbmFile(object):
+ """Read and write Netpbm PAM, PBM, PGM, PPM, files."""
+
+ _types = {b'P1': b'BLACKANDWHITE', b'P2': b'GRAYSCALE', b'P3': b'RGB',
+ b'P4': b'BLACKANDWHITE', b'P5': b'GRAYSCALE', b'P6': b'RGB',
+ b'P7 332': b'RGB', b'P7': b'RGB_ALPHA'}
+
+ def __init__(self, arg=None, **kwargs):
+ """Initialize instance from filename, open file, or numpy array."""
+ for attr in ('header', 'magicnum', 'width', 'height', 'maxval',
+ 'depth', 'tupltypes', '_filename', '_fh', '_data'):
+ setattr(self, attr, None)
+ if arg is None:
+ self._fromdata([], **kwargs)
+ elif isinstance(arg, basestring):
+ self._fh = open(arg, 'rb')
+ self._filename = arg
+ self._fromfile(self._fh, **kwargs)
+ elif hasattr(arg, 'seek'):
+ self._fromfile(arg, **kwargs)
+ self._fh = arg
+ else:
+ self._fromdata(arg, **kwargs)
+
+ def asarray(self, copy=True, cache=False, **kwargs):
+ """Return image data from file as numpy array."""
+ data = self._data
+ if data is None:
+ data = self._read_data(self._fh, **kwargs)
+ if cache:
+ self._data = data
+ else:
+ return data
+ return deepcopy(data) if copy else data
+
+ def write(self, arg, **kwargs):
+ """Write instance to file."""
+ if hasattr(arg, 'seek'):
+ self._tofile(arg, **kwargs)
+ else:
+ with open(arg, 'wb') as fid:
+ self._tofile(fid, **kwargs)
+
+ def close(self):
+ """Close open file. Future asarray calls might fail."""
+ if self._filename and self._fh:
+ self._fh.close()
+ self._fh = None
+
+ def __del__(self):
+ self.close()
+
+ def _fromfile(self, fh):
+ """Initialize instance from open file."""
+ fh.seek(0)
+ data = fh.read(4096)
+ if (len(data) < 7) or not (b'0' < data[1:2] < b'8'):
+ raise ValueError("Not a Netpbm file:\n%s" % data[:32])
+ try:
+ self._read_pam_header(data)
+ except Exception:
+ try:
+ self._read_pnm_header(data)
+ except Exception:
+ raise ValueError("Not a Netpbm file:\n%s" % data[:32])
+
+ def _read_pam_header(self, data):
+ """Read PAM header and initialize instance."""
+ regroups = re.search(
+ b"(^P7[\n\r]+(?:(?:[\n\r]+)|(?:#.*)|"
+ b"(HEIGHT\s+\d+)|(WIDTH\s+\d+)|(DEPTH\s+\d+)|(MAXVAL\s+\d+)|"
+ b"(?:TUPLTYPE\s+\w+))*ENDHDR\n)", data).groups()
+ self.header = regroups[0]
+ self.magicnum = b'P7'
+ for group in regroups[1:]:
+ key, value = group.split()
+ setattr(self, unicode(key).lower(), int(value))
+ matches = re.findall(b"(TUPLTYPE\s+\w+)", self.header)
+ self.tupltypes = [s.split(None, 1)[1] for s in matches]
+
+ def _read_pnm_header(self, data):
+ """Read PNM header and initialize instance."""
+ bpm = data[1:2] in b"14"
+ regroups = re.search(b"".join((
+ b"(^(P[123456]|P7 332)\s+(?:#.*[\r\n])*",
+ b"\s*(\d+)\s+(?:#.*[\r\n])*",
+ b"\s*(\d+)\s+(?:#.*[\r\n])*" * (not bpm),
+ b"\s*(\d+)\s(?:\s*#.*[\r\n]\s)*)")), data).groups() + (1, ) * bpm
+ self.header = regroups[0]
+ self.magicnum = regroups[1]
+ self.width = int(regroups[2])
+ self.height = int(regroups[3])
+ self.maxval = int(regroups[4])
+ self.depth = 3 if self.magicnum in b"P3P6P7 332" else 1
+ self.tupltypes = [self._types[self.magicnum]]
+
+ def _read_data(self, fh, byteorder='>'):
+ """Return image data from open file as numpy array."""
+ fh.seek(len(self.header))
+ data = fh.read()
+ dtype = 'u1' if self.maxval < 256 else byteorder + 'u2'
+ depth = 1 if self.magicnum == b"P7 332" else self.depth
+ shape = [-1, self.height, self.width, depth]
+ size = numpy.prod(shape[1:])
+ if self.magicnum in b"P1P2P3":
+ data = numpy.array(data.split(None, size)[:size], dtype)
+ data = data.reshape(shape)
+ elif self.maxval == 1:
+ shape[2] = int(math.ceil(self.width / 8))
+ data = numpy.frombuffer(data, dtype).reshape(shape)
+ data = numpy.unpackbits(data, axis=-2)[:, :, :self.width, :]
+ else:
+ data = numpy.frombuffer(data, dtype)
+ data = data[:size * (data.size // size)].reshape(shape)
+ if data.shape[0] < 2:
+ data = data.reshape(data.shape[1:])
+ if data.shape[-1] < 2:
+ data = data.reshape(data.shape[:-1])
+ if self.magicnum == b"P7 332":
+ rgb332 = numpy.array(list(numpy.ndindex(8, 8, 4)), numpy.uint8)
+ rgb332 *= [36, 36, 85]
+ data = numpy.take(rgb332, data, axis=0)
+ return data
+
+ def _fromdata(self, data, maxval=None):
+ """Initialize instance from numpy array."""
+ data = numpy.array(data, ndmin=2, copy=True)
+ if data.dtype.kind not in "uib":
+ raise ValueError("not an integer type: %s" % data.dtype)
+ if data.dtype.kind == 'i' and numpy.min(data) < 0:
+ raise ValueError("data out of range: %i" % numpy.min(data))
+ if maxval is None:
+ maxval = numpy.max(data)
+ maxval = 255 if maxval < 256 else 65535
+ if maxval < 0 or maxval > 65535:
+ raise ValueError("data out of range: %i" % maxval)
+ data = data.astype('u1' if maxval < 256 else '>u2')
+ self._data = data
+ if data.ndim > 2 and data.shape[-1] in (3, 4):
+ self.depth = data.shape[-1]
+ self.width = data.shape[-2]
+ self.height = data.shape[-3]
+ self.magicnum = b'P7' if self.depth == 4 else b'P6'
+ else:
+ self.depth = 1
+ self.width = data.shape[-1]
+ self.height = data.shape[-2]
+ self.magicnum = b'P5' if maxval > 1 else b'P4'
+ self.maxval = maxval
+ self.tupltypes = [self._types[self.magicnum]]
+ self.header = self._header()
+
+ def _tofile(self, fh, pam=False):
+ """Write Netbm file."""
+ fh.seek(0)
+ fh.write(self._header(pam))
+ data = self.asarray(copy=False)
+ if self.maxval == 1:
+ data = numpy.packbits(data, axis=-1)
+ data.tofile(fh)
+
+ def _header(self, pam=False):
+ """Return file header as byte string."""
+ if pam or self.magicnum == b'P7':
+ header = "\n".join((
+ "P7",
+ "HEIGHT %i" % self.height,
+ "WIDTH %i" % self.width,
+ "DEPTH %i" % self.depth,
+ "MAXVAL %i" % self.maxval,
+ "\n".join("TUPLTYPE %s" % unicode(i) for i in self.tupltypes),
+ "ENDHDR\n"))
+ elif self.maxval == 1:
+ header = "P4 %i %i\n" % (self.width, self.height)
+ elif self.depth == 1:
+ header = "P5 %i %i %i\n" % (self.width, self.height, self.maxval)
+ else:
+ header = "P6 %i %i %i\n" % (self.width, self.height, self.maxval)
+ if sys.version_info[0] > 2:
+ header = bytes(header, 'ascii')
+ return header
+
+ def __str__(self):
+ """Return information about instance."""
+ return unicode(self.header)
+
+
+if sys.version_info[0] > 2:
+ basestring = str
+ unicode = lambda x: str(x, 'ascii')
+
+if __name__ == "__main__":
+ # Show images specified on command line or all images in current directory
+ from glob import glob
+ from matplotlib import pyplot
+ files = sys.argv[1:] if len(sys.argv) > 1 else glob('*.p*m')
+ for fname in files:
+ try:
+ pam = NetpbmFile(fname)
+ img = pam.asarray(copy=False)
+ if False:
+ pam.write('_tmp.pgm.out', pam=True)
+ img2 = imread('_tmp.pgm.out')
+ assert numpy.all(img == img2)
+ imsave('_tmp.pgm.out', img)
+ img2 = imread('_tmp.pgm.out')
+ assert numpy.all(img == img2)
+ pam.close()
+ except ValueError as e:
+ print(fname, e)
+ continue
+ _shape = img.shape
+ if img.ndim > 3 or (img.ndim > 2 and img.shape[-1] not in (3, 4)):
+ img = img[0]
+ cmap = 'gray' if pam.maxval > 1 else 'binary'
+ pyplot.imshow(img, cmap, interpolation='nearest')
+ pyplot.title("%s %s %s %s" % (fname, unicode(pam.magicnum),
+ _shape, img.dtype))
+ pyplot.show()
diff --git a/GPy/util/visualize.py b/GPy/util/visualize.py
index 7a519555..ecdf78ce 100644
--- a/GPy/util/visualize.py
+++ b/GPy/util/visualize.py
@@ -246,17 +246,36 @@ class lvm_dimselect(lvm):
class image_show(matplotlib_show):
- """Show a data vector as an image."""
- def __init__(self, vals, axes=None, dimensions=(16,16), transpose=False, invert=False, scale=False, palette=[], presetMean = 0., presetSTD = -1., selectImage=0):
+ """Show a data vector as an image. This visualizer rehapes the output vector and displays it as an image.
+
+ :param vals: the values of the output to display.
+ :type vals: ndarray
+ :param axes: the axes to show the output on.
+ :type vals: axes handle
+ :param dimensions: the dimensions that the image needs to be transposed to for display.
+ :type dimensions: tuple
+ :param transpose: whether to transpose the image before display.
+ :type bool: default is False.
+ :param order: whether array is in Fortan ordering ('F') or Python ordering ('C'). Default is python ('C').
+ :type order: string
+ :param invert: whether to invert the pixels or not (default False).
+ :type invert: bool
+ :param palette: a palette to use for the image.
+ :param preset_mean: the preset mean of a scaled image.
+ :type preset_mean: double
+ :param preset_std: the preset standard deviation of a scaled image.
+ :type preset_std: double"""
+ def __init__(self, vals, axes=None, dimensions=(16,16), transpose=False, order='C', invert=False, scale=False, palette=[], preset_mean = 0., preset_std = -1., select_image=0):
matplotlib_show.__init__(self, vals, axes)
self.dimensions = dimensions
self.transpose = transpose
+ self.order = order
self.invert = invert
self.scale = scale
self.palette = palette
- self.presetMean = presetMean
- self.presetSTD = presetSTD
- self.selectImage = selectImage # This is used when the y vector contains multiple images concatenated.
+ self.preset_mean = preset_mean
+ self.preset_std = preset_std
+ self.select_image = select_image # This is used when the y vector contains multiple images concatenated.
self.set_image(self.vals)
if not self.palette == []: # Can just show the image (self.set_image() took care of setting the palette)
@@ -272,22 +291,22 @@ class image_show(matplotlib_show):
def set_image(self, vals):
dim = self.dimensions[0] * self.dimensions[1]
- nImg = np.sqrt(vals[0,].size/dim)
- if nImg > 1 and nImg.is_integer(): # Show a mosaic of images
- nImg = np.int(nImg)
- self.vals = np.zeros((self.dimensions[0]*nImg, self.dimensions[1]*nImg))
- for iR in range(nImg):
- for iC in range(nImg):
- currImgId = iR*nImg + iC
- currImg = np.reshape(vals[0,dim*currImgId+np.array(range(dim))], self.dimensions, order='F')
- firstRow = iR*self.dimensions[0]
- lastRow = (iR+1)*self.dimensions[0]
- firstCol = iC*self.dimensions[1]
- lastCol = (iC+1)*self.dimensions[1]
- self.vals[firstRow:lastRow, firstCol:lastCol] = currImg
+ num_images = np.sqrt(vals[0,].size/dim)
+ if num_images > 1 and num_images.is_integer(): # Show a mosaic of images
+ num_images = np.int(num_images)
+ self.vals = np.zeros((self.dimensions[0]*num_images, self.dimensions[1]*num_images))
+ for iR in range(num_images):
+ for iC in range(num_images):
+ cur_img_id = iR*num_images + iC
+ cur_img = np.reshape(vals[0,dim*cur_img_id+np.array(range(dim))], self.dimensions, order=self.order)
+ first_row = iR*self.dimensions[0]
+ last_row = (iR+1)*self.dimensions[0]
+ first_col = iC*self.dimensions[1]
+ last_col = (iC+1)*self.dimensions[1]
+ self.vals[first_row:last_row, first_col:last_col] = cur_img
else:
- self.vals = np.reshape(vals[0,dim*self.selectImage+np.array(range(dim))], self.dimensions, order='F')
+ self.vals = np.reshape(vals[0,dim*self.select_image+np.array(range(dim))], self.dimensions, order=self.order)
if self.transpose:
self.vals = self.vals.T
# if not self.scale:
@@ -296,8 +315,8 @@ class image_show(matplotlib_show):
self.vals = -self.vals
# un-normalizing, for visualisation purposes:
- if self.presetSTD >= 0: # The Mean is assumed to be in the range (0,255)
- self.vals = self.vals*self.presetSTD + self.presetMean
+ if self.preset_std >= 0: # The Mean is assumed to be in the range (0,255)
+ self.vals = self.vals*self.preset_std + self.preset_mean
# Clipping the values:
self.vals[self.vals < 0] = 0
self.vals[self.vals > 255] = 255