mirror of
https://github.com/SheffieldML/GPy.git
synced 2026-05-02 08:12:39 +02:00
Automatically fetch datasets and first init. attempt for mocap
This commit is contained in:
parent
afcb30dfbe
commit
9041d28ab2
2 changed files with 27 additions and 0 deletions
|
|
@ -5,6 +5,8 @@ import GPy
|
|||
import scipy.sparse
|
||||
import scipy.io
|
||||
import cPickle as pickle
|
||||
import urllib2 as url
|
||||
|
||||
# Directory where fetched datasets are cached, relative to this module's location.
data_path = os.path.join(os.path.dirname(__file__), 'datasets')
|
||||
# Default RNG seed so dataset subsampling/splits are reproducible across runs.
default_seed = 10000
|
||||
|
||||
|
|
@ -15,6 +17,18 @@ def sample_class(f):
|
|||
c = np.where(c, 1, -1)
|
||||
return c
|
||||
|
||||
def fetch_dataset(resource, file_name, messages = True):
|
||||
if messages:
|
||||
print "Downloading resource: " , resource, " ... "
|
||||
response = url.urlopen(resource)
|
||||
# TODO: Some error checking...
|
||||
html = response.read()
|
||||
response.close()
|
||||
with open(file_name, "w") as text_file:
|
||||
text_file.write("%s"%html)
|
||||
if messages:
|
||||
print "Done!"
|
||||
|
||||
def della_gatta_TRP63_gene_expression(gene_number=None):
|
||||
mat_data = scipy.io.loadmat(os.path.join(data_path, 'DellaGattadata.mat'))
|
||||
X = np.double(mat_data['timepoints'])
|
||||
|
|
|
|||
Loading…
Add table
Add a link
Reference in a new issue