Skip to content

Commit 48ad0d8

Browse files
committed
Merge remote-tracking branch 'upstream/master' into LiveMonitorGUI
2 parents b10552c + 8c34a73 commit 48ad0d8

File tree

4 files changed

+123
-27
lines changed

4 files changed

+123
-27
lines changed

.requirements.txt

Lines changed: 4 additions & 0 deletions
Original file line numberDiff line numberDiff line change
@@ -0,0 +1,4 @@
1+
# This is for readthedocs.org to be able to build the documentation,
2+
# ignore otherwise
3+
numpydoc
4+
git+git://github.com/Theano/Theano.git

pylearn2/datasets/dense_design_matrix.py

Lines changed: 48 additions & 18 deletions
Original file line numberDiff line numberDiff line change
@@ -753,7 +753,11 @@ def set_topological_view(self, V, axes=('b', 0, 1, 'c')):
753753
V : ndarray
754754
An array containing a design matrix representation of
755755
training examples.
756-
axes : WRITEME
756+
axes : tuple, optional
757+
The axes ordering of the provided topo_view. Must be some
758+
permutation of ('b', 0, 1, 'c') where 'b' indicates the axis
759+
indexing examples, 0 and 1 indicate the row/cols dimensions and
760+
'c' indicates the axis indexing color channels.
757761
"""
758762
if len(V.shape) != len(axes):
759763
raise ValueError("The topological view must have exactly 4 "
@@ -1169,11 +1173,14 @@ def set_topological_view(self, V, axes=('b', 0, 1, 'c'), start=0):
11691173
----------
11701174
V : ndarray
11711175
An array containing a design matrix representation of training \
1172-
examples. If unspecified, the entire dataset (`self.X`) is used \
1173-
instead.
1174-
axes : WRITEME
1175-
WRITEME
1176-
start : WRITEME
1176+
examples.
1177+
axes : tuple, optional
1178+
The axes ordering of the provided topo_view. Must be some
1179+
permutation of ('b', 0, 1, 'c') where 'b' indicates the axis
1180+
indexing examples, 0 and 1 indicate the row/cols dimensions and
1181+
'c' indicates the axis indexing color channels.
1182+
start : int
1183+
The start index to write data.
11771184
"""
11781185
assert not contains_nan(V)
11791186
rows = V.shape[axes.index(0)]
@@ -1189,11 +1196,15 @@ def set_topological_view(self, V, axes=('b', 0, 1, 'c'), start=0):
11891196

11901197
def init_hdf5(self, path, shapes):
11911198
"""
1192-
.. todo::
1199+
Initializes the hdf5 file into which the data will be stored. This must
1200+
be called before calling fill_hdf5.
11931201
1194-
WRITEME properly
1195-
1196-
Initialize hdf5 file to be used ba dataset
1202+
Parameters
1203+
----------
1204+
path : string
1205+
The name of the hdf5 file.
1206+
shapes : tuple
1207+
The shapes of X and y.
11971208
"""
11981209

11991210
x_shape, y_shape = shapes
@@ -1217,14 +1228,25 @@ def fill_hdf5(file_handle,
12171228
start=0,
12181229
batch_size=5000):
12191230
"""
1220-
.. todo::
1231+
Saves the data to the hdf5 file.
12211232
1222-
WRITEME properly
1223-
1224-
PyTables tends to crash if you write large data on them at once.
1225-
This function write data on file_handle in batches
1233+
PyTables tends to crash if you write large amounts of data into them
1234+
at once. As such this function writes data in batches.
12261235
1227-
start: the start index to write data
1236+
Parameters
1237+
----------
1238+
file_handle : hdf5 file handle
1239+
Handle to an hdf5 object.
1240+
data_x : ndarray
1241+
X data. Must be the same shape as specified to init_hdf5.
1242+
data_y : ndarray, optional
1243+
y data. Must be the same shape as specified to init_hdf5.
1244+
node : string, optional
1245+
The hdf5 node into which the data should be stored.
1246+
start : int
1247+
The start index to write data.
1248+
batch_size : int, optional
1249+
The size of the batch to be saved.
12281250
"""
12291251

12301252
if node is None:
@@ -1245,9 +1267,17 @@ def fill_hdf5(file_handle,
12451267

12461268
def resize(self, h5file, start, stop):
12471269
"""
1248-
.. todo::
1270+
Resizes the X and y tables. This must be called before calling
1271+
fill_hdf5.
12491272
1250-
WRITEME
1273+
Parameters
1274+
----------
1275+
h5file : hdf5 file handle
1276+
Handle to an hdf5 object.
1277+
start : int
1278+
The start index to write data.
1279+
stop : int
1280+
The index of the record following the last record to be written.
12511281
"""
12521282
ensure_tables()
12531283
# TODO is there any smarter and more efficient way to this?

pylearn2/expr/nnet.py

Lines changed: 41 additions & 1 deletion
Original file line numberDiff line numberDiff line change
@@ -114,6 +114,46 @@ def arg_of_softmax(Y_hat):
114114
return z
115115

116116

117+
def arg_of_sigmoid(Y_hat):
118+
"""
119+
Given the output of a call to theano.tensor.nnet.sigmoid,
120+
returns the argument to the sigmoid (by tracing the Theano
121+
graph).
122+
123+
Parameters
124+
----------
125+
Y_hat : Variable
126+
T.nnet.sigmoid(Z)
127+
128+
Returns
129+
-------
130+
Z : Variable
131+
The variable that was passed to T.nnet.sigmoid to create `Y_hat`.
132+
Raises an error if `Y_hat` is not actually the output of a theano
133+
sigmoid.
134+
"""
135+
assert hasattr(Y_hat, 'owner')
136+
owner = Y_hat.owner
137+
assert owner is not None
138+
op = owner.op
139+
if isinstance(op, Print):
140+
assert len(owner.inputs) == 1
141+
Y_hat, = owner.inputs
142+
owner = Y_hat.owner
143+
op = owner.op
144+
success = False
145+
if isinstance(op, T.Elemwise):
146+
if isinstance(op.scalar_op, T.nnet.sigm.ScalarSigmoid):
147+
success = True
148+
if not success:
149+
raise TypeError("Expected Y_hat to be the output of a sigmoid, "
150+
"but it appears to be the output of " + str(op) +
151+
" of type " + str(type(op)))
152+
z, = owner.inputs
153+
assert z.ndim == 2
154+
return z
155+
156+
117157
def kl(Y, Y_hat, batch_axis):
118158
"""
119159
Warning: This function expects a sigmoid nonlinearity in the
@@ -323,4 +363,4 @@ def compute_f1(precision, recall):
323363
"""
324364
f1 = (2. * precision * recall /
325365
T.maximum(1, precision + recall))
326-
return f1
366+
return f1

pylearn2/expr/tests/test_nnet.py

Lines changed: 30 additions & 8 deletions
Original file line numberDiff line numberDiff line change
@@ -14,12 +14,13 @@
1414
from theano import tensor as T
1515

1616
from pylearn2.models.mlp import MLP, Sigmoid
17+
from pylearn2.expr.nnet import arg_of_sigmoid
1718
from pylearn2.expr.nnet import pseudoinverse_softmax_numpy
1819
from pylearn2.expr.nnet import softmax_numpy
1920
from pylearn2.expr.nnet import softmax_ratio
2021
from pylearn2.expr.nnet import compute_recall
2122
from pylearn2.expr.nnet import kl
22-
from pylearn2.expr.nnet import elemwise_kl
23+
from pylearn2.expr.nnet import elemwise_kl
2324
from pylearn2.utils import sharedX
2425

2526

@@ -83,7 +84,7 @@ def test_kl():
8384
"""
8485
init_mode = theano.config.compute_test_value
8586
theano.config.compute_test_value = 'raise'
86-
87+
8788
try:
8889
mlp = MLP(layers=[Sigmoid(dim=10, layer_name='Y', irange=0.1)],
8990
nvis=10)
@@ -101,7 +102,7 @@ def test_kl():
101102
np.testing.assert_raises(ValueError, kl, Y, Y_hat, 1)
102103
Y.tag.test_value[2][3] = -0.1
103104
np.testing.assert_raises(ValueError, kl, Y, Y_hat, 1)
104-
105+
105106
finally:
106107
theano.config.compute_test_value = init_mode
107108

@@ -112,10 +113,10 @@ def test_elemwise_kl():
112113
input.
113114
"""
114115
init_mode = theano.config.compute_test_value
115-
theano.config.compute_test_value = 'raise'
116-
116+
theano.config.compute_test_value = 'raise'
117+
117118
try:
118-
mlp = MLP(layers=[Sigmoid(dim=10, layer_name='Y', irange=0.1)],
119+
mlp = MLP(layers=[Sigmoid(dim=10, layer_name='Y', irange=0.1)],
119120
nvis=10)
120121
X = mlp.get_input_space().make_theano_batch()
121122
Y = mlp.get_output_space().make_theano_batch()
@@ -131,8 +132,29 @@ def test_elemwise_kl():
131132
np.testing.assert_raises(ValueError, elemwise_kl, Y, Y_hat)
132133
Y.tag.test_value[2][3] = -0.1
133134
np.testing.assert_raises(ValueError, elemwise_kl, Y, Y_hat)
134-
135+
135136
finally:
136137
theano.config.compute_test_value = init_mode
137138

138-
139+
def test_arg_of_sigmoid_good():
140+
"""
141+
Tests that arg_of_sigmoid works when given a good input.
142+
"""
143+
144+
X = T.matrix()
145+
Y = T.nnet.sigmoid(X)
146+
Z = arg_of_sigmoid(Y)
147+
assert X is Z
148+
149+
def test_arg_of_sigmoid_bad():
150+
"""
151+
Tests that arg_of_sigmoid raises an error when given a bad input.
152+
"""
153+
154+
X = T.matrix()
155+
Y = T.nnet.softmax(X)
156+
try:
157+
Z = arg_of_sigmoid(Y)
158+
except TypeError:
159+
return
160+
assert False # Should have failed

0 commit comments

Comments
 (0)