@@ -152,13 +152,10 @@ def __init__(self, n_visible= 784, n_hidden= 500, input= None, corruption_level
152152
153153 :param input: a symbolic description of the input or None
154154
155- :param corruption_level: the corruption mechanism picks up randomly this fraction
156- of entries of the input and turns them to 0
155+ :param corruption_level: the corruption mechanism picks up randomly this
156+ fraction of entries of the input and turns them to 0
157157
158158
159- amount of entries from the input to 0 from the input, defaul
160- is 0.1, which means 10% of entries are corrupted to 0
161-
162159 """
163160 self .n_visible = n_visible
164161 self .n_hidden = n_hidden
@@ -206,7 +203,8 @@ def __init__(self, n_visible= 784, n_hidden= 500, input= None, corruption_level
206203 # third argument is the probability of success of any trial
207204 #
208205 # this will produce an array of 0s and 1s where 1 has a
209-	 # probability of 1 - corruption_level and 0 if corruption_level
206+	 # probability of 1 - ``corruption_level`` and 0 with probability
207+	 # ``corruption_level``
210208 self .tilde_x = theano_rng .binomial ( self .x .shape , 1 , 1 - corruption_level ) * self .x
211209 # Equation (2)
212210 # note : y is stored as an attribute of the class so that it can be
@@ -244,7 +242,8 @@ class SdA():
244242 the dAs are only used to initialize the weights.
245243 """
246244
247- def __init__ (self , input , n_ins , hidden_layers_sizes , n_outs ):
245+ def __init__ (self , input , n_ins , hidden_layers_sizes , n_outs ,
246+ corruption_levels ):
248247 """ This class is made to support a variable number of layers.
249248
250249 :param input: symbolic variable describing the input of the SdA
@@ -255,6 +254,9 @@ def __init__(self, input, n_ins, hidden_layers_sizes, n_outs):
255254 at least one value
256255
257256 :param n_outs: dimension of the output of the network
257+
258+ :param corruption_levels: amount of corruption to use for each
259+ layer
258260 """
259261
260262 self .layers = []
@@ -263,7 +265,8 @@ def __init__(self, input, n_ins, hidden_layers_sizes, n_outs):
263265	 raise Exception (' You must have at least one hidden layer ' )
264266
265267 # add first layer:
266- layer = dA (n_ins , hidden_layers_sizes [0 ], input = input )
268+ layer = dA (n_ins , hidden_layers_sizes [0 ], input = input , \
269+ corruption_level = corruption_levels [0 ])
267270 self .layers += [layer ]
268271	 # add all intermediate layers
269272 for i in xrange ( 1 , len (hidden_layers_sizes ) ):
@@ -272,7 +275,8 @@ def __init__(self, input, n_ins, hidden_layers_sizes, n_outs):
272275 # of layers `self.layers`
273276 layer = dA ( hidden_layers_sizes [i - 1 ], \
274277 hidden_layers_sizes [i ], \
275- input = self .layers [- 1 ].hidden_values )
278+ input = self .layers [- 1 ].hidden_values ,\
279+ corruption_level = corruption_levels [i ])
276280 self .layers += [layer ]
277281
278282
@@ -309,7 +313,8 @@ def errors(self, y):
309313
310314
311315def sgd_optimization_mnist ( learning_rate = 0.1 , pretraining_epochs = 15 , \
312- pretraining_lr = 0.1 , training_epochs = 1000 , dataset = 'mnist.pkl.gz' ):
316+ pretraining_lr = 0.1 , training_epochs = 1000 , \
317+ dataset = 'mnist.pkl.gz' ):
313318 """
314319 Demonstrate stochastic gradient descent optimization for a multilayer
315320 perceptron
@@ -363,7 +368,8 @@ def shared_dataset(data_xy):
363368
364369 # construct the logistic regression class
365370 classifier = SdA ( input = x , n_ins = 28 * 28 , \
366- hidden_layers_sizes = [1000 , 1000 , 1000 ], n_outs = 10 )
371+ hidden_layers_sizes = [1000 , 1000 , 1000 ], n_outs = 10 , \
372+ corruption_levels = [ 0.1 , 0.1 , 0.1 ])
367373
368374
369375 start_time = time .clock ()
0 commit comments