diff --git a/code/convolutional_mlp.py b/code/convolutional_mlp.py
index 0d88240d..c5092b90 100644
--- a/code/convolutional_mlp.py
+++ b/code/convolutional_mlp.py
@@ -110,6 +110,9 @@ def __init__(self, rng, input, filter_shape, image_shape, poolsize=(2, 2)):
         # store parameters of this layer
         self.params = [self.W, self.b]
 
+        # keep track of model input
+        self.input = input
+
 
 def evaluate_lenet5(learning_rate=0.1, n_epochs=200,
                     dataset='mnist.pkl.gz',
diff --git a/code/logistic_cg.py b/code/logistic_cg.py
index 05f562a1..e2c69e87 100644
--- a/code/logistic_cg.py
+++ b/code/logistic_cg.py
@@ -97,6 +97,9 @@ def __init__(self, input, n_in, n_out):
         # symbolic form
         self.y_pred = T.argmax(self.p_y_given_x, axis=1)
 
+        # keep track of model input
+        self.input = input
+
     def negative_log_likelihood(self, y):
         """Return the negative log-likelihood of the prediction of this model
         under a given target distribution.
diff --git a/code/mlp.py b/code/mlp.py
index e4b95ea8..17414d35 100644
--- a/code/mlp.py
+++ b/code/mlp.py
@@ -191,6 +191,9 @@ def __init__(self, rng, input, n_in, n_hidden, n_out):
         self.params = self.hiddenLayer.params + self.logRegressionLayer.params
         # end-snippet-3
 
+        # keep track of model input
+        self.input = input
+
 
 def test_mlp(learning_rate=0.01, L1_reg=0.00, L2_reg=0.0001, n_epochs=1000,
              dataset='mnist.pkl.gz', batch_size=20, n_hidden=500):
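
Note: storing the symbolic input on each model is what later allows a trained, pickled classifier to be applied to new data without rebuilding its graph by hand. The snippet below is a minimal sketch of that use and is not part of this diff; the file name 'best_model.pkl' and the predict helper are assumptions, while classifier.input and classifier.y_pred refer to the attributes added and shown in the logistic_cg.py hunk above.

    import pickle

    import theano


    def predict(test_set_x):
        """Load a pickled LogisticRegression model and classify test_set_x."""
        # load the trained model (assumed to have been pickled after training)
        with open('best_model.pkl', 'rb') as f:
            classifier = pickle.load(f)

        # compile a predictor directly from the symbolic input kept on the model
        predict_model = theano.function(
            inputs=[classifier.input],
            outputs=classifier.y_pred,
        )

        return predict_model(test_set_x)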