Skip to content
Merged
Show file tree
Hide file tree
Changes from all commits
Commits
File filter

Filter by extension

Filter by extension

Conversations
Failed to load comments.
Loading
Jump to
Jump to file
Failed to load files.
Loading
Diff view
Diff view
2 changes: 1 addition & 1 deletion code/DBN.py
Original file line number Diff line number Diff line change
Expand Up @@ -174,7 +174,7 @@ def pretraining_functions(self, train_set_x, batch_size, k):

# compile the theano function
fn = theano.function(
inputs=[index, theano.Param(learning_rate, default=0.1)],
inputs=[index, theano.In(learning_rate, value=0.1)],
outputs=cost,
updates=updates,
givens={
Expand Down
2 changes: 1 addition & 1 deletion code/convolutional_mlp.py
Original file line number Diff line number Diff line change
Expand Up @@ -94,7 +94,7 @@ def __init__(self, rng, input, filter_shape, image_shape, poolsize=(2, 2)):
input=input,
filters=self.W,
filter_shape=filter_shape,
image_shape=image_shape
input_shape=image_shape
)

# downsample each feature map individually, using maxpooling
Expand Down
6 changes: 4 additions & 2 deletions code/rbm.py
Original file line number Diff line number Diff line change
Expand Up @@ -257,7 +257,8 @@ def get_cost_updates(self, lr=0.1, persistent=None, k=1):
# chain_start is the initial state corresponding to the
# 6th output
outputs_info=[None, None, None, None, None, chain_start],
n_steps=k
n_steps=k,
name="gibbs_hvh"
)
# start-snippet-3
# determine gradients on RBM parameters
Expand Down Expand Up @@ -496,7 +497,8 @@ def test_rbm(learning_rate=0.1, training_epochs=15,
) = theano.scan(
rbm.gibbs_vhv,
outputs_info=[None, None, None, None, None, persistent_vis_chain],
n_steps=plot_every
n_steps=plot_every,
name="gibbs_vhv"
)

# add to updates the shared variable that takes care of our persistent
Expand Down
2 changes: 1 addition & 1 deletion data/download.sh
Original file line number Diff line number Diff line change
Expand Up @@ -5,7 +5,7 @@ WGET=$?
which curl >/dev/null 2>&1
CURL=$?
if [ "$WGET" -eq 0 ]; then
DL_CMD="wget -c"
DL_CMD="wget --no-verbose -c"
elif [ "$CURL" -eq 0 ]; then
DL_CMD="curl -C - -O"
else
Expand Down
6 changes: 3 additions & 3 deletions doc/lenet.txt
Original file line number Diff line number Diff line change
Expand Up @@ -196,7 +196,7 @@ one of Figure 1. The input consists of 3 features maps (an RGB color image) of s

import theano
from theano import tensor as T
from theano.tensor.nnet import conv
from theano.tensor.nnet import conv2d

import numpy

Expand Down Expand Up @@ -226,7 +226,7 @@ one of Figure 1. The input consists of 3 features maps (an RGB color image) of s
dtype=input.dtype), name ='b')

# build symbolic expression that computes the convolution of input with filters in w
conv_out = conv.conv2d(input, W)
conv_out = conv2d(input, W)

# build symbolic expression to add bias and apply activation function, i.e. produce neural net layer output
# A few words on ``dimshuffle`` :
Expand Down Expand Up @@ -404,7 +404,7 @@ to be compatible with our previous MLP implementation.
Note that the term "convolution" could correspond to different mathematical operations:

1. `theano.tensor.nnet.conv2d
<http://deeplearning.net/software/theano/library/tensor/nnet/conv.html#theano.tensor.nnet.conv.conv2d>`_,
<http://deeplearning.net/software/theano/library/tensor/nnet/conv.html#theano.tensor.nnet.conv2d>`_,
which is the most common one in almost all of the recent published
convolutional models.
In this operation, each output feature map is connected to each
Expand Down