Calculate GRU input sums together #95

Open · wants to merge 1 commit into base: master
model/GRU.lua: 28 changes (15 additions, 13 deletions)
@@ -6,38 +6,39 @@ Creates one timestep of one GRU
 Paper reference: http://arxiv.org/pdf/1412.3555v1.pdf
 ]]--
 function GRU.gru(input_size, rnn_size, n, dropout)
-  dropout = dropout or 0 
+  dropout = dropout or 0

   -- there are n+1 inputs (hiddens on each layer and x)
   local inputs = {}
   table.insert(inputs, nn.Identity()()) -- x
   for L = 1,n do
     table.insert(inputs, nn.Identity()()) -- prev_h[L]
   end

-  function new_input_sum(insize, xv, hv)
-    local i2h = nn.Linear(insize, rnn_size)(xv)
-    local h2h = nn.Linear(rnn_size, rnn_size)(hv)
-    return nn.CAddTable()({i2h, h2h})
-  end
-
   local x, input_size_L
   local outputs = {}
   for L = 1,n do

     local prev_h = inputs[L+1]
     -- the input to this layer
-    if L == 1 then 
+    if L == 1 then
       x = OneHot(input_size)(inputs[1])
       input_size_L = input_size
-    else 
-      x = outputs[(L-1)] 
+    else
+      x = outputs[(L-1)]
       if dropout > 0 then x = nn.Dropout(dropout)(x) end -- apply dropout, if any
       input_size_L = rnn_size
     end
     -- GRU tick
+    -- evaluate the input sums at once for efficiency
+    local i2h = nn.Linear(input_size_L, 2 * rnn_size)(x)
+    local h2h = nn.Linear(rnn_size, 2 * rnn_size)(prev_h)
+    local all_input_sums = nn.CAddTable()({i2h, h2h})
+    local reshaped = nn.Reshape(2, rnn_size)(all_input_sums)
+    local n1, n2 = nn.SplitTable(2)(reshaped):split(2)
     -- forward the update and reset gates
-    local update_gate = nn.Sigmoid()(new_input_sum(input_size_L, x, prev_h))
-    local reset_gate = nn.Sigmoid()(new_input_sum(input_size_L, x, prev_h))
+    local update_gate = nn.Sigmoid()(n1)
+    local reset_gate = nn.Sigmoid()(n2)
     -- compute candidate hidden state
     local gated_hidden = nn.CMulTable()({reset_gate, prev_h})
     local p2 = nn.Linear(rnn_size, rnn_size)(gated_hidden)
@@ -50,7 +51,8 @@ function GRU.gru(input_size, rnn_size, n, dropout)

     table.insert(outputs, next_h)
   end
-  -- set up the decoder
+
+  -- set up the decoder
   local top_h = outputs[#outputs]
   if dropout > 0 then top_h = nn.Dropout(dropout)(top_h) end
   local proj = nn.Linear(rnn_size, input_size)(top_h)
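
For readers skimming the diff: the patch drops the per-gate new_input_sum helper and instead projects x and prev_h once each into a 2 * rnn_size vector, adds the two projections, and splits the sum back into the update-gate and reset-gate pre-activations, so each layer does two wider matrix multiplies instead of four narrow ones. Below is a minimal, self-contained sketch of just that fused block, assuming Torch7 with the nn and nngraph packages; the sizes, the batch of 3, and the gates module name are illustrative and not part of the PR.

  require 'nn'
  require 'nngraph'

  local input_size, rnn_size = 4, 4   -- illustrative sizes, not from the PR

  -- graph inputs
  local x      = nn.Identity()()      -- layer input
  local prev_h = nn.Identity()()      -- previous hidden state

  -- fused projections: both gates' pre-activations in one Linear per input
  local i2h = nn.Linear(input_size, 2 * rnn_size)(x)
  local h2h = nn.Linear(rnn_size, 2 * rnn_size)(prev_h)
  local all_input_sums = nn.CAddTable()({i2h, h2h})

  -- carve the summed vector back into two rnn_size-wide chunks
  local reshaped = nn.Reshape(2, rnn_size)(all_input_sums)
  local n1, n2 = nn.SplitTable(2)(reshaped):split(2)

  local update_gate = nn.Sigmoid()(n1)
  local reset_gate  = nn.Sigmoid()(n2)

  -- wrap as a module and run a forward pass to check shapes
  local gates = nn.gModule({x, prev_h}, {update_gate, reset_gate})
  local out = gates:forward({torch.rand(3, input_size), torch.rand(3, rnn_size)})
  print(out[1]:size(), out[2]:size())   -- both 3 x rnn_size

Either formulation parameterizes the same gate functions; the stacked Linear simply stores the update and reset weights in one matrix, which turns several small matrix multiplies into fewer, larger ones and keeps the nngraph smaller.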