diff options
author | Determinant <ted.sybil@gmail.com> | 2016-03-11 17:35:13 +0800 |
---|---|---|
committer | Determinant <ted.sybil@gmail.com> | 2016-03-11 17:35:13 +0800 |
commit | 13729e83219cd90e33f329c49a50f6f4a4420721 (patch) | |
tree | cf5c43f1ddad7bc2430ea8191f943b0783e5fc2c /nerv/layer/rnn.lua | |
parent | a32195e3e2ae9ca0f0c7a82e73e6bddb64568c05 (diff) | |
parent | a54332ce81129e81fbb1d041ec41aa5955868c5e (diff) |
Merge branch 'liuq901-master'
Diffstat (limited to 'nerv/layer/rnn.lua')
-rw-r--r-- | nerv/layer/rnn.lua | 38 |
1 file changed, 38 insertions, 0 deletions
local RNNLayer = nerv.class('nerv.RNNLayer', 'nerv.GraphLayer')

--- Simple (Elman-style) recurrent layer expressed as a sub-graph:
-- the input and the previous hidden state both feed one affine layer,
-- a sigmoid squashes its output, and a duplicate layer fans the result
-- out to (a) the recurrent path back into the affine layer and (b) the
-- layer's external output.
-- @param id layer identifier
-- @param global_conf global configuration table
-- @param layer_conf layer configuration; expects dim_in = {din},
--        dim_out = {dout}, and optionally pr (a shared ParamRepo)
function RNNLayer:__init(id, global_conf, layer_conf)
    nerv.Layer.__init(self, id, global_conf, layer_conf)
    self:check_dim_len(1, 1)

    local input_dim = layer_conf.dim_in[1]
    local hidden_dim = layer_conf.dim_out[1]

    -- Reuse a caller-supplied parameter repo when present; otherwise
    -- create a fresh, empty one.
    local param_repo = layer_conf.pr
    if param_repo == nil then
        param_repo = nerv.ParamRepo({}, self.loc_type)
    end

    local sub_layers = {
        ['nerv.AffineLayer'] = {
            -- two inputs: external input plus the recurrent hidden state
            main = {dim_in = {input_dim, hidden_dim},
                    dim_out = {hidden_dim}, pr = param_repo},
        },
        ['nerv.SigmoidLayer'] = {
            sigmoid = {dim_in = {hidden_dim}, dim_out = {hidden_dim}},
        },
        ['nerv.DuplicateLayer'] = {
            dup = {dim_in = {hidden_dim}, dim_out = {hidden_dim, hidden_dim}},
        }
    }

    -- Each entry is {source_port, destination_port, time_delay}; the
    -- delay of 1 on dup -> main closes the recurrent loop.
    local connections = {
        {'<input>[1]', 'main[1]', 0},
        {'main[1]', 'sigmoid[1]', 0},
        {'sigmoid[1]', 'dup[1]', 0},
        {'dup[1]', 'main[2]', 1},
        {'dup[2]', '<output>[1]', 0},
    }

    self:add_prefix(sub_layers, connections)
    local layer_repo = nerv.LayerRepo(sub_layers, param_repo, global_conf)
    self:graph_init(layer_repo, connections)
end