author    Determinant <ted.sybil@gmail.com>    2016-03-11 13:59:46 +0800
committer Determinant <ted.sybil@gmail.com>    2016-03-11 13:59:46 +0800
commit    e6d28de460dfd06d696d369119247179c7a7525d (patch)
tree      6263fb1555ddcba962edc31ee1312679135c06c4 /nerv/layer/rnn.lua
parent    a32195e3e2ae9ca0f0c7a82e73e6bddb64568c05 (diff)
parent    f26288ba61d3d16866e1b227a71e7d9c46923436 (diff)
Merge branch 'master' of https://github.com/liuq901/nerv into liuq901-master
Conflicts:
    nerv/layer/init.lua
    nerv/nn/layer_repo.lua
Diffstat (limited to 'nerv/layer/rnn.lua')
-rw-r--r--  nerv/layer/rnn.lua  38
1 file changed, 38 insertions(+), 0 deletions(-)
diff --git a/nerv/layer/rnn.lua b/nerv/layer/rnn.lua
new file mode 100644
index 0000000..e59cf5b
--- /dev/null
+++ b/nerv/layer/rnn.lua
@@ -0,0 +1,38 @@
+local RNNLayer = nerv.class('nerv.RNNLayer', 'nerv.GraphLayer')
+
+function RNNLayer:__init(id, global_conf, layer_conf)
+    nerv.Layer.__init(self, id, global_conf, layer_conf)
+    self:check_dim_len(1, 1)
+
+    local din = layer_conf.dim_in[1]
+    local dout = layer_conf.dim_out[1]
+
+    local pr = layer_conf.pr
+    if pr == nil then
+        pr = nerv.ParamRepo({}, self.loc_type)
+    end
+
+    local layers = {
+        ['nerv.AffineLayer'] = {
+            main = {dim_in = {din, dout}, dim_out = {dout}, pr = pr},
+        },
+        ['nerv.SigmoidLayer'] = {
+            sigmoid = {dim_in = {dout}, dim_out = {dout}},
+        },
+        ['nerv.DuplicateLayer'] = {
+            dup = {dim_in = {dout}, dim_out = {dout, dout}},
+        }
+    }
+
+    local connections = {
+        {'<input>[1]', 'main[1]', 0},
+        {'main[1]', 'sigmoid[1]', 0},
+        {'sigmoid[1]', 'dup[1]', 0},
+        {'dup[1]', 'main[2]', 1},
+        {'dup[2]', '<output>[1]', 0},
+    }
+
+    self:add_prefix(layers, connections)
+    local layer_repo = nerv.LayerRepo(layers, pr, global_conf)
+    self:graph_init(layer_repo, connections)
+end
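The graph wires '<input>[1]' through the affine layer and the sigmoid, then duplicates the activation: one copy feeds back into 'main[2]' with a time offset of 1 (the third field of each connection tuple), forming the recurrence, while the other copy becomes '<output>[1]'. As a minimal usage sketch, not part of this commit: declaring the layer through nerv.LayerRepo with the (layers, param_repo, global_conf) signature seen above; the id rnn1, the dimensions, and the param_repo/gconf objects are illustrative assumptions, not names from this repository.

    -- Hypothetical sketch: instantiating nerv.RNNLayer from a config.
    -- `param_repo` and `gconf` stand for a nerv.ParamRepo and a global
    -- config assumed to be set up elsewhere; ids and dims are made up.
    local layer_repo = nerv.LayerRepo(
        {
            ['nerv.RNNLayer'] = {
                -- one recurrent hidden layer: 429-dim input frames,
                -- 2048-dim hidden state; expands internally to the
                -- main/sigmoid/dup subgraph defined in this file
                rnn1 = {dim_in = {429}, dim_out = {2048}},
            },
        },
        param_repo, gconf)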