author    txh18 <cloudygooseg@gmail.com>  2015-11-08 17:59:27 +0800
committer txh18 <cloudygooseg@gmail.com>  2015-11-08 17:59:27 +0800
commit    a8d369d49933ffdd306f47db6b74e0d27deda5d0 (patch)
tree      8a2126153fb2363ffea94bfe8c0b9e88f11bd0ea /nerv/examples/lmptb/rnn/init.lua
parent    abc36052969ab121c8a1cfa478fc14e9e8dc78a2 (diff)
trying to test softmax_ce_t
Diffstat (limited to 'nerv/examples/lmptb/rnn/init.lua')
-rw-r--r--  nerv/examples/lmptb/rnn/init.lua  45
1 file changed, 45 insertions(+), 0 deletions(-)
diff --git a/nerv/examples/lmptb/rnn/init.lua b/nerv/examples/lmptb/rnn/init.lua
new file mode 100644
index 0000000..0e08cb6
--- /dev/null
+++ b/nerv/examples/lmptb/rnn/init.lua
@@ -0,0 +1,45 @@
+local Layer = nerv.class('nerv.LayerT')
+
+function Layer:__init(id, global_conf, layer_conf)
+ nerv.error_method_not_implemented()
+end
+
+function Layer:init(batch_size, chunk_size)
+ nerv.error_method_not_implemented()
+end
+
+function Layer:update(bp_err, input, output, t)
+ nerv.error_method_not_implemented()
+end
+
+function Layer:propagate(input, output, t)
+ nerv.error_method_not_implemented()
+end
+
+function Layer:back_propagate(bp_err, next_bp_err, input, output, t)
+ nerv.error_method_not_implemented()
+end
+
+function Layer:check_dim_len(len_in, len_out)
+ local expected_in = #self.dim_in
+ local expected_out = #self.dim_out
+ if len_in > 0 and expected_in ~= len_in then
+ nerv.error("layer %s expects %d inputs, %d given",
+ self.id, len_in, expected_in)
+ end
+ if len_out > 0 and expected_out ~= len_out then
+ nerv.error("layer %s expects %d outputs, %d given",
+ self.id, len_out, expected_out)
+ end
+end
+
+function Layer:get_params()
+ nerv.error_method_not_implemented()
+end
+
+function Layer:get_dim()
+ return self.dim_in, self.dim_out
+end
+
+nerv.include('tnn.lua')
+nerv.include('softmax_ce_t.lua')
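
The diff above defines nerv.LayerT, an abstract base class for time-aware
layers: unlike the plain nerv.Layer interface, update, propagate and
back_propagate all take an extra time index t, and init receives a
chunk_size alongside batch_size so a layer can allocate one buffer per time
step. Concrete layers (such as the softmax_ce_t.lua included at the end)
override the stubs and call check_dim_len from their constructor to validate
the number of input/output connections. The following is a minimal sketch of
such a subclass; the layer name IdentityLayerT is hypothetical, and the
copy_fromd and ParamRepo calls assume nerv's usual matrix/parameter API
rather than anything shown in this commit.

-- Hypothetical pass-through layer implementing the nerv.LayerT interface.
local IdentityLayerT = nerv.class("nerv.IdentityLayerT", "nerv.LayerT")

function IdentityLayerT:__init(id, global_conf, layer_conf)
    self.id = id
    self.gconf = global_conf
    self.dim_in = layer_conf.dim_in
    self.dim_out = layer_conf.dim_out
    self:check_dim_len(1, 1) -- exactly one input and one output connection
end

function IdentityLayerT:init(batch_size, chunk_size)
    -- stateless, so no per-step buffers; a layer like softmax_ce_t would
    -- allocate one buffer per t in [1, chunk_size] here
    if self.dim_in[1] ~= self.dim_out[1] then
        nerv.error("input and output dimensions do not match")
    end
end

function IdentityLayerT:update(bp_err, input, output, t)
    -- no trainable parameters, nothing to update
end

function IdentityLayerT:propagate(input, output, t)
    -- input/output are per-connection tables of matrices for step t;
    -- a stateless layer can ignore t itself
    output[1]:copy_fromd(input[1])
end

function IdentityLayerT:back_propagate(bp_err, next_bp_err, input, output, t)
    next_bp_err[1]:copy_fromd(bp_err[1])
end

function IdentityLayerT:get_params()
    return nerv.ParamRepo({}) -- no parameters to expose
end

Since get_dim is already implemented on the base class, a subclass like this
inherits it unchanged and only needs to fill in the compute stubs.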