Diffstat (limited to 'nerv/examples/lmptb/tnn/init.lua')
-rw-r--r--  nerv/examples/lmptb/tnn/init.lua  47
1 file changed, 47 insertions(+), 0 deletions(-)
diff --git a/nerv/examples/lmptb/tnn/init.lua b/nerv/examples/lmptb/tnn/init.lua
new file mode 100644
index 0000000..a069527
--- /dev/null
+++ b/nerv/examples/lmptb/tnn/init.lua
@@ -0,0 +1,47 @@
+local LayerT = nerv.class('nerv.LayerT')
+
+function LayerT:__init(id, global_conf, layer_conf)
+    nerv.error_method_not_implemented()
+end
+
+function LayerT:init(batch_size, chunk_size)
+    nerv.error_method_not_implemented()
+end
+
+function LayerT:update(bp_err, input, output, t)
+    nerv.error_method_not_implemented()
+end
+
+function LayerT:propagate(input, output, t)
+    nerv.error_method_not_implemented()
+end
+
+function LayerT:back_propagate(bp_err, next_bp_err, input, output, t)
+    nerv.error_method_not_implemented()
+end
+
+function LayerT:check_dim_len(len_in, len_out)
+    local actual_in = #self.dim_in
+    local actual_out = #self.dim_out
+    if len_in > 0 and actual_in ~= len_in then
+        nerv.error("layer %s expects %d inputs, %d given",
+                   self.id, len_in, actual_in)
+    end
+    if len_out > 0 and actual_out ~= len_out then
+        nerv.error("layer %s expects %d outputs, %d given",
+                   self.id, len_out, actual_out)
+    end
+end
+
+function LayerT:get_params()
+    nerv.error_method_not_implemented()
+end
+
+function LayerT:get_dim()
+    return self.dim_in, self.dim_out
+end
+
+nerv.include('tnn.lua')
+nerv.include('layersT/softmax_ce_t.lua')
+nerv.include('layers/gate_fff.lua')
+nerv.include('layer_dag_t.lua')
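
For context: nerv.LayerT is the abstract base class for the time-aware layers used by the TNN modules included at the bottom of the file. Every method that raises error_method_not_implemented must be overridden by a concrete layer, and the extra argument t is the time step within the current chunk. Passing a non-positive len_in or len_out to check_dim_len skips that check, so layers with a variable number of connections can opt out. Below is a minimal sketch of a concrete subclass; the class name nerv.IdentityLayerT, its pass-through behaviour, and the copy_fromd matrix call are illustrative assumptions, not part of this commit.

local IdentityLayerT = nerv.class("nerv.IdentityLayerT", "nerv.LayerT")

function IdentityLayerT:__init(id, global_conf, layer_conf)
    self.id = id
    self.gconf = global_conf
    self.dim_in = layer_conf.dim_in
    self.dim_out = layer_conf.dim_out
    self:check_dim_len(1, 1) -- exactly one input and one output
end

function IdentityLayerT:init(batch_size, chunk_size)
    if self.dim_in[1] ~= self.dim_out[1] then
        nerv.error("dim_in/dim_out mismatch for layer %s", self.id)
    end
end

function IdentityLayerT:propagate(input, output, t)
    -- pass the time-step-t input through unchanged
    -- (copy_fromd is assumed here; substitute whatever copy
    -- routine your matrix backend provides)
    output[1]:copy_fromd(input[1])
end

function IdentityLayerT:back_propagate(bp_err, next_bp_err, input, output, t)
    -- the gradient of an identity map is the incoming error itself
    next_bp_err[1]:copy_fromd(bp_err[1])
end

function IdentityLayerT:update(bp_err, input, output, t)
    -- no trainable parameters, nothing to update
end

function IdentityLayerT:get_params()
    return nerv.ParamRepo({}) -- empty parameter repository
end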