Diffstat (limited to 'nerv/tnn/init.lua')
-rw-r--r-- | nerv/tnn/init.lua | 49 |
1 file changed, 49 insertions, 0 deletions
diff --git a/nerv/tnn/init.lua b/nerv/tnn/init.lua
new file mode 100644
index 0000000..979f5d8
--- /dev/null
+++ b/nerv/tnn/init.lua
@@ -0,0 +1,49 @@
+local LayerT = nerv.class('nerv.LayerT')
+
+function LayerT:__init(id, global_conf, layer_conf)
+    nerv.error_method_not_implemented()
+end
+
+function LayerT:init(batch_size, chunk_size)
+    nerv.error_method_not_implemented()
+end
+
+function LayerT:update(bp_err, input, output, t)
+    nerv.error_method_not_implemented()
+end
+
+function LayerT:propagate(input, output, t)
+    nerv.error_method_not_implemented()
+end
+
+function LayerT:back_propagate(bp_err, next_bp_err, input, output, t)
+    nerv.error_method_not_implemented()
+end
+
+function LayerT:check_dim_len(len_in, len_out)
+    local expected_in = #self.dim_in
+    local expected_out = #self.dim_out
+    if len_in > 0 and expected_in ~= len_in then
+        nerv.error("layer %s expects %d inputs, %d given",
+                   self.id, len_in, expected_in)
+    end
+    if len_out > 0 and expected_out ~= len_out then
+        nerv.error("layer %s expects %d outputs, %d given",
+                   self.id, len_out, expected_out)
+    end
+end
+
+function LayerT:get_params()
+    nerv.error_method_not_implemented()
+end
+
+function LayerT:get_dim()
+    return self.dim_in, self.dim_out
+end
+
+nerv.include('sutil.lua')
+nerv.include('tnn.lua')
+nerv.include('layersT/softmax_ce_t.lua')
+nerv.include('layersT/lstm_t.lua')
+nerv.include('layersT/dropout_t.lua')
+nerv.include('layer_dag_t.lua')
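
For context, nerv.LayerT is an abstract base class: concrete time-aware layers (such as the included layersT/dropout_t.lua or layersT/lstm_t.lua) subclass it and override the methods that raise nerv.error_method_not_implemented(). The sketch below is a minimal, hypothetical parameter-free pass-through layer written against the interface shown in this commit; the class name nerv.IdentityLayerT, the layer_conf.dim_in/dim_out fields, the copy_fromd matrix copy, and the empty nerv.ParamRepo return are assumptions for illustration, not code from this repository.

-- Hypothetical sketch: a pass-through layer implementing the nerv.LayerT
-- interface. Names and matrix/parameter calls are assumptions.
local IdentityLayerT = nerv.class('nerv.IdentityLayerT', 'nerv.LayerT')

function IdentityLayerT:__init(id, global_conf, layer_conf)
    self.id = id
    self.gconf = global_conf
    self.dim_in = layer_conf.dim_in    -- assumed: table of input port widths
    self.dim_out = layer_conf.dim_out  -- assumed: table of output port widths
    self:check_dim_len(1, 1)           -- exactly one input and one output port
end

function IdentityLayerT:init(batch_size, chunk_size)
    -- nothing to allocate: this layer holds no parameters or state
end

function IdentityLayerT:propagate(input, output, t)
    -- copy the input at time step t straight through to the output
    output[1]:copy_fromd(input[1])
end

function IdentityLayerT:back_propagate(bp_err, next_bp_err, input, output, t)
    -- the gradient passes through unchanged
    next_bp_err[1]:copy_fromd(bp_err[1])
end

function IdentityLayerT:update(bp_err, input, output, t)
    -- no parameters, so there is nothing to update
end

function IdentityLayerT:get_params()
    return nerv.ParamRepo({})  -- assumed: an empty parameter repository
end

A real layer would additionally allocate its parameters in init(batch_size, chunk_size) and apply gradient updates in update(); check_dim_len (inherited from LayerT above) is what enforces the expected number of input and output ports at construction time.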