Diffstat (limited to 'nerv/examples/lmptb/rnn/init.lua')
-rw-r--r-- | nerv/examples/lmptb/rnn/init.lua | 21
1 file changed, 11 insertions(+), 10 deletions(-)
diff --git a/nerv/examples/lmptb/rnn/init.lua b/nerv/examples/lmptb/rnn/init.lua
index 0e08cb6..6507582 100644
--- a/nerv/examples/lmptb/rnn/init.lua
+++ b/nerv/examples/lmptb/rnn/init.lua
@@ -1,26 +1,26 @@
-local Layer = nerv.class('nerv.LayerT')
+local LayerT = nerv.class('nerv.LayerT')
 
-function Layer:__init(id, global_conf, layer_conf)
+function LayerT:__init(id, global_conf, layer_conf)
     nerv.error_method_not_implemented()
 end
 
-function Layer:init(batch_size, chunk_size)
+function LayerT:init(batch_size, chunk_size)
     nerv.error_method_not_implemented()
 end
 
-function Layer:update(bp_err, input, output, t)
+function LayerT:update(bp_err, input, output, t)
     nerv.error_method_not_implemented()
 end
 
-function Layer:propagate(input, output, t)
+function LayerT:propagate(input, output, t)
     nerv.error_method_not_implemented()
 end
 
-function Layer:back_propagate(bp_err, next_bp_err, input, output, t)
+function LayerT:back_propagate(bp_err, next_bp_err, input, output, t)
     nerv.error_method_not_implemented()
 end
 
-function Layer:check_dim_len(len_in, len_out)
+function LayerT:check_dim_len(len_in, len_out)
     local expected_in = #self.dim_in
     local expected_out = #self.dim_out
     if len_in > 0 and expected_in ~= len_in then
@@ -33,13 +33,14 @@ function Layer:check_dim_len(len_in, len_out)
     end
 end
 
-function Layer:get_params()
+function LayerT:get_params()
     nerv.error_method_not_implemented()
 end
 
-function Layer:get_dim()
+function LayerT:get_dim()
     return self.dim_in, self.dim_out
 end
 
 nerv.include('tnn.lua')
-nerv.include('softmax_ce_t.lua')
+nerv.include('layersT/softmax_ce_t.lua')
+nerv.include('layers/gate_fff.lua')
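
For context, the file above defines nerv.LayerT as an abstract base class for time-indexed layers: every method except check_dim_len and get_dim raises nerv.error_method_not_implemented(), so concrete layers are expected to override them. Below is a minimal, hypothetical sketch (not part of this commit) of how such a subclass could look. The class name DummyLayerT, the two-argument nerv.class(name, parent) inheritance call, the layer_conf fields, the port/timestep indexing of input and output, and the copy_fromd / ParamRepo calls are assumptions for illustration only, not code taken from the repository.

-- hypothetical sketch: an identity layer implementing the nerv.LayerT interface
local DummyLayerT = nerv.class('nerv.DummyLayerT', 'nerv.LayerT')

function DummyLayerT:__init(id, global_conf, layer_conf)
    self.id = id
    self.gconf = global_conf
    self.dim_in = layer_conf.dim_in    -- e.g. {n}
    self.dim_out = layer_conf.dim_out  -- e.g. {n}
    self:check_dim_len(1, 1)           -- exactly one input port and one output port
end

function DummyLayerT:init(batch_size, chunk_size)
    -- allocate per-timestep buffers here if the layer needs any; identity needs none
end

function DummyLayerT:propagate(input, output, t)
    -- pass the input through unchanged at timestep t (indexing scheme assumed)
    output[1]:copy_fromd(input[1])
end

function DummyLayerT:back_propagate(bp_err, next_bp_err, input, output, t)
    -- the gradient of an identity map is the error itself
    next_bp_err[1]:copy_fromd(bp_err[1])
end

function DummyLayerT:update(bp_err, input, output, t)
    -- no trainable parameters, so nothing to update
end

function DummyLayerT:get_params()
    return nerv.ParamRepo({})  -- empty parameter repository (constructor usage assumed)
end

Such a layer would then be loaded through an nerv.include call like the two added at the end of this commit, which is why the include paths were moved under layersT/ and layers/.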