diff options
author | Determinant <ted.sybil@gmail.com> | 2016-02-21 00:28:54 +0800 |
---|---|---|
committer | Determinant <ted.sybil@gmail.com> | 2016-02-21 00:28:54 +0800 |
commit | 8f19acf152652ff887d3fe978e78a076dca60611 (patch) | |
tree | 8ced512733bca426d479f44320f15110090ac986 /nerv/layer/init.lua | |
parent | 620c1971c3c821337cd16cca20cddd27f7bc6085 (diff) |
add layers from `layersT/` to `layer/`
Diffstat (limited to 'nerv/layer/init.lua')
-rw-r--r-- | nerv/layer/init.lua | 6 |
1 file changed, 5 insertions, 1 deletion
diff --git a/nerv/layer/init.lua b/nerv/layer/init.lua
index 43c2250..6b7a1d7 100644
--- a/nerv/layer/init.lua
+++ b/nerv/layer/init.lua
@@ -90,7 +90,7 @@ function Layer:find_param(pid_list, lconf, gconf, p_type, p_dim)
         end
     end
     nerv.info("param [%s] of layer [%s] is not found in `layer_conf` or `layer_conf.pr`, " ..
-                "switch to auto-generate.", pid_list_str, self.id)
+                "switch to auto-generate", pid_list_str, self.id)
     local pid_g = self.id .. '_' .. pid_list[1]
     p = p_type(pid_g, gconf)
     p.trans = gconf.cumat_type(unpack(p_dim))
@@ -113,3 +113,7 @@ nerv.include('affine_recurrent.lua')
 nerv.include('softmax.lua')
 nerv.include('elem_mul.lua')
 nerv.include('gate_fff.lua')
+nerv.include('lstm.lua')
+nerv.include('lstm_gate.lua')
+nerv.include('dropout.lua')
+nerv.include('gru.lua')