author    Determinant <ted.sybil@gmail.com>  2015-06-02 20:28:16 +0800
committer Determinant <ted.sybil@gmail.com>  2015-06-02 20:28:16 +0800
commit    74d9e9e7371c80394698fb9805cbf0cbde67a8f3 (patch)
tree      36b070f1fcfa2be8fc80c50b7a221862a0dfd14a /layer/init.lua
parent    60083f2e51935ce55cec7a4c39d1724a16d9c769 (diff)
add ParamRepo, LayerRepo, DAGLayer
Diffstat (limited to 'layer/init.lua')
-rw-r--r--  layer/init.lua  14
1 file changed, 12 insertions(+), 2 deletions(-)
diff --git a/layer/init.lua b/layer/init.lua
index 4881cb7..c8c691b 100644
--- a/layer/init.lua
+++ b/layer/init.lua
@@ -46,8 +46,8 @@ function nerv.Layer:back_propagate(next_bp_err, bp_err, input, output)
end
function nerv.Layer:check_dim_len(len_in, len_out)
- local expected_in = table.getn(self.dim_in)
- local expected_out = table.getn(self.dim_out)
+ local expected_in = #self.dim_in
+ local expected_out = #self.dim_out
if len_in > 0 and expected_in ~= len_in then
nerv.error("layer %s expects %d inputs, %d given",
self.id, len_in, expected_in)
@@ -57,3 +57,13 @@ function nerv.Layer:check_dim_len(len_in, len_out)
self.id, len_out, expected_out)
end
end
+
+function nerv.Layer:get_dim()
+ return self.dim_in, self.dim_out
+end
+
+require 'layer.affine'
+require 'layer.sigmoid'
+require 'layer.softmax_ce'
+require 'layer.bias'
+require 'layer.window'
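
A minimal usage sketch (not part of this commit) of the new get_dim accessor and the length-operator change above; the layer variable `l` and its dimensions are hypothetical:

-- assume `l` is any constructed nerv.Layer whose dim_in/dim_out are
-- plain Lua array tables, e.g. {429} and {2048}
local dim_in, dim_out = l:get_dim()
-- the '#' operator returns the array length of a sequence table and is
-- the Lua 5.1+ replacement for the deprecated table.getn used before
print(#dim_in, #dim_out)
-- check_dim_len compares these counts and calls nerv.error on a mismatch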