author     Determinant <[email protected]>    2015-06-02 12:51:18 +0800
committer  Determinant <[email protected]>    2015-06-02 12:51:18 +0800
commit     0d3d8f4afdc38726b8ed933dbfcb85e759145c43
tree       d0ea9b021e710b9ac8aea4bbcd56922f3fe1f1fe /layer/init.lua
parent     bf05d75bf173e1a496a277c76593537dc9cdb28a
add preprocessing layers and change layer constructor interface
Diffstat (limited to 'layer/init.lua')
 layer/init.lua | 13 +++++++++++++
 1 file changed, 13 insertions(+), 0 deletions(-)
diff --git a/layer/init.lua b/layer/init.lua
index a98621d..4881cb7 100644
--- a/layer/init.lua
+++ b/layer/init.lua
@@ -44,3 +44,16 @@ end
 function nerv.Layer:back_propagate(next_bp_err, bp_err, input, output)
     nerv.error_method_not_implemented()
 end
+
+function nerv.Layer:check_dim_len(len_in, len_out)
+    local expected_in = table.getn(self.dim_in)
+    local expected_out = table.getn(self.dim_out)
+    if len_in > 0 and expected_in ~= len_in then
+        nerv.error("layer %s expects %d inputs, %d given",
+                    self.id, len_in, expected_in)
+    end
+    if len_out > 0 and expected_out ~= len_out then
+        nerv.error("layer %s expects %d outputs, %d given",
+                    self.id, len_out, expected_out)
+    end
+end
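The new check_dim_len helper is a guard intended for layer constructors: a concrete layer declares how many inputs and outputs it accepts, and the helper compares that against the dim_in/dim_out lists taken from its configuration, raising nerv.error on a mismatch. A non-positive expected length skips the corresponding check, which lets layers with a variable number of ports opt out. Below is a minimal sketch of how a strictly one-in, one-out layer might call it under the changed constructor interface; the nerv.class declaration, the (id, global_conf, layer_conf) signature, and the layer_conf.dim_in/dim_out fields are assumptions for illustration, not shown in this diff.

-- Hypothetical example, not part of this commit: a one-input,
-- one-output layer enforcing its arity at construction time.
local SigmoidLayer = nerv.class("nerv.SigmoidLayer", "nerv.Layer")

function SigmoidLayer:__init(id, global_conf, layer_conf)
    self.id = id                        -- used in check_dim_len's error message
    self.gconf = global_conf
    self.dim_in = layer_conf.dim_in     -- e.g. {429}
    self.dim_out = layer_conf.dim_out   -- e.g. {429}
    self:check_dim_len(1, 1)            -- raises nerv.error on a mismatch
end

Note that table.getn dates the code to Lua 5.0/5.1; on 5.1 and later, the length operator (#self.dim_in) is the idiomatic equivalent.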