path: root/layer/bias.lua
author     Determinant <[email protected]>  2015-06-22 19:01:29 +0800
committer  Determinant <[email protected]>  2015-06-22 19:01:29 +0800
commit     2497fd9e7a0fae5ee4887890d7a312e0e08a93b8 (patch)
tree       382f97575bd2df9ee6abb1662b11b279fc22d72b /layer/bias.lua
parent     196e9b48a3541caccdffc5743001cced70667091 (diff)
major change: use luarocks to manage project
Diffstat (limited to 'layer/bias.lua')
-rw-r--r--  layer/bias.lua  28
1 file changed, 0 insertions, 28 deletions
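
The commit message above describes switching the project to LuaRocks for build and packaging management. The rockspec introduced by that change is not part of this diff, so the following is only a hypothetical sketch of what a minimal rockspec for such a switch might look like; the file name, URL, and every field below are assumptions for illustration, not the repository's actual contents.

-- hypothetical nerv-scm-1.rockspec (illustrative only; not the file from this commit)
package = "nerv"
version = "scm-1"
source = {
   url = "git://example.com/nerv.git"   -- placeholder URL, not the real repository location
}
description = {
   summary = "Lua-based toolkit (placeholder metadata)"
}
dependencies = {
   "lua >= 5.1"
}
build = {
   type = "make"   -- assumes a Makefile-driven build; the real build type may differ
}

With a rockspec like this, the project could be built and installed via `luarocks make`, which is presumably the management workflow the commit message refers to.
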
diff --git a/layer/bias.lua b/layer/bias.lua
deleted file mode 100644
index c99274d..0000000
--- a/layer/bias.lua
+++ /dev/null
@@ -1,28 +0,0 @@
-local BiasLayer = nerv.class("nerv.BiasLayer", "nerv.Layer")
-
-function BiasLayer:__init(id, global_conf, layer_conf)
- self.id = id
- self.gconf = global_conf
- self.bias = layer_conf.bias
- self.dim_in = layer_conf.dim_in
- self.dim_out = layer_conf.dim_out
- self:check_dim_len(1, 1)
-end
-
-function BiasLayer:init()
- if self.dim_in[1] ~= self.bias.trans:ncol() then
- nerv.error("mismatching dimensions of input and bias parameter")
- end
- if self.dim_out[1] ~= self.bias.trans:ncol() then
- nerv.error("mismatching dimensions of output and bias parameter")
- end
-end
-
-function BiasLayer:propagate(input, output)
- output[1]:copy_fromd(input[1])
- output[1]:add_row(self.bias.trans, 1.0)
-end
-
-function BiasLayer:get_params()
- return nerv.ParamRepo({self.bias})
-end
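
For reference, here is a hedged sketch of how the removed BiasLayer could have been used, based only on the constructor and methods visible in the deleted code above. The names gconf, bias_param, input_mat, output_mat, and the dimension 429 are assumptions for illustration, not code from the repository.

-- hypothetical usage sketch (only the BiasLayer interface comes from the diff above)
local layer = nerv.BiasLayer("bias0", gconf,
                             {bias = bias_param,   -- parameter object whose .trans row vector holds the bias
                              dim_in = {429},      -- must equal bias_param.trans:ncol(), checked in init()
                              dim_out = {429}})
layer:init()                                       -- validates dimensions against the bias parameter
layer:propagate({input_mat}, {output_mat})         -- copies the input, then adds the bias row
local params = layer:get_params()                  -- ParamRepo containing the bias parameter
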