path: root/nerv/layer
author     Determinant <ted.sybil@gmail.com>  2015-06-22 19:01:29 +0800
committer  Determinant <ted.sybil@gmail.com>  2015-06-22 19:01:29 +0800
commit     2497fd9e7a0fae5ee4887890d7a312e0e08a93b8 (patch)
tree       382f97575bd2df9ee6abb1662b11b279fc22d72b /nerv/layer
parent     196e9b48a3541caccdffc5743001cced70667091 (diff)
major change: use luarocks to manage project
Diffstat (limited to 'nerv/layer')
-rw-r--r--  nerv/layer/affine.lua      91
-rw-r--r--  nerv/layer/bias.lua        28
-rw-r--r--  nerv/layer/combiner.lua    59
-rw-r--r--  nerv/layer/init.lua        79
-rw-r--r--  nerv/layer/mse.lua         52
-rw-r--r--  nerv/layer/sigmoid.lua     31
-rw-r--r--  nerv/layer/softmax_ce.lua  68
-rw-r--r--  nerv/layer/window.lua      28
8 files changed, 436 insertions, 0 deletions
diff --git a/nerv/layer/affine.lua b/nerv/layer/affine.lua
new file mode 100644
index 0000000..00cbcfb
--- /dev/null
+++ b/nerv/layer/affine.lua
@@ -0,0 +1,91 @@
+local MatrixParam = nerv.class('nerv.MatrixParam', 'nerv.Param')
+local LinearTransParam = nerv.class('nerv.LinearTransParam', 'nerv.MatrixParam')
+local BiasParam = nerv.class('nerv.BiasParam', 'nerv.MatrixParam')
+local AffineLayer = nerv.class('nerv.AffineLayer', 'nerv.Layer')
+
+function MatrixParam:read(handle)
+    self.trans = self.gconf.cumat_type.new_from_host(
+                    nerv.MMatrixFloat.load(handle))
+end
+
+function MatrixParam:write(handle)
+    self.trans:new_to_host():save(handle)
+end
+
+function MatrixParam:train_init()
+    self.correction = self.trans:create()
+    self.correction:fill(0)
+end
+
+function MatrixParam:update(gradient)
+    local gconf = self.gconf
+    self.correction:add(self.correction, gradient, gconf.momentum, 1.0)
+    -- momentum gain
+    local mmt_gain = 1.0 / (1.0 - gconf.momentum)
+    local n = self.gconf.batch_size * mmt_gain
+    -- perform update
+    self.trans:add(self.trans, self.correction, 1.0, -gconf.lrate / n)
+end
+
+function LinearTransParam:update(gradient)
+    MatrixParam.update(self, gradient)
+    local gconf = self.gconf
+    -- weight decay
+    self.trans:add(self.trans, self.trans, 1.0, -gconf.lrate * gconf.wcost)
+end
+
+function AffineLayer:__init(id, global_conf, layer_conf)
+    self.id = id
+    self.ltp = layer_conf.ltp
+    self.bp = layer_conf.bp
+    self.dim_in = layer_conf.dim_in
+    self.dim_out = layer_conf.dim_out
+    self.gconf = global_conf
+    self:check_dim_len(1, 1) -- exactly one input and one output
+    self.direct_update = layer_conf.direct_update
+end
+
+function AffineLayer:init(batch_size)
+    if self.ltp.trans:ncol() ~= self.bp.trans:ncol() then
+        nerv.error("mismatching dimensions of linear transform and bias parameter")
+    end
+    if self.dim_in[1] ~= self.ltp.trans:nrow() then
+        nerv.error("mismatching dimensions of linear transform parameter and input")
+    end
+    if self.dim_out[1] ~= self.ltp.trans:ncol() then
+        nerv.error("mismatching dimensions of linear transform parameter and output")
+    end
+    self.ltp_grad = self.ltp.trans:create()
+    self.ltp:train_init()
+    self.bp:train_init()
+end
+
+function AffineLayer:update(bp_err, input, output)
+    if self.direct_update then
+        self.ltp.correction:mul(input[1], bp_err[1], 1.0, self.gconf.momentum, 'T', 'N')
+        -- momentum gain
+        local mmt_gain = 1.0 / (1.0 - self.gconf.momentum)
+        local n = self.gconf.batch_size * mmt_gain
+        -- perform update
+        self.ltp.trans:add(self.ltp.trans, self.ltp.correction, 1.0, -self.gconf.lrate / n)
+    else
+        self.ltp_grad:mul(input[1], bp_err[1], 1.0, 0.0, 'T', 'N')
+        self.ltp:update(self.ltp_grad)
+    end
+    self.bp:update(bp_err[1]:colsum())
+end
+
+function AffineLayer:propagate(input, output)
+    -- apply linear transform
+    output[1]:mul(input[1], self.ltp.trans, 1.0, 0.0, 'N', 'N')
+    -- add bias
+    output[1]:add_row(self.bp.trans, 1.0)
+end
+
+function AffineLayer:back_propagate(bp_err, next_bp_err, input, output)
+    next_bp_err[1]:mul(bp_err[1], self.ltp.trans, 1.0, 0.0, 'N', 'T')
+end
+
+function AffineLayer:get_params()
+    return nerv.ParamRepo({self.ltp, self.bp})
+end
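
MatrixParam:update above implements momentum-smoothed SGD: the correction keeps gconf.momentum times its previous value plus the new gradient, and the weights then move by -lrate / n times that correction, with n = batch_size / (1 - momentum); LinearTransParam adds L2 weight decay on top, and AffineLayer computes the weight gradient as input^T * bp_err. Below is a minimal usage sketch; the gconf fields, the 429/2048 dimensions, and the way the parameters are built here are illustrative assumptions, while the layer and parameter API calls follow the code above.

    -- illustrative sketch, not part of this commit
    local gconf = {lrate = 0.1, wcost = 1e-6, momentum = 0.9,
                   batch_size = 256, cumat_type = nerv.CuMatrixFloat}
    local ltp = nerv.LinearTransParam("affine0_ltp", gconf)
    ltp.trans = gconf.cumat_type(429, 2048)            -- dim_in x dim_out
    local bp = nerv.BiasParam("affine0_bp", gconf)
    bp.trans = gconf.cumat_type(1, 2048)
    local affine = nerv.AffineLayer("affine0", gconf,
                                    {ltp = ltp, bp = bp,
                                     dim_in = {429}, dim_out = {2048}})
    affine:init(gconf.batch_size)
    local input = {gconf.cumat_type(gconf.batch_size, 429)}
    local output = {gconf.cumat_type(gconf.batch_size, 2048)}
    affine:propagate(input, output)   -- output = input * ltp.trans + bp.trans
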
diff --git a/nerv/layer/bias.lua b/nerv/layer/bias.lua
new file mode 100644
index 0000000..c99274d
--- /dev/null
+++ b/nerv/layer/bias.lua
@@ -0,0 +1,28 @@
+local BiasLayer = nerv.class("nerv.BiasLayer", "nerv.Layer")
+
+function BiasLayer:__init(id, global_conf, layer_conf)
+    self.id = id
+    self.gconf = global_conf
+    self.bias = layer_conf.bias
+    self.dim_in = layer_conf.dim_in
+    self.dim_out = layer_conf.dim_out
+    self:check_dim_len(1, 1)
+end
+
+function BiasLayer:init()
+    if self.dim_in[1] ~= self.bias.trans:ncol() then
+        nerv.error("mismatching dimensions of input and bias parameter")
+    end
+    if self.dim_out[1] ~= self.bias.trans:ncol() then
+        nerv.error("mismatching dimensions of output and bias parameter")
+    end
+end
+
+function BiasLayer:propagate(input, output)
+    output[1]:copy_fromd(input[1])
+    output[1]:add_row(self.bias.trans, 1.0)
+end
+
+function BiasLayer:get_params()
+    return nerv.ParamRepo({self.bias})
+end
diff --git a/nerv/layer/combiner.lua b/nerv/layer/combiner.lua
new file mode 100644
index 0000000..7bd7617
--- /dev/null
+++ b/nerv/layer/combiner.lua
@@ -0,0 +1,59 @@
+local CombinerLayer = nerv.class('nerv.CombinerLayer', 'nerv.Layer')
+
+function CombinerLayer:__init(id, global_conf, layer_conf)
+    self.id = id
+    self.lambda = layer_conf.lambda
+    self.dim_in = layer_conf.dim_in
+    self.dim_out = layer_conf.dim_out
+    self.gconf = global_conf
+    self:check_dim_len(#self.lambda, -1)
+    if #self.dim_in < 1 then
+        nerv.error("no input specified")
+    end
+    if #self.dim_out < 1 then
+        nerv.error("no output specified")
+    end
+end
+
+function CombinerLayer:init(batch_size)
+    local dim = self.dim_in[1]
+    for i = 2, #self.dim_in do
+        if self.dim_in[i] ~= dim then
+            nerv.error("mismatching dimensions of inputs")
+        end
+    end
+    for i = 1, #self.dim_out do
+        if self.dim_out[i] ~= dim then
+            nerv.error("mismatching dimensions of inputs/outputs")
+        end
+    end
+    self.sum = self.gconf.cumat_type(batch_size, dim)
+end
+
+function CombinerLayer:update(bp_err, input, output)
+end
+
+function CombinerLayer:propagate(input, output)
+    output[1]:fill(0)
+    for i = 1, #self.dim_in do
+        output[1]:add(output[1], input[i], 1.0, self.lambda[i])
+    end
+    for i = 2, #self.dim_out do
+        output[i]:copy_fromd(output[1])
+    end
+end
+
+function CombinerLayer:back_propagate(bp_err, next_bp_err, input, output)
+    local sum = self.sum
+    sum:copy_fromd(bp_err[1])
+    for i = 2, #self.dim_out do
+        sum:add(sum, bp_err[i], 1.0, 1.0)
+    end
+    for i = 1, #self.dim_in do
+        next_bp_err[i]:add(next_bp_err[i], sum, 0.0, self.lambda[i])
+    end
+end
+
+function CombinerLayer:get_params()
+    return nerv.ParamRepo({})
+end
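
The combiner forms an element-wise weighted sum of its inputs, output[1] = sum_i lambda[i] * input[i], copies that sum to any additional output ports, and on the backward pass feeds each input the sum of all output errors scaled by its own lambda[i]. A plain-Lua sketch of that arithmetic for a single row (illustration only, not using the GPU matrix API):

    local lambda = {0.7, 0.3}
    local inputs = {{1.0, 2.0}, {10.0, 20.0}}
    local out = {0.0, 0.0}
    for i = 1, #inputs do
        for j = 1, #out do
            out[j] = out[j] + lambda[i] * inputs[i][j]
        end
    end
    -- out is now {0.7*1 + 0.3*10, 0.7*2 + 0.3*20} = {3.7, 7.4}
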
diff --git a/nerv/layer/init.lua b/nerv/layer/init.lua
new file mode 100644
index 0000000..3c55a94
--- /dev/null
+++ b/nerv/layer/init.lua
@@ -0,0 +1,79 @@
+-- The following methods must be implemented to let a layer work properly
+
+local Param = nerv.class('nerv.Param')
+
+function Param:__init(id, global_conf)
+    self.id = id
+    self.gconf = global_conf
+end
+
+function Param:get_info()
+    return self.info
+end
+
+function Param:set_info(info)
+    self.info = info
+end
+
+function Param:read(handle)
+    nerv.error_method_not_implemented()
+end
+
+function Param:write(handle)
+    nerv.error_method_not_implemented()
+end
+
+function Param:update(gradient)
+    nerv.error_method_not_implemented()
+end
+
+local Layer = nerv.class('nerv.Layer')
+
+function Layer:__init(id, global_conf, layer_conf)
+    nerv.error_method_not_implemented()
+end
+
+function Layer:init(batch_size)
+    nerv.error_method_not_implemented()
+end
+
+function Layer:update(bp_err, input, output)
+    nerv.error_method_not_implemented()
+end
+
+function Layer:propagate(input, output)
+    nerv.error_method_not_implemented()
+end
+
+function Layer:back_propagate(bp_err, next_bp_err, input, output)
+    nerv.error_method_not_implemented()
+end
+
+function Layer:check_dim_len(len_in, len_out)
+    local expected_in = #self.dim_in
+    local expected_out = #self.dim_out
+    if len_in > 0 and expected_in ~= len_in then
+        nerv.error("layer %s expects %d inputs, %d given",
+                    self.id, len_in, expected_in)
+    end
+    if len_out > 0 and expected_out ~= len_out then
+        nerv.error("layer %s expects %d outputs, %d given",
+                    self.id, len_out, expected_out)
+    end
+end
+
+function Layer:get_params()
+    nerv.error_method_not_implemented()
+end
+
+function Layer:get_dim()
+    return self.dim_in, self.dim_out
+end
+
+nerv.include('affine.lua')
+nerv.include('sigmoid.lua')
+nerv.include('softmax_ce.lua')
+nerv.include('bias.lua')
+nerv.include('window.lua')
+nerv.include('mse.lua')
+nerv.include('combiner.lua')
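
init.lua fixes the interface every layer is written against: __init, init, update, propagate, back_propagate and get_params, with check_dim_len and get_dim provided by the base class. A minimal sketch of a parameter-free pass-through layer built on this interface (hypothetical example, not part of this commit):

    local IdentityLayer = nerv.class("nerv.IdentityLayer", "nerv.Layer")

    function IdentityLayer:__init(id, global_conf, layer_conf)
        self.id = id
        self.gconf = global_conf
        self.dim_in = layer_conf.dim_in
        self.dim_out = layer_conf.dim_out
        self:check_dim_len(1, 1)
    end

    function IdentityLayer:init(batch_size)
        if self.dim_in[1] ~= self.dim_out[1] then
            nerv.error("mismatching dimensions of input and output")
        end
    end

    function IdentityLayer:update(bp_err, input, output)
        -- no params, therefore do nothing
    end

    function IdentityLayer:propagate(input, output)
        output[1]:copy_fromd(input[1])
    end

    function IdentityLayer:back_propagate(bp_err, next_bp_err, input, output)
        next_bp_err[1]:copy_fromd(bp_err[1])
    end

    function IdentityLayer:get_params()
        return nerv.ParamRepo({})
    end
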
diff --git a/nerv/layer/mse.lua b/nerv/layer/mse.lua
new file mode 100644
index 0000000..9a97add
--- /dev/null
+++ b/nerv/layer/mse.lua
@@ -0,0 +1,52 @@
+local MSELayer = nerv.class("nerv.MSELayer", "nerv.Layer")
+
+function MSELayer:__init(id, global_conf, layer_conf)
+    self.id = id
+    self.dim_in = layer_conf.dim_in
+    self.dim_out = layer_conf.dim_out
+    self.gconf = global_conf
+    self:check_dim_len(2, -1)
+end
+
+function MSELayer:init(batch_size)
+    if self.dim_in[1] ~= self.dim_in[2] then
+        nerv.error("mismatching dimensions of previous network output and labels")
+    end
+    self.scale = 1 / self.dim_in[1]
+    self.total_mse = 0.0
+    self.total_frames = 0
+    self.mse = self.gconf.cumat_type(batch_size, self.dim_in[1])
+    self.mse_sum = self.gconf.cumat_type(batch_size, 1)
+    self.diff = self.mse:create()
+end
+
+function MSELayer:update(bp_err, input, output)
+    -- no params, therefore do nothing
+end
+
+function MSELayer:propagate(input, output)
+    local mse = self.mse
+    local mse_sum = self.mse_sum
+    mse:add(input[1], input[2], 1.0, -1.0)
+    self.diff:copy_fromd(mse)
+    mse:mul_elem(mse, mse)
+    mse_sum:add(mse_sum, mse:rowsum(mse), 0.0, self.scale)
+    if output[1] ~= nil then
+        output[1]:copy_fromd(mse_sum)
+    end
+    self.total_mse = self.total_mse + mse_sum:colsum()[0]
+    self.total_frames = self.total_frames + mse_sum:nrow()
+end
+
+-- NOTE: must call propagate before back_propagate
+function MSELayer:back_propagate(bp_err, next_bp_err, input, output)
+    local nbe = next_bp_err[1]
+    nbe:add(nbe, self.diff, 0.0, 2 * self.scale)
+    if bp_err[1] ~= nil then
+        nbe:scale_rows_by_col(bp_err[1])
+    end
+end
+
+function MSELayer:get_params()
+    return nerv.ParamRepo({})
+end
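
With scale = 1 / dim_in[1], propagate reports the per-frame mean squared error between the network output and the target, and back_propagate returns 2 * scale * (output - target), optionally rescaled row-wise by an incoming error. A plain-Lua check of that arithmetic for a single frame (illustration only):

    local x, y = {1.0, 2.0, 3.0}, {1.5, 1.5, 3.0}
    local scale = 1 / #x
    local mse, grad = 0.0, {}
    for j = 1, #x do
        local d = x[j] - y[j]
        mse = mse + scale * d * d   -- per-frame value accumulated in mse_sum
        grad[j] = 2 * scale * d     -- per-element value written to next_bp_err
    end
    -- mse == 1/6, grad == {-1/3, 1/3, 0}
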
diff --git a/nerv/layer/sigmoid.lua b/nerv/layer/sigmoid.lua
new file mode 100644
index 0000000..dfd09eb
--- /dev/null
+++ b/nerv/layer/sigmoid.lua
@@ -0,0 +1,31 @@
+local SigmoidLayer = nerv.class("nerv.SigmoidLayer", "nerv.Layer")
+
+function SigmoidLayer:__init(id, global_conf, layer_conf)
+    self.id = id
+    self.gconf = global_conf
+    self.dim_in = layer_conf.dim_in
+    self.dim_out = layer_conf.dim_out
+    self:check_dim_len(1, 1)
+end
+
+function SigmoidLayer:init()
+    if self.dim_in[1] ~= self.dim_out[1] then
+        nerv.error("mismatching dimensions of input and output")
+    end
+end
+
+function SigmoidLayer:update(bp_err, input, output)
+    -- no params, therefore do nothing
+end
+
+function SigmoidLayer:propagate(input, output)
+    output[1]:sigmoid(input[1])
+end
+
+function SigmoidLayer:back_propagate(bp_err, next_bp_err, input, output)
+    next_bp_err[1]:sigmoid_grad(bp_err[1], output[1])
+end
+
+function SigmoidLayer:get_params()
+    return nerv.ParamRepo({})
+end
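
sigmoid_grad only needs the layer output: for y = sigmoid(x), dy/dx = y * (1 - y), so the propagated error is bp_err * y * (1 - y). A plain-Lua equivalent for a single value (illustration only):

    local function sigmoid(x) return 1 / (1 + math.exp(-x)) end
    local function sigmoid_grad(err, y) return err * y * (1 - y) end

    local y = sigmoid(0.5)            -- forward step, as in propagate
    local e = sigmoid_grad(1.0, y)    -- backward step, as in back_propagate
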
diff --git a/nerv/layer/softmax_ce.lua b/nerv/layer/softmax_ce.lua
new file mode 100644
index 0000000..daf891e
--- /dev/null
+++ b/nerv/layer/softmax_ce.lua
@@ -0,0 +1,68 @@
+local SoftmaxCELayer = nerv.class("nerv.SoftmaxCELayer", "nerv.Layer")
+
+function SoftmaxCELayer:__init(id, global_conf, layer_conf)
+    self.id = id
+    self.gconf = global_conf
+    self.dim_in = layer_conf.dim_in
+    self.dim_out = layer_conf.dim_out
+    self.compressed = layer_conf.compressed
+    if self.compressed == nil then
+        self.compressed = false
+    end
+    self:check_dim_len(2, -1) -- two inputs: nn output and label
+end
+
+function SoftmaxCELayer:init(batch_size)
+    if not self.compressed and (self.dim_in[1] ~= self.dim_in[2]) then
+        nerv.error("mismatching dimensions of previous network output and labels")
+    end
+    self.total_ce = 0.0
+    self.total_correct = 0
+    self.total_frames = 0
+    self.softmax = self.gconf.cumat_type(batch_size, self.dim_in[1])
+    self.ce = self.softmax:create()
+end
+
+function SoftmaxCELayer:update(bp_err, input, output)
+    -- no params, therefore do nothing
+end
+
+function SoftmaxCELayer:propagate(input, output)
+    local softmax = self.softmax
+    local ce = self.ce
+    local classified = softmax:softmax(input[1])
+    local label = input[2]
+    ce:log_elem(softmax)
+    if self.compressed then
+        label = label:decompress(input[1]:ncol())
+    end
+    ce:mul_elem(ce, label)
+    ce = ce:rowsum()
+    if output[1] ~= nil then
+        output[1]:copy_fromd(ce)
+    end
+    -- add total ce
+    self.total_ce = self.total_ce - ce:colsum()[0]
+    self.total_frames = self.total_frames + softmax:nrow()
+    -- TODO: add colsame for uncompressed label
+    if self.compressed then
+        self.total_correct = self.total_correct + classified:colsame(input[2])[0]
+    end
+end
+
+function SoftmaxCELayer:back_propagate(bp_err, next_bp_err, input, output)
+    -- softmax output - label
+    local label = input[2]
+    if self.compressed then
+        label = label:decompress(input[1]:ncol())
+    end
+    local nbe = next_bp_err[1]
+    nbe:add(self.softmax, label, 1.0, -1.0)
+    if bp_err[1] ~= nil then
+        nbe:scale_rows_by_col(bp_err[1])
+    end
+end
+
+function SoftmaxCELayer:get_params()
+    return nerv.ParamRepo({})
+end
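
Fusing the softmax with the cross-entropy keeps the backward pass simple: next_bp_err is just softmax(x) - label, while total_ce accumulates -sum(label * log(softmax(x))) and, for compressed labels, total_correct counts rows whose argmax matches the label. A plain-Lua sketch of that math for a single frame with a one-hot label (illustration only):

    local x, label = {1.0, 2.0, 0.5}, {0, 1, 0}
    local exps, z = {}, 0.0
    for j = 1, #x do exps[j] = math.exp(x[j]); z = z + exps[j] end
    local ce, grad = 0.0, {}
    for j = 1, #x do
        local p = exps[j] / z                -- softmax output
        ce = ce - label[j] * math.log(p)     -- contribution to total_ce
        grad[j] = p - label[j]               -- next_bp_err: softmax - label
    end
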
diff --git a/nerv/layer/window.lua b/nerv/layer/window.lua
new file mode 100644
index 0000000..4e9a3b1
--- /dev/null
+++ b/nerv/layer/window.lua
@@ -0,0 +1,28 @@
+local WindowLayer = nerv.class("nerv.WindowLayer", "nerv.Layer")
+
+function WindowLayer:__init(id, global_conf, layer_conf)
+    self.id = id
+    self.gconf = global_conf
+    self.window = layer_conf.window
+    self.dim_in = layer_conf.dim_in
+    self.dim_out = layer_conf.dim_out
+    self:check_dim_len(1, 1)
+end
+
+function WindowLayer:init()
+    if self.dim_in[1] ~= self.window.trans:ncol() then
+        nerv.error("mismatching dimensions of input and window parameter")
+    end
+    if self.dim_out[1] ~= self.window.trans:ncol() then
+        nerv.error("mismatching dimensions of output and window parameter")
+    end
+end
+
+function WindowLayer:propagate(input, output)
+    output[1]:copy_fromd(input[1])
+    output[1]:scale_rows_by_row(self.window.trans)
+end
+
+function WindowLayer:get_params()
+    return nerv.ParamRepo({self.window})
+end