Diffstat (limited to 'layer')
 layer/combiner.lua   | 55 ++++++++++++++++++++++++++++++++++++++++++++++++
 layer/init.lua       |  2 ++
 layer/mse.lua        | 52 +++++++++++++++++++++++++++++++++++++++++++++++
 layer/softmax_ce.lua | 12 ++++++++----
 layer/window.lua     |  2 +-
 5 files changed, 120 insertions(+), 3 deletions(-)
diff --git a/layer/combiner.lua b/layer/combiner.lua
new file mode 100644
index 0000000..2eac83c
--- /dev/null
+++ b/layer/combiner.lua
@@ -0,0 +1,55 @@
+local CombinerLayer = nerv.class('nerv.CombinerLayer', 'nerv.Layer')
+
+function CombinerLayer:__init(id, global_conf, layer_conf)
+    self.id = id
+    self.lambda = layer_conf.lambda
+    self.dim_in = layer_conf.dim_in
+    self.dim_out = layer_conf.dim_out
+    self.gconf = global_conf
+    self:check_dim_len(#self.lambda, -1)
+end
+
+function CombinerLayer:init()
+    local dim = self.dim_in[1]
+    for i = 2, #self.dim_in do
+        if self.dim_in[i] ~= dim then
+            nerv.error("mismatching dimensions of inputs")
+        end
+    end
+    for i = 1, #self.dim_out do
+        if self.dim_out[i] ~= dim then
+            nerv.error("mismatching dimensions of inputs/outputs")
+        end
+    end
+end
+
+function CombinerLayer:update(bp_err, input, output)
+end
+
+function CombinerLayer:propagate(input, output)
+    output[1]:fill(0)
+    for i = 1, #self.dim_in do
+        output[1]:add(output[1], input[i], 1.0, self.lambda[i])
+    end
+    for i = 2, #self.dim_out do
+        output[i]:copy_fromd(output[1])
+    end
+end
+
+function CombinerLayer:back_propagate(next_bp_err, bp_err, input, output)
+    local sum = bp_err[1]:create()
+    sum:fill(0)
+    for i = 1, #self.dim_out do
+        sum:add(sum, bp_err[i], 1.0, 1.0)
+    end
+    for i = 1, #self.dim_in do
+        local scale = nerv.CuMatrixFloat(sum:nrow(), 1)
+        scale:fill(self.lambda[i])
+        next_bp_err[i]:copy_fromd(sum)
+        next_bp_err[i]:scale_rows_by_col(scale)
+    end
+end
+
+function CombinerLayer:get_params()
+    return {self.lambda}
+end
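
Note: the forward pass above computes an element-wise weighted sum,
output[1] = sum_i lambda[i] * input[i], and mirrors the result to any extra
output ports; the backward pass gives each input the summed output error
scaled by its own lambda[i]. A minimal plain-Lua sketch of the combination
arithmetic on a single frame (the combine helper is illustrative only, not
part of nerv):

    -- Plain-Lua sketch of CombinerLayer:propagate for one frame.
    -- inputs: a list of equal-length vectors; lambda: the mixing weights.
    local function combine(inputs, lambda)
        local out = {}
        for j = 1, #inputs[1] do out[j] = 0 end
        for i = 1, #inputs do
            for j = 1, #inputs[i] do
                out[j] = out[j] + lambda[i] * inputs[i][j]
            end
        end
        return out
    end

    -- 0.7 * {1, 2} + 0.3 * {3, 4}  -->  {1.6, 2.6}
    local y = combine({{1, 2}, {3, 4}}, {0.7, 0.3})
    print(y[1], y[2])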
diff --git a/layer/init.lua b/layer/init.lua
index 844f46b..169427d 100644
--- a/layer/init.lua
+++ b/layer/init.lua
@@ -71,3 +71,5 @@ require 'layer.sigmoid'
 require 'layer.softmax_ce'
 require 'layer.bias'
 require 'layer.window'
+require 'layer.mse'
+require 'layer.combiner'
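
Note: the two new requires run the nerv.class registrations at load time, so
the layers can then be constructed like any other. A hypothetical
construction call, assuming the (id, global_conf, layer_conf) convention of
the constructors in this diff (gconf and the dimensions are placeholders):

    -- Hypothetical usage; field names follow the constructors in this diff.
    local crit = nerv.MSELayer("mse_crit", gconf,
                               {dim_in = {429, 429}, dim_out = {1}})
    crit:init()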
diff --git a/layer/mse.lua b/layer/mse.lua
new file mode 100644
index 0000000..da5b24d
--- /dev/null
+++ b/layer/mse.lua
@@ -0,0 +1,52 @@
+local MSELayer = nerv.class("nerv.MSELayer", "nerv.Layer")
+
+function MSELayer:__init(id, global_conf, layer_conf)
+    self.id = id
+    self.dim_in = layer_conf.dim_in
+    self.dim_out = layer_conf.dim_out
+    self.gconf = global_conf
+    self:check_dim_len(2, -1)
+end
+
+function MSELayer:init()
+    if self.dim_in[1] ~= self.dim_in[2] then
+        nerv.error("mismatching dimensions of previous network output and labels")
+    end
+    self.total_mse = 0.0
+    self.total_frames = 0
+end
+
+function MSELayer:update(bp_err, input, output)
+    -- no params, therefore do nothing
+end
+
+function MSELayer:propagate(input, output)
+    local mse = input[1]:create()
+    mse:add(input[1], input[2], 1.0, -1.0)
+    self.diff = mse:create()
+    self.diff:copy_fromd(mse)
+    mse:mul_elem(mse, mse)
+    mse = mse:rowsum(mse)
+    local scale = nerv.CuMatrixFloat(mse:nrow(), 1)
+    scale:fill(1 / input[1]:ncol())
+    mse:scale_rows_by_col(scale)
+    if output[1] ~= nil then
+        output[1]:copy_fromd(mse)
+    end
+    self.total_mse = self.total_mse + mse:colsum()[0]
+    self.total_frames = self.total_frames + mse:nrow()
+end
+
+-- NOTE: must call propagate before back_propagate
+function MSELayer:back_propagate(next_bp_err, bp_err, input, output)
+    local nbe = next_bp_err[1]
+    nbe:copy_fromd(self.diff)
+    self.diff = nil
+    if bp_err[1] ~= nil then
+        nbe:scale_rows_by_col(bp_err[1])
+    end
+end
+
+function MSELayer:get_params()
+    return {}
+end
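
Note: per frame, propagate computes mse(x, y) = (1/n) * sum_j (x[j] - y[j])^2
and caches the raw difference x - y, which back_propagate then hands to the
previous layer (optionally scaled by bp_err[1]). A plain-Lua sketch of the
same per-frame arithmetic (frame_mse is illustrative only, not part of nerv):

    -- Plain-Lua sketch of MSELayer:propagate for one frame. Returns the
    -- mean squared error and the cached difference that back_propagate
    -- passes on as the error signal.
    local function frame_mse(x, y)
        local diff, sum = {}, 0
        for j = 1, #x do
            diff[j] = x[j] - y[j]
            sum = sum + diff[j] * diff[j]
        end
        return sum / #x, diff
    end

    local mse, diff = frame_mse({1.0, 2.0}, {0.5, 2.5})
    print(mse)               --> 0.25  ((0.5^2 + (-0.5)^2) / 2)
    print(diff[1], diff[2])  --> 0.5   -0.5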
diff --git a/layer/softmax_ce.lua b/layer/softmax_ce.lua
index 2e1f5fb..7888540 100644
--- a/layer/softmax_ce.lua
+++ b/layer/softmax_ce.lua
@@ -36,8 +36,12 @@ function SoftmaxCELayer:propagate(input, output)
         label = label:decompress(input[1]:ncol())
     end
     ce:mul_elem(ce, label)
+    ce = ce:rowsum()
+    if output[1] ~= nil then
+        output[1]:copy_fromd(ce)
+    end
     -- add total ce
-    self.total_ce = self.total_ce - ce:rowsum():colsum()[0]
+    self.total_ce = self.total_ce - ce:colsum()[0]
     self.total_frames = self.total_frames + soutput:nrow()
     -- TODO: add colsame for uncompressed label
     if self.compressed then
@@ -51,7 +55,11 @@ function SoftmaxCELayer:back_propagate(next_bp_err, bp_err, input, output)
     if self.compressed then
         label = label:decompress(input[1]:ncol())
     end
-    next_bp_err[1]:add(self.soutput, label, 1.0, -1.0)
+    local nbe = next_bp_err[1]
+    nbe:add(self.soutput, label, 1.0, -1.0)
+    if bp_err[1] ~= nil then
+        nbe:scale_rows_by_col(bp_err[1])
+    end
 end
 
 function SoftmaxCELayer:get_params()
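
Note: with this change the per-frame row sum is exposed on the optional
output[1]; judging from the subtraction in total_ce, ce at that point holds
sum_j label[j] * log(softmax(x)[j]), i.e. the negative cross entropy, and
back_propagate now scales the usual softmax-CE gradient soutput - label by
any error arriving on bp_err[1]. A plain-Lua sketch of the per-frame cross
entropy itself (frame_softmax_ce is illustrative only, not part of nerv):

    -- Plain-Lua sketch: softmax followed by cross entropy for one frame.
    local function frame_softmax_ce(x, label)
        -- subtract the max for numerical stability
        local m = x[1]
        for j = 2, #x do if x[j] > m then m = x[j] end end
        local z, e = 0, {}
        for j = 1, #x do
            e[j] = math.exp(x[j] - m)
            z = z + e[j]
        end
        local ce = 0
        for j = 1, #x do
            ce = ce - label[j] * math.log(e[j] / z)
        end
        return ce
    end

    print(frame_softmax_ce({2.0, 1.0, 0.1}, {1, 0, 0}))  --> ~0.417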
diff --git a/layer/window.lua b/layer/window.lua
index b381c9b..3a093f4 100644
--- a/layer/window.lua
+++ b/layer/window.lua
@@ -20,7 +20,7 @@ end
 
 function WindowLayer:propagate(input, output)
     output[1]:copy_fromd(input[1])
-    output[1]:scale_row(self.window.trans)
+    output[1]:scale_rows_by_row(self.window.trans)
 end
 
 function WindowLayer:get_params()
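
Note: the rename spells out what the call does: every row of output[1] is
multiplied element-wise by the single row vector self.window.trans. A
plain-Lua sketch of that operation (scale_rows_by_row is written as a free
function here purely for illustration):

    -- Multiply every row of mat element-wise by the row vector row.
    local function scale_rows_by_row(mat, row)
        for i = 1, #mat do
            for j = 1, #row do
                mat[i][j] = mat[i][j] * row[j]
            end
        end
    end

    local m = {{1, 2}, {3, 4}}
    scale_rows_by_row(m, {10, 0.5})
    -- m is now {{10, 1}, {30, 2}}
    print(m[1][1], m[1][2], m[2][1], m[2][2])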