author     Determinant <ted.sybil@gmail.com>  2015-06-03 23:00:45 +0800
committer  Determinant <ted.sybil@gmail.com>  2015-06-03 23:00:45 +0800
commit     ea6f2990f99dd9ded6a0e74d75a3ec84900a2518 (patch)
tree       03b4ea34fa373189bf6b2b017bf54793d5c89f8e /layer
parent     bb56a806e0636a0b20117b1644701d63e2bfaefb (diff)
demo now works (without random shuffle)
Diffstat (limited to 'layer')
-rw-r--r--  layer/affine.lua      |  2
-rw-r--r--  layer/init.lua        | 24
-rw-r--r--  layer/softmax_ce.lua  | 20
3 files changed, 28 insertions, 18 deletions
diff --git a/layer/affine.lua b/layer/affine.lua
index 90a1d16..59a0e91 100644
--- a/layer/affine.lua
+++ b/layer/affine.lua
@@ -4,7 +4,7 @@ local BiasParam = nerv.class('nerv.BiasParam', 'nerv.MatrixParam')
local AffineLayer = nerv.class('nerv.AffineLayer', 'nerv.Layer')
function MatrixParam:read(pcdata)
- self.trans = self.gconf.mat_type.new_from_host(
+ self.trans = self.gconf.cumat_type.new_from_host(
nerv.MMatrixFloat.load(pcdata))
end
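The one-line fix above assumes the global configuration exposes the CUDA matrix class as cumat_type, so a parameter is loaded on the host and then copied to the device. A minimal sketch of such a configuration (the field values are assumptions for illustration; only cumat_type and nerv.MMatrixFloat appear in the diff itself):

-- hypothetical global configuration passed to layers as gconf
gconf = {
    cumat_type = nerv.CuMatrixFloat, -- device-side matrix class used by new_from_host
    mmat_type  = nerv.MMatrixFloat,  -- host-side matrix class used for chunk file I/O
}
-- MatrixParam:read then loads a host matrix from the chunk data and copies it to the GPU:
-- nerv.MMatrixFloat.load(pcdata) -> host matrix -> gconf.cumat_type.new_from_host(...)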
diff --git a/layer/init.lua b/layer/init.lua
index c8c691b..38bcd7f 100644
--- a/layer/init.lua
+++ b/layer/init.lua
@@ -2,50 +2,50 @@
local Param = nerv.class('nerv.Param')
-function nerv.Param:__init(id, global_conf)
+function Param:__init(id, global_conf)
self.id = id
self.gconf = global_conf
end
-function nerv.Param:get_info()
+function Param:get_info()
return self.info
end
-function nerv.Param:set_info(info)
+function Param:set_info(info)
self.info = info
end
-function nerv.Param:read(pfhandle)
+function Param:read(pfhandle)
nerv.error_method_not_implemented()
end
-function nerv.Param:write(pfhandle)
+function Param:write(pfhandle)
nerv.error_method_not_implemented()
end
local Layer = nerv.class('nerv.Layer')
-function nerv.Layer:__init(id, global_conf, ...)
+function Layer:__init(id, global_conf, layer_conf)
nerv.error_method_not_implemented()
end
-function nerv.Layer:init(id)
+function Layer:init(id)
nerv.error_method_not_implemented()
end
-function nerv.Layer:update(bp_err, input, output)
+function Layer:update(bp_err, input, output)
nerv.error_method_not_implemented()
end
-function nerv.Layer:propagate(input, output)
+function Layer:propagate(input, output)
nerv.error_method_not_implemented()
end
-function nerv.Layer:back_propagate(next_bp_err, bp_err, input, output)
+function Layer:back_propagate(next_bp_err, bp_err, input, output)
nerv.error_method_not_implemented()
end
-function nerv.Layer:check_dim_len(len_in, len_out)
+function Layer:check_dim_len(len_in, len_out)
local expected_in = #self.dim_in
local expected_out = #self.dim_out
if len_in > 0 and expected_in ~= len_in then
@@ -58,7 +58,7 @@ function nerv.Layer:check_dim_len(len_in, len_out)
end
end
-function nerv.Layer:get_dim()
+function Layer:get_dim()
return self.dim_in, self.dim_out
end
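For reference, the abstract nerv.Layer interface above is meant to be filled in by concrete subclasses, which is why every method raises error_method_not_implemented. A hypothetical minimal subclass is sketched below (IdentityLayer is not part of this commit, and copy_fromd is assumed to be the device-to-device copy method of the matrix class):

local IdentityLayer = nerv.class("nerv.IdentityLayer", "nerv.Layer")

function IdentityLayer:__init(id, global_conf, layer_conf)
    self.id = id
    self.gconf = global_conf
    self.dim_in = layer_conf.dim_in
    self.dim_out = layer_conf.dim_out
    self:check_dim_len(1, 1)             -- exactly one input and one output
end

function IdentityLayer:init()
    -- no parameters to allocate
end

function IdentityLayer:update(bp_err, input, output)
    -- no parameters to update
end

function IdentityLayer:propagate(input, output)
    output[1]:copy_fromd(input[1])       -- pass the input through unchanged
end

function IdentityLayer:back_propagate(next_bp_err, bp_err, input, output)
    next_bp_err[1]:copy_fromd(bp_err[1]) -- pass the error back unchanged
end

get_dim does not need to be redefined, since the base class version already returns dim_in and dim_out.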
diff --git a/layer/softmax_ce.lua b/layer/softmax_ce.lua
index 09eb3a9..cf98c45 100644
--- a/layer/softmax_ce.lua
+++ b/layer/softmax_ce.lua
@@ -5,6 +5,10 @@ function SoftmaxCELayer:__init(id, global_conf, layer_conf)
self.gconf = global_conf
self.dim_in = layer_conf.dim_in
self.dim_out = layer_conf.dim_out
+ self.compressed = layer_conf.compressed
+ if self.compressed == nil then
+ self.compressed = false
+ end
self:check_dim_len(2, -1) -- two inputs: nn output and label
end
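The new compressed flag is read from layer_conf and defaults to false when absent. A hedged construction sketch (identifiers and dimension values are placeholders; the dim_in convention for compressed labels is an assumption):

-- hypothetical usage: labels arrive as class indices instead of one-hot rows
local crit = nerv.SoftmaxCELayer("softmax_ce0", gconf,
                                 {dim_in = {429, 1},  -- {network output width, label width}
                                  dim_out = {1},
                                  compressed = true})
-- leaving out `compressed` keeps the old behaviour (labels already expanded)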
@@ -26,15 +30,21 @@ function SoftmaxCELayer:propagate(input, output)
soutput:softmax(input[1])
local ce = soutput:create()
ce:log_elem(soutput)
- ce:mul_elem(ce, input[2])
--- print(input[1][0])
--- print(soutput[1][0])
- -- add total ce
+ local label = input[2]
+ if self.compressed then
+ label = label:decompress(input[1]:ncol())
+ end
+ ce:mul_elem(ce, label)
+ -- add total ce
self.total_ce = self.total_ce - ce:rowsum():colsum()[0]
self.total_frames = self.total_frames + soutput:nrow()
end
function SoftmaxCELayer:back_propagate(next_bp_err, bp_err, input, output)
-- softmax output - label
- next_bp_err[1]:add(self.soutput, input[2], 1.0, -1.0)
+ local label = input[2]
+ if self.compressed then
+ label = label:decompress(input[1]:ncol())
+ end
+ next_bp_err[1]:add(self.soutput, label, 1.0, -1.0)
end
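Both propagate and back_propagate now expand a compressed label to the width of the network output (input[1]:ncol()) before using it. The sketch below only illustrates the assumed semantics of decompress using plain Lua tables; the real method operates on nerv matrices:

-- illustration only: one class index per frame becomes a one-hot row of width `width`
local function decompress_rows(indices, width)
    local rows = {}
    for i, class in ipairs(indices) do
        local row = {}
        for j = 1, width do row[j] = 0 end
        row[class + 1] = 1   -- nerv matrices index columns from 0; Lua tables from 1
        rows[i] = row
    end
    return rows
end

With the label in expanded form, next_bp_err[1]:add(self.soutput, label, 1.0, -1.0) computes soutput - label, the standard gradient of the cross-entropy loss with respect to the softmax input, which matches the "softmax output - label" comment.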