author     cloudygoose <[email protected]>    2015-07-10 11:09:42 +0800
committer  cloudygoose <[email protected]>    2015-07-10 11:09:42 +0800
commit     882541b311ae9f18c916609a74708cb29bc1abdd (patch)
tree       a2f07c309902eb7f104e029677384dbba9ae0164
parent     72acf24e248cca7d69658d02939f99d57d02e9a9 (diff)
added affine_recurrent layer
-rw-r--r--    .gitignore                          1
-rw-r--r--    nerv/Makefile                       4
-rw-r--r--    nerv/layer/affine_recurrent.lua    83
-rw-r--r--    nerv/layer/init.lua                 1
4 files changed, 87 insertions, 2 deletions
diff --git a/.gitignore b/.gitignore
index 24f2f11..5c87b71 100644
--- a/.gitignore
+++ b/.gitignore
@@ -1,4 +1,5 @@
*.o
install/
+build/
*.swp
*.swo
diff --git a/nerv/Makefile b/nerv/Makefile
index 7b75522..022e2fb 100644
--- a/nerv/Makefile
+++ b/nerv/Makefile
@@ -31,9 +31,9 @@ OBJS := $(CORE_OBJS) $(NERV_OBJS) $(LUAT_OBJS)
LIBS := $(INST_LIBDIR)/libnerv.so $(LIB_PATH)/libnervcore.so $(LIB_PATH)/libluaT.so
LUA_LIBS := matrix/init.lua io/init.lua init.lua \
layer/init.lua layer/affine.lua layer/sigmoid.lua layer/softmax_ce.lua \
- layer/window.lua layer/bias.lua layer/combiner.lua layer/mse.lua \
+ layer/window.lua layer/bias.lua layer/combiner.lua layer/mse.lua layer/affine_recurrent.lua \
nn/init.lua nn/layer_repo.lua nn/param_repo.lua nn/layer_dag.lua \
- io/sgd_buffer.lua
+ io/sgd_buffer.lua
INCLUDE := -I $(LUA_INCDIR) -DLUA_USE_APICHECK
CUDA_BASE := /usr/local/cuda-6.5
diff --git a/nerv/layer/affine_recurrent.lua b/nerv/layer/affine_recurrent.lua
new file mode 100644
index 0000000..5afdaa1
--- /dev/null
+++ b/nerv/layer/affine_recurrent.lua
@@ -0,0 +1,83 @@
+local Recurrent = nerv.class('nerv.AffineRecurrentLayer', 'nerv.Layer')
+
+-- id: string
+-- global_conf: table
+-- layer_conf: table
+-- fetch the layer's parameters from layer_conf
+function Recurrent:__init(id, global_conf, layer_conf)
+    self.id = id
+    self.dim_in = layer_conf.dim_in
+    self.dim_out = layer_conf.dim_out
+    self.gconf = global_conf
+
+    self.bp = layer_conf.bp
+    self.ltp_hh = layer_conf.ltp_hh -- from hidden to hidden
+
+    self:check_dim_len(2, 1) -- two inputs (x_t and h_{t-1}), one output
+    self.direct_update = layer_conf.direct_update
+end
+
+-- check that parameter dimensions match and initialize training state
+function Recurrent:init(batch_size)
+    if (self.ltp_hh.trans:ncol() ~= self.bp.trans:ncol()) then
+        nerv.error("mismatching dimensions of ltp and bp")
+    end
+    if (self.dim_in[1] ~= self.ltp_hh.trans:nrow() or
+            self.dim_in[2] ~= self.ltp_hh.trans:nrow()) then
+        nerv.error("mismatching dimensions of ltp and input")
+    end
+    if (self.dim_out[1] ~= self.bp.trans:ncol()) then
+        nerv.error("mismatching dimensions of bp and output")
+    end
+
+    self.ltp_hh_grad = self.ltp_hh.trans:create()
+    self.ltp_hh:train_init()
+    self.bp:train_init()
+end
+
+function Recurrent:update(bp_err, input, output)
+    if (self.direct_update == true) then
+        local ltp_hh = self.ltp_hh.trans
+        local bp = self.bp.trans
+        local gconf = self.gconf
+        -- momentum gain: keeps the effective step size invariant to momentum
+        local mmt_gain = 1.0 / (1.0 - gconf.momentum)
+        local n = input[1]:nrow() * mmt_gain
+        -- update corrections (accumulated errors)
+        self.ltp_hh.correction:mul(input[2], bp_err[1], 1.0, gconf.momentum, 'T', 'N')
+        self.bp.correction:add(self.bp.correction, bp_err[1]:colsum(), gconf.momentum, 1.0)
+        -- perform update
+        ltp_hh:add(ltp_hh, self.ltp_hh.correction, 1.0, -gconf.lrate / n)
+        bp:add(bp, self.bp.correction, 1.0, -gconf.lrate / n)
+        -- weight decay
+        ltp_hh:add(ltp_hh, ltp_hh, 1.0, -gconf.lrate * gconf.wcost)
+    else
+        self.ltp_hh_grad:mul(input[2], bp_err[1], 1.0, 0.0, 'T', 'N')
+        self.ltp_hh:update(self.ltp_hh_grad)
+        self.bp:update(bp_err[1]:colsum())
+    end
+end
+
+function Recurrent:propagate(input, output)
+    output[1]:copy_fromd(input[1])                                  -- h_t = x_t
+    output[1]:mul(input[2], self.ltp_hh.trans, 1.0, 1.0, 'N', 'N')  -- h_t = h_t + h_{t-1} * W_hh
+    output[1]:add_row(self.bp.trans, 1.0)                           -- h_t = h_t + b
+end
+
+function Recurrent:back_propagate(bp_err, next_bp_err, input, output)
+    next_bp_err[1]:copy_fromd(bp_err[1])
+    next_bp_err[2]:mul(bp_err[1], self.ltp_hh.trans, 1.0, 0.0, 'N', 'T')
+    --[[ element-wise form of the clip below, kept for reference:
+    for i = 0, next_bp_err[2]:nrow() - 1 do
+        for j = 0, next_bp_err[2]:ncol() - 1 do
+            if (next_bp_err[2][i][j] > 10) then next_bp_err[2][i][j] = 10 end
+            if (next_bp_err[2][i][j] < -10) then next_bp_err[2][i][j] = -10 end
+        end
+    end
+    ]]--
+    next_bp_err[2]:clip(-10, 10) -- clip the hidden-state error to curb explosion
+end
+
+function Recurrent:get_params()
+    return nerv.ParamRepo({self.ltp_hh, self.bp})
+end
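
For reference, here is the computation the new layer performs, read off the BLAS-style calls above. In propagate, with x_t = input[1], h_{t-1} = input[2], W_hh = ltp_hh.trans and b = bp.trans:

    h_t = x_t + h_{t-1} W_{hh} + b

In the direct_update branch of update, with \eta = gconf.lrate, \mu = gconf.momentum, \lambda = gconf.wcost and batch size N = input[1]:nrow(), the hidden-to-hidden weights follow momentum SGD with weight decay:

    \Delta \leftarrow \mu \Delta + h_{t-1}^{\top} \delta_t
    W_{hh} \leftarrow W_{hh} - \frac{\eta (1 - \mu)}{N} \Delta
    W_{hh} \leftarrow (1 - \eta \lambda) W_{hh}

where \Delta is ltp_hh.correction and \delta_t is bp_err[1]; the bias update is the same with colsum(\delta_t) in place of the outer product and no decay.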
diff --git a/nerv/layer/init.lua b/nerv/layer/init.lua
index 3c55a94..7172f99 100644
--- a/nerv/layer/init.lua
+++ b/nerv/layer/init.lua
@@ -77,3 +77,4 @@ nerv.include('bias.lua')
nerv.include('window.lua')
nerv.include('mse.lua')
nerv.include('combiner.lua')
+nerv.include('affine_recurrent.lua')
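
A minimal usage sketch of the new layer follows, for orientation only: the names gconf, hidden_size, batch_size, ltp_hh, bp, x_t, h_prev and h_t are illustrative, and the two parameter objects are assumed to be obtained the same way the existing affine layer obtains its linear and bias params; only the nerv.AffineRecurrentLayer API itself comes from the diff above.

    -- both input dims equal the hidden size: ltp_hh is the square
    -- hidden-to-hidden matrix, bp the bias; see the init checks above
    local rec = nerv.AffineRecurrentLayer('recurrent1', gconf,
        {dim_in = {hidden_size, hidden_size}, dim_out = {hidden_size},
         ltp_hh = ltp_hh, bp = bp, direct_update = false})
    rec:init(batch_size)
    -- one time step: h_t receives x_t + h_prev * W_hh + b
    rec:propagate({x_t, h_prev}, {h_t})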