author     txh18 <[email protected]>    2015-12-03 12:48:49 +0800
committer  txh18 <[email protected]>    2015-12-03 12:48:49 +0800
commit     bba82cac04474b8177ab45d41543bc993801a4e0
tree       4ec102da65f39d778b1ec9631a123540453b2158
parent     63cd5b0ab0d2fd1693fdaec0e57b5e02ad718dfb
moved tnn to main nerv dir and added it to Makefile
-rw-r--r--  nerv/Makefile                                                                                   7
-rw-r--r--  nerv/examples/lmptb/lm_trainer.lua                                                              2
-rw-r--r--  nerv/examples/lmptb/lmptb/lmseqreader.lua                                                       2
-rw-r--r--  nerv/examples/lmptb/lstmlm_ptb_main.lua                                                         6
-rw-r--r--  nerv/examples/lmptb/m-tests/sutil_test.lua                                                      2
-rw-r--r--  nerv/examples/lmptb/rnnlm_ptb_main.lua                                                          2
-rw-r--r--  nerv/init.lua                                                                                   1
-rw-r--r--  nerv/layer/elem_mul.lua (renamed from nerv/examples/lmptb/tnn/layers/elem_mul.lua)              0
-rw-r--r--  nerv/layer/gate_fff.lua (renamed from nerv/examples/lmptb/tnn/layers/gate_fff.lua)              0
-rw-r--r--  nerv/layer/init.lua                                                                             2
-rw-r--r--  nerv/tnn/init.lua (renamed from nerv/examples/lmptb/tnn/init.lua)                               2
-rw-r--r--  nerv/tnn/layer_dag_t.lua (renamed from nerv/examples/lmptb/tnn/layer_dag_t.lua)                 0
-rw-r--r--  nerv/tnn/layersT/dropout_t.lua (renamed from nerv/examples/lmptb/tnn/layersT/dropout_t.lua)     0
-rw-r--r--  nerv/tnn/layersT/lstm_t.lua (renamed from nerv/examples/lmptb/tnn/layersT/lstm_t.lua)           0
-rw-r--r--  nerv/tnn/layersT/softmax_ce_t.lua (renamed from nerv/examples/lmptb/tnn/layersT/softmax_ce_t.lua) 0
-rw-r--r--  nerv/tnn/sutil.lua (renamed from nerv/examples/lmptb/tnn/sutil.lua)                             0
-rw-r--r--  nerv/tnn/tnn.lua (renamed from nerv/examples/lmptb/tnn/tnn.lua)                                 0
17 files changed, 15 insertions(+), 11 deletions(-)
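
In effect, after this commit the tnn sub-package is loaded as part of the core nerv library rather than from the lmptb example tree, which is why the example scripts below comment out their explicit require of 'tnn.init'. A minimal Lua sketch of the resulting include chain, reconstructed from the nerv/init.lua, layer/init.lua and tnn/init.lua hunks in this diff (paths relative to the installed nerv Lua directory; not an exact copy of either file):

-- Sketch of the include chain after this commit (reconstructed from the
-- hunks below, not a verbatim file listing):
nerv.include('matrix/init.lua')
nerv.include('io/init.lua')
nerv.include('layer/init.lua')   -- now also includes elem_mul.lua and gate_fff.lua
nerv.include('nn/init.lua')
nerv.include('tnn/init.lua')     -- new: loads tnn.lua, layersT/softmax_ce_t.lua,
                                 -- layersT/lstm_t.lua, layersT/dropout_t.lua,
                                 -- sutil.lua and layer_dag_t.lua, so scripts no
                                 -- longer need an explicit require 'tnn.init'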
diff --git a/nerv/Makefile b/nerv/Makefile
index df6ce98..5c329f9 100644
--- a/nerv/Makefile
+++ b/nerv/Makefile
@@ -7,7 +7,7 @@ INC_PATH := $(LUA_BINDIR)/../include/nerv
LUA_DIR = $(INST_LUADIR)/nerv
OBJ_DIR := $(BUILD_DIR)/objs
ISUBDIR := io matrix luaT
-SUBDIR := matrix io layer examples nn lib/io lib/luaT lib/matrix
+SUBDIR := matrix io layer examples nn lib/io lib/luaT lib/matrix tnn/layersT
INC_SUBDIR := $(addprefix $(INC_PATH)/,$(ISUBDIR))
OBJ_SUBDIR := $(addprefix $(OBJ_DIR)/,$(SUBDIR))
@@ -32,8 +32,11 @@ LIBS := $(INST_LIBDIR)/libnerv.so $(LIB_PATH)/libnervcore.so $(LIB_PATH)/libluaT
LUA_LIBS := matrix/init.lua io/init.lua init.lua \
layer/init.lua layer/affine.lua layer/sigmoid.lua layer/tanh.lua layer/softmax_ce.lua layer/softmax.lua \
layer/window.lua layer/bias.lua layer/combiner.lua layer/mse.lua layer/affine_recurrent.lua \
+ layer/elem_mul.lua layer/gate_fff.lua \
nn/init.lua nn/layer_repo.lua nn/param_repo.lua nn/layer_dag.lua \
- io/sgd_buffer.lua
+ io/sgd_buffer.lua \
+ tnn/init.lua tnn/layer_dag_t.lua tnn/sutil.lua tnn/tnn.lua \
+ tnn/layersT/dropout_t.lua tnn/layersT/lstm_t.lua tnn/layersT/softmax_ce_t.lua
INCLUDE := -I $(LUA_INCDIR) -DLUA_USE_APICHECK
#CUDA_BASE := /usr/local/cuda-7.0
diff --git a/nerv/examples/lmptb/lm_trainer.lua b/nerv/examples/lmptb/lm_trainer.lua
index a203cc6..e5384b1 100644
--- a/nerv/examples/lmptb/lm_trainer.lua
+++ b/nerv/examples/lmptb/lm_trainer.lua
@@ -2,7 +2,7 @@ require 'lmptb.lmvocab'
require 'lmptb.lmfeeder'
require 'lmptb.lmutil'
require 'lmptb.layer.init'
-require 'tnn.init'
+--require 'tnn.init'
require 'lmptb.lmseqreader'
local LMTrainer = nerv.class('nerv.LMTrainer')
diff --git a/nerv/examples/lmptb/lmptb/lmseqreader.lua b/nerv/examples/lmptb/lmptb/lmseqreader.lua
index ff07415..ead8d4c 100644
--- a/nerv/examples/lmptb/lmptb/lmseqreader.lua
+++ b/nerv/examples/lmptb/lmptb/lmseqreader.lua
@@ -1,5 +1,5 @@
require 'lmptb.lmvocab'
-require 'tnn.init'
+--require 'tnn.init'
local LMReader = nerv.class("nerv.LMSeqReader")
diff --git a/nerv/examples/lmptb/lstmlm_ptb_main.lua b/nerv/examples/lmptb/lstmlm_ptb_main.lua
index 53a7bd5..4123378 100644
--- a/nerv/examples/lmptb/lstmlm_ptb_main.lua
+++ b/nerv/examples/lmptb/lstmlm_ptb_main.lua
@@ -2,7 +2,7 @@ require 'lmptb.lmvocab'
require 'lmptb.lmfeeder'
require 'lmptb.lmutil'
require 'lmptb.layer.init'
-require 'tnn.init'
+--require 'tnn.init'
require 'lmptb.lmseqreader'
require 'lm_trainer'
@@ -197,10 +197,10 @@ global_conf = {
hidden_size = 300, --set to 400 for a stable good test PPL
chunk_size = 15,
batch_size = 10,
- max_iter = 35,
+ max_iter = 45,
decay_iter = 10,
param_random = function() return (math.random() / 5 - 0.1) end,
- dropout_str = "0.5*15:0",
+ dropout_str = "0.5",
train_fn = train_fn,
valid_fn = valid_fn,
diff --git a/nerv/examples/lmptb/m-tests/sutil_test.lua b/nerv/examples/lmptb/m-tests/sutil_test.lua
index c2425c2..3f9bf9e 100644
--- a/nerv/examples/lmptb/m-tests/sutil_test.lua
+++ b/nerv/examples/lmptb/m-tests/sutil_test.lua
@@ -1,4 +1,4 @@
-require "tnn.init"
+--require "tnn.init"
ss = "0.1*1:2"
nerv.SUtil.parse_schedule(ss)
diff --git a/nerv/examples/lmptb/rnnlm_ptb_main.lua b/nerv/examples/lmptb/rnnlm_ptb_main.lua
index 35b2e08..ca62023 100644
--- a/nerv/examples/lmptb/rnnlm_ptb_main.lua
+++ b/nerv/examples/lmptb/rnnlm_ptb_main.lua
@@ -2,7 +2,7 @@ require 'lmptb.lmvocab'
require 'lmptb.lmfeeder'
require 'lmptb.lmutil'
require 'lmptb.layer.init'
-require 'tnn.init'
+--require 'tnn.init'
require 'lmptb.lmseqreader'
require 'lm_trainer'
diff --git a/nerv/init.lua b/nerv/init.lua
index 9c1a5c8..b5d20a2 100644
--- a/nerv/init.lua
+++ b/nerv/init.lua
@@ -130,3 +130,4 @@ nerv.include('matrix/init.lua')
nerv.include('io/init.lua')
nerv.include('layer/init.lua')
nerv.include('nn/init.lua')
+nerv.include('tnn/init.lua')
diff --git a/nerv/examples/lmptb/tnn/layers/elem_mul.lua b/nerv/layer/elem_mul.lua
index c809d3e..c809d3e 100644
--- a/nerv/examples/lmptb/tnn/layers/elem_mul.lua
+++ b/nerv/layer/elem_mul.lua
diff --git a/nerv/examples/lmptb/tnn/layers/gate_fff.lua b/nerv/layer/gate_fff.lua
index 751dde1..751dde1 100644
--- a/nerv/examples/lmptb/tnn/layers/gate_fff.lua
+++ b/nerv/layer/gate_fff.lua
diff --git a/nerv/layer/init.lua b/nerv/layer/init.lua
index 32b82d8..23606e1 100644
--- a/nerv/layer/init.lua
+++ b/nerv/layer/init.lua
@@ -100,3 +100,5 @@ nerv.include('mse.lua')
nerv.include('combiner.lua')
nerv.include('affine_recurrent.lua')
nerv.include('softmax.lua')
+nerv.include('elem_mul.lua')
+nerv.include('gate_fff.lua')
diff --git a/nerv/examples/lmptb/tnn/init.lua b/nerv/tnn/init.lua
index 66ea4ed..979f5d8 100644
--- a/nerv/examples/lmptb/tnn/init.lua
+++ b/nerv/tnn/init.lua
@@ -46,6 +46,4 @@ nerv.include('tnn.lua')
nerv.include('layersT/softmax_ce_t.lua')
nerv.include('layersT/lstm_t.lua')
nerv.include('layersT/dropout_t.lua')
-nerv.include('layers/elem_mul.lua')
-nerv.include('layers/gate_fff.lua')
nerv.include('layer_dag_t.lua')
diff --git a/nerv/examples/lmptb/tnn/layer_dag_t.lua b/nerv/tnn/layer_dag_t.lua
index e3a9316..e3a9316 100644
--- a/nerv/examples/lmptb/tnn/layer_dag_t.lua
+++ b/nerv/tnn/layer_dag_t.lua
diff --git a/nerv/examples/lmptb/tnn/layersT/dropout_t.lua b/nerv/tnn/layersT/dropout_t.lua
index 4351285..4351285 100644
--- a/nerv/examples/lmptb/tnn/layersT/dropout_t.lua
+++ b/nerv/tnn/layersT/dropout_t.lua
diff --git a/nerv/examples/lmptb/tnn/layersT/lstm_t.lua b/nerv/tnn/layersT/lstm_t.lua
index ded6058..ded6058 100644
--- a/nerv/examples/lmptb/tnn/layersT/lstm_t.lua
+++ b/nerv/tnn/layersT/lstm_t.lua
diff --git a/nerv/examples/lmptb/tnn/layersT/softmax_ce_t.lua b/nerv/tnn/layersT/softmax_ce_t.lua
index a9ce975..a9ce975 100644
--- a/nerv/examples/lmptb/tnn/layersT/softmax_ce_t.lua
+++ b/nerv/tnn/layersT/softmax_ce_t.lua
diff --git a/nerv/examples/lmptb/tnn/sutil.lua b/nerv/tnn/sutil.lua
index f5bc408..f5bc408 100644
--- a/nerv/examples/lmptb/tnn/sutil.lua
+++ b/nerv/tnn/sutil.lua
diff --git a/nerv/examples/lmptb/tnn/tnn.lua b/nerv/tnn/tnn.lua
index 56c9dc0..56c9dc0 100644
--- a/nerv/examples/lmptb/tnn/tnn.lua
+++ b/nerv/tnn/tnn.lua