about summary refs log tree commit diff
diff options
context:
space:
mode:
author    txh18 <cloudygooseg@gmail.com>  2015-10-23 20:58:27 +0800
committer txh18 <cloudygooseg@gmail.com>  2015-10-23 20:58:27 +0800
commit    8e7d9453520840b6a5e269a101ca72b4a7ab36fa (patch)
tree      1c4278c0939d8fa07487aac6993f677f0480323f
parent    1234c026869ab052e898cc2541143fe4a22312b6 (diff)
lmptb can be run after merge from master
-rw-r--r--              nerv/examples/lmptb/lmptb/lmutil.lua  5
-rw-r--r--              nerv/examples/lmptb/main.lua          13
l--------- [-rwxr-xr-x] nerv/examples/lmptb/nerv              4
-rwxr-xr-x              nerv/examples/lmptb/nerv-old          3
4 files changed, 14 insertions, 11 deletions
diff --git a/nerv/examples/lmptb/lmptb/lmutil.lua b/nerv/examples/lmptb/lmptb/lmutil.lua
index 73cf041..7f45a49 100644
--- a/nerv/examples/lmptb/lmptb/lmutil.lua
+++ b/nerv/examples/lmptb/lmptb/lmutil.lua
@@ -102,6 +102,11 @@ function Result:ppl_all(cla)
return math.pow(10, -(c.logp_all) / (c.cn_w + c.cn_sen))
end
+function Result:logp_sample(cla)
+ local c = self[cla]
+ return c.logp_all / (c.cn_w + c.cn_sen)
+end
+
function Result:status(cla)
return "LMResult status of " .. cla .. ": " .. "<SEN_CN " .. self[cla].cn_sen .. "> <W_CN " .. self[cla].cn_w .. "> <PPL_NET " .. self:ppl_net(cla) .. "> <PPL_OOV " .. self:ppl_all(cla) .. "> <LOGP " .. self[cla].logp_all .. ">"
end
diff --git a/nerv/examples/lmptb/main.lua b/nerv/examples/lmptb/main.lua
index 8764998..74ce407 100644
--- a/nerv/examples/lmptb/main.lua
+++ b/nerv/examples/lmptb/main.lua
@@ -220,6 +220,7 @@ function propagateFile(global_conf, dagL, fn, config)
end
if (result["rnn"].cn_w % global_conf.log_w_num == 0) then
printf("%s %d words processed %s.\n", global_conf.sche_log_pre, result["rnn"].cn_w, os.date())
+ printf("\t%s log prob per sample :%f.\n", global_conf.sche_log_pre, result:logp_sample("rnn"));
for key, value in pairs(global_conf.timer.rec) do
printf("\t [global_conf.timer]: time spent on %s:%.5fs\n", key, value)
end
@@ -255,10 +256,11 @@ end
local set = "ptb"
if (set == "ptb") then
- train_fn = "/slfs1/users/txh18/workspace/nerv-project/nerv/nerv/examples/lmptb/PTBdata/ptb.train.txt"
- valid_fn = "/slfs1/users/txh18/workspace/nerv-project/nerv/nerv/examples/lmptb/PTBdata/ptb.valid.txt"
- test_fn = "/slfs1/users/txh18/workspace/nerv-project/nerv/nerv/examples/lmptb/PTBdata/ptb.test.txt"
- work_dir_base = "/slfs1/users/txh18/workspace/nerv-project/lmptb-work"
+ data_dir = "/home/slhome/txh18/workspace/nerv/nerv/nerv/examples/lmptb/PTBdata"
+ train_fn = data_dir.."/ptb.train.txt"
+ valid_fn = data_dir.."/ptb.valid.txt"
+ test_fn = data_dir.."/ptb.test.txt"
+ work_dir_base = "/home/slhome/txh18/workspace/nerv/lmptb-work"
global_conf = {
lrate = 1, wcost = 1e-6, momentum = 0,
cumat_type = nerv.CuMatrixFloat,
@@ -275,7 +277,7 @@ if (set == "ptb") then
valid_fn = valid_fn,
test_fn = test_fn,
sche_log_pre = "[SCHEDULER]:",
- log_w_num = 50000, --give a message when log_w_num words have been processed
+ log_w_num = 10000, --give a message when log_w_num words have been processed
timer = nerv.Timer()
}
global_conf.work_dir = work_dir_base.."/h"..global_conf.hidden_size.."bp"..global_conf.bptt.."slr"..global_conf.lrate..os.date("_%bD%dH%H")
@@ -323,6 +325,7 @@ os.execute("mkdir -p "..global_conf.work_dir)
scheduler = " printf(\"===INITIAL VALIDATION===\\n\") \
dagL, paramRepo = load_net(global_conf) \
+ printf(\"===INITIAL VALIDATION===\\n\") \
local result = propagateFile(global_conf, dagL, global_conf.valid_fn, {do_train = false, report_word = false}) \
ppl_rec = {} \
lr_rec = {} \
diff --git a/nerv/examples/lmptb/nerv b/nerv/examples/lmptb/nerv
index 8829556..e0dbb49 100755..120000
--- a/nerv/examples/lmptb/nerv
+++ b/nerv/examples/lmptb/nerv
@@ -1,3 +1 @@
-#!/bin/sh
-
-exec '/home/slhome/txh18/workspace/nerv-project/nerv-1/install/bin/luajit' -e 'package.path="/home/slhome/txh18/.luarocks/share/lua/5.1/?.lua;/home/slhome/txh18/.luarocks/share/lua/5.1/?/init.lua;/home/slhome/txh18/workspace/nerv-project/nerv-1/install/share/lua/5.1/?.lua;/home/slhome/txh18/workspace/nerv-project/nerv-1/install/share/lua/5.1/?/init.lua;"..package.path; package.cpath="/home/slhome/txh18/.luarocks/lib/lua/5.1/?.so;/home/slhome/txh18/workspace/nerv-project/nerv-1/install/lib/lua/5.1/?.so;"..package.cpath' -e 'local k,l,_=pcall(require,"luarocks.loader") _=k and l.add_context("nerv","scm-1")' '/home/slhome/txh18/workspace/nerv-project/nerv-1/install/lib/luarocks/rocks/nerv/scm-1/bin/nerv' "$@"
+../../../install/bin/nerv \ No newline at end of file
diff --git a/nerv/examples/lmptb/nerv-old b/nerv/examples/lmptb/nerv-old
deleted file mode 100755
index 4912bed..0000000
--- a/nerv/examples/lmptb/nerv-old
+++ /dev/null
@@ -1,3 +0,0 @@
-#!/bin/bash
-NERV_PATH="../.."
-exec "${NERV_PATH}/build/luajit-2.0/bin/luajit" -e "package.cpath=\"${NERV_PATH}/build/lib/?.so\"" -e "package.path=\"${NERV_PATH}/build/lua/?/init.lua;${NERV_PATH}/build/lua/?.lua;./?.lua;./?/init.lua\"" -e "require 'nerv'" "$@"