-rw-r--r--  .gitmodules                                   |   7
-rw-r--r--  Makefile                                      |  47
m---------  Penlight                                      |   0
-rw-r--r--  README.md                                     |  55
-rw-r--r--  README.rst                                    |  64
-rw-r--r--  nerv/Makefile                                 |  33
-rw-r--r--  nerv/doc/nerv.md                              |   6
-rw-r--r--  nerv/doc/nerv_class.md                        |   8
-rw-r--r--  nerv/doc/nerv_io.md                           |  13
-rw-r--r--  nerv/doc/nerv_layer.md                        |  13
-rw-r--r--  nerv/doc/nerv_matrix.md                       |  20
-rw-r--r--  nerv/doc/nerv_nn.md                           |  43
-rw-r--r--  nerv/doc/nerv_param.md                        |  10
-rw-r--r--  nerv/examples/asr_trainer.lua                 | 278
-rw-r--r--  nerv/examples/network_debug/config.lua        |  62
-rw-r--r--  nerv/examples/network_debug/main.lua          |  45
-rw-r--r--  nerv/examples/network_debug/network.lua       | 110
-rw-r--r--  nerv/examples/network_debug/reader.lua        | 113
-rw-r--r--  nerv/examples/network_debug/select_linear.lua |  59
-rw-r--r--  nerv/examples/network_debug/timer.lua         |  33
-rw-r--r--  nerv/examples/network_debug/tnn.lua           | 136
-rw-r--r--  nerv/examples/swb_baseline.lua                |  84
-rw-r--r--  nerv/examples/swb_baseline2.lua               | 203
-rw-r--r--  nerv/examples/swb_baseline_basic.lua          | 162
-rw-r--r--  nerv/examples/timit_baseline2.lua             | 212
-rw-r--r--  nerv/init.lua                                 | 237
-rw-r--r--  nerv/io/init.lua                              |   3
-rw-r--r--  nerv/io/seq_buffer.lua                        |   0
-rw-r--r--  nerv/io/sgd_buffer.lua                        |   7
-rw-r--r--  nerv/layer/affine.lua                         |  43
-rw-r--r--  nerv/layer/bias.lua                           |  15
-rw-r--r--  nerv/layer/combiner.lua                       |  16
-rw-r--r--  nerv/layer/dropout.lua                        |  27
-rw-r--r--  nerv/layer/duplicate.lua                      |  41
-rw-r--r--  nerv/layer/elem_mul.lua                       |  11
-rw-r--r--  nerv/layer/graph.lua                          | 156
-rw-r--r--  nerv/layer/gru.lua                            |  20
-rw-r--r--  nerv/layer/identity.lua                       |  30
-rw-r--r--  nerv/layer/init.lua                           |  72
-rw-r--r--  nerv/layer/lstm.lua                           | 192
-rw-r--r--  nerv/layer/lstm_gate.lua                      |  17
-rw-r--r--  nerv/layer/mse.lua                            |  16
-rw-r--r--  nerv/layer/rnn.lua                            |  42
-rw-r--r--  nerv/layer/sigmoid.lua                        |  17
-rw-r--r--  nerv/layer/softmax.lua                        |  11
-rw-r--r--  nerv/layer/softmax_ce.lua                     |  16
-rw-r--r--  nerv/layer/tanh.lua                           |  11
-rw-r--r--  nerv/layer/window.lua                         |  15
-rw-r--r--  nerv/lib/cblas.h                              | 596
-rw-r--r--  nerv/lib/common.c                             |  15
-rw-r--r--  nerv/lib/common.h                             |  26
-rw-r--r--  nerv/lib/matrix/cuda_helper.h                 |  60
-rw-r--r--  nerv/lib/matrix/cukernel.h                    |   2
-rw-r--r--  nerv/lib/matrix/cumatrix.c                    | 133
-rw-r--r--  nerv/lib/matrix/cumatrix.h                    |  20
-rw-r--r--  nerv/lib/matrix/generic/cukernel.cu           |   7
-rw-r--r--  nerv/lib/matrix/generic/cumatrix.c            | 151
-rw-r--r--  nerv/lib/matrix/generic/cumatrix.h            | 104
-rw-r--r--  nerv/lib/matrix/generic/matrix.c              |  13
-rw-r--r--  nerv/lib/matrix/generic/matrix.h              |   7
-rw-r--r--  nerv/lib/matrix/generic/mmatrix.c             | 118
-rw-r--r--  nerv/lib/matrix/generic/mmatrix.h             |  60
-rw-r--r--  nerv/lib/matrix/mmatrix.c                     |  38
-rw-r--r--  nerv/lib/matrix/mmatrix.h                     |  12
-rw-r--r--  nerv/matrix/cumatrix.c                        |  61
-rw-r--r--  nerv/matrix/generic/cumatrix.c                |  55
-rw-r--r--  nerv/matrix/generic/matrix.c                  | 103
-rw-r--r--  nerv/matrix/generic/mmatrix.c                 |  29
-rw-r--r--  nerv/matrix/init.lua                          |  44
-rw-r--r--  nerv/matrix/matrix.h                          |  24
-rw-r--r--  nerv/matrix/mmatrix.c                         |  51
-rw-r--r--  nerv/nerv                                     |  42
-rw-r--r--  nerv/nerv-scm-1.rockspec                      |   4
-rw-r--r--  nerv/nn/init.lua                              |   2
-rw-r--r--  nerv/nn/layer_dag.lua                         | 356
-rw-r--r--  nerv/nn/layer_repo.lua                        |  30
-rw-r--r--  nerv/nn/network.lua                           | 500
-rw-r--r--  nerv/nn/param_repo.lua                        |  59
-rw-r--r--  nerv/test/matrix_func.lua                     |   2
-rw-r--r--  nerv/test/parse_args.lua                      |  15
-rw-r--r--  nerv/tnn/init.lua                             |  47
-rw-r--r--  nerv/tnn/sutil.lua                            |  80
-rw-r--r--  nerv/tnn/tnn.lua                              | 596
83 files changed, 4169 insertions(+), 2124 deletions(-)
diff --git a/.gitmodules b/.gitmodules
index 9f556c5..2b346c4 100644
--- a/.gitmodules
+++ b/.gitmodules
@@ -1,6 +1,9 @@
[submodule "luajit-2.0"]
path = luajit-2.0
- url = http://luajit.org/git/luajit-2.0.git
+ url = https://speechlab.sjtu.edu.cn/gitlab/nerv-dev/luajit.git
[submodule "luarocks"]
path = luarocks
- url = https://github.com/keplerproject/luarocks.git
+ url = https://speechlab.sjtu.edu.cn/gitlab/nerv-dev/luarocks.git
+[submodule "Penlight"]
+ path = Penlight
+ url = https://speechlab.sjtu.edu.cn/gitlab/nerv-dev/Penlight.git
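Note on the .gitmodules change above: the submodule URLs now point at the SJTU speechlab GitLab mirrors, and a new Penlight submodule is added. For an existing checkout, the recorded remotes have to be re-synced before the submodules can be updated; a minimal sketch, assuming the submodules were already initialized against the old URLs:

    # re-read .gitmodules, refresh the stored remote URLs, then fetch from the new mirrors
    git submodule sync --recursive
    git submodule update --init --recursive
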
diff --git a/Makefile b/Makefile
index 0982295..28012da 100644
--- a/Makefile
+++ b/Makefile
@@ -1,19 +1,42 @@
.PHONY: all clean install luajit luarocks speech
+############## EDIT THESE LINES #####################
SHELL := /bin/bash
PREFIX := $(CURDIR)/install/
-all: luajit luarocks install
+#CUDA_BASE := /usr/local/cuda-7.0
+CUDA_BASE := /usr/local/cuda
+BLAS_BASE := /usr/lib/
+BLAS_LDFLAGS := -L$(BLAS_BASE) -Wl,-rpath=$(BLAS_BASE)
+BLAS_TYPE := atlas
+KALDI_BASE := /speechlab/tools/KALDI/kaldi-master/
+#######################################################
+MKL_LDFLAGS := -lmkl_rt
+ATLAS_LDFLAGS := -lcblas -llapack_atlas
+ifeq ($(BLAS_TYPE), mkl)
+BLAS_LDFLAGS += $(MKL_LDFLAGS)
+else ifeq ($(BLAS_TYPE), atlas)
+BLAS_LDFLAGS += $(ATLAS_LDFLAGS)
+else
+$(error Invalid blas type)
+endif
+export CUDA_BASE
+export KALDI_BASE
+export BLAS_LDFLAGS
+
+.PHONY: nerv speech/speech_utils speech/htk_io speech/kaldi_io speech/kaldi_decode \
+ nerv-clean speech/speech_utils-clean speech/htk_io-clean speech/kaldi_io-clean speech/kaldi_decode-clean \
+ Penlight
+
+all: luajit luarocks Penlight nerv
luajit:
PREFIX=$(PREFIX) ./tools/build_luajit.sh
luarocks:
PREFIX=$(PREFIX) ./tools/build_luarocks.sh
-install:
- cd nerv; $(PREFIX)/bin/luarocks make CFLAGS=$(CFLAGS)
-speech:
- cd speech/speech_utils; $(PREFIX)/bin/luarocks make
- cd speech/htk_io; $(PREFIX)/bin/luarocks make
- cd speech/kaldi_io; $(PREFIX)/bin/luarocks make
-clean:
- cd nerv && make clean
- cd speech/speech_utils && make clean
- cd speech/htk_io && make clean
- cd speech/kaldi_io && make clean
+
+speech: speech/speech_utils speech/htk_io speech/kaldi_io speech/kaldi_decode
+speech-clean: speech/speech_utils-clean speech/htk_io-clean speech/kaldi_io-clean speech/kaldi_decode-clean
+clean: nerv-clean speech-clean
+
+nerv Penlight speech/speech_utils speech/htk_io speech/kaldi_io speech/kaldi_decode:
+ cd $@; $(PREFIX)/bin/luarocks make
+nerv-clean speech/speech_utils-clean speech/htk_io-clean speech/kaldi_io-clean speech/kaldi_decode-clean:
+ cd $(subst -clean,,$@); make clean LUA_BINDIR=$(PREFIX)/bin/
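Usage note: the rewritten top-level Makefile exposes CUDA_BASE, BLAS_BASE and BLAS_TYPE as overridable variables (exported to the per-package luarocks builds) and delegates each package target to `luarocks make`. A hedged sketch of the new entry points; the MKL path is the one quoted in README.rst below and will differ on other machines:

    # default build: luajit, luarocks, Penlight and nerv, with ATLAS from /usr/lib and CUDA from /usr/local/cuda
    make
    # override the BLAS implementation and CUDA location on the command line
    make BLAS_TYPE=mkl BLAS_BASE=/home/intel/mkl/lib/intel64/ CUDA_BASE=/usr/local/cuda
    # build the speech packages (requires the nerv-speech checkout under speech/)
    make speech BLAS_TYPE=mkl BLAS_BASE=/home/intel/mkl/lib/intel64/
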
diff --git a/Penlight b/Penlight
new file mode 160000
+Subproject 16d149338af9efc910528641c5240c5641aeb8d
diff --git a/README.md b/README.md
deleted file mode 100644
index fe9dfc1..0000000
--- a/README.md
+++ /dev/null
@@ -1,55 +0,0 @@
-#The Nerv Toolkit User Manual#
-NOTE: This readme is obsolete and will be rearranged, for further information, please check http://nerv-sjtu.github.io/nerv/
-
-This user manual will information about how to use __Nerv__ and __Nerv__'s interface.
-
-##How to make and start using##
-First make sure you have __lua__ and __CUDA__ installed on your computer.
-__Nerv__ is currently developed via github.You can download and make __Nerv__ by doing the following:
-```
-cd ~
-git clone https://github.com/Nerv-SJTU/nerv.git
-cd nerv
-git submodule init && git submodule update
-make
-#To include some new CUDA feature(e.x. atomicCAS), use "make CFLAGS=-D__NERV_FUTURE_CUDA_7"
-
-#further, if you want the speech modules
-git clone https://github.com/Nerv-SJTU/nerv-speech.git speech
-make speech
-```
-The `git submodule` command is for the __luajit__ repository inside __Nerv__.
-Now, you can try to run some example scripts.
-```
-./install/bin/nerv examples/cumatrix_example.lua
-```
-To get an example of DNN(for ASR) training, run(this requires the speech modules)
-You need to be at or (copy files from) `/slfs1`(SJTU speechlab cluster) to get this running.
-```
-./install/bin/nerv nerv/examples/asr_trainer.lua nerv/examples/swb_baseline.lua
-```
-
-##How to contribute##
-Fork the original repository, then use the __pull&merge__ function in github to contribute.
-The pull&merge request can be found on your dashboard in github. See this [sync-help] to sync with the original repository.
-
-##Nerv Packages##
-* __luaT__
-Nerv uses [luaT]\(a [Torch] library\) to define lua class in C.
-* __[The Nerv OOP](nerv/doc/nerv_class.md)__
-Enables object-oriented programming in Nerv.
-* __[The Nerv utility functions](nerv/doc/nerv.md)__
-Inlcudes some utility functions from luaT to implement __Nerv.Class__.
-* __[The Nerv Matrix Package](nerv/doc/nerv_matrix.md)__
-The matrix package is a basic package in __Nerv__ that is used to store and manipulate matrices.
-* __[The Nerv IO Package](nerv/doc/nerv_io.md)__
-The IO package is used to read and write parameters to file.
-* __[The Nerv Parameter Package](nerv/doc/nerv_param.md)__
-The parameter package is used to store, read model parameters from file.
-* __[The Nerv Layer Package](nerv/doc/nerv_layer.md)__
-The layer package is used to define propagation and backpropagation of different type of layers.
-* __[The Nerv NN Package](nerv/doc/nerv_nn.md)__
-The nn package is for organizing a neural network, it contains __nerv.LayerRepo__, __nerv.ParamRepo__, and __nerv.DAGLayer__.
-[luaT]:https://github.com/torch/torch7/tree/master/lib/luaT
-[Torch]:https://github.com/torch
-[sync-help]:https://help.github.com/articles/syncing-a-fork/
diff --git a/README.rst b/README.rst
new file mode 100644
index 0000000..c00743c
--- /dev/null
+++ b/README.rst
@@ -0,0 +1,64 @@
+NERV Toolkit
+============
+
+NOTE: This readme is in-progress.
+
+Installation
+------------
+First, make sure you have at least one implementation of BLAS and CUDA installed
+on your computer.
+
+- Checkout NERV:
+
+ ::
+
+ bash
+ git clone https://speechlab.sjtu.edu.cn/gitlab/nerv-dev/nerv.git
+
+- Checkout submodules (luajit, luarocks, Penlight, etc.):
+
+ ::
+
+ cd nerv
+ git submodule init && git submodule update
+
+- Build NERV: you can specify either ``mkl`` or ``atlas`` to ``BLAS_TYPE``.
+ ``BLAS_BASE`` is the directory containing BLAS ``.so`` files. By default,
+ ``atlas`` is used for ``BLAS_TYPE``, ``/usr/lib/`` is used for ``BLAS_BASE``,
+ and ``/usr/local/cuda`` is used for ``CUDA_BASE``.
+
+ ::
+
+ # an example for compiling on SJTU Speechlab major cluster
+ make BLAS_TYPE=mkl BLAS_BASE=/home/intel/mkl/lib/intel64/ CUDA_BASE=/usr/local/cuda
+
+- To include some new features (e.g. ``atomicCAS`` in CUDA), add corresponding flags to
+ ``NERV_FEAT`` (e.g. ``NERV_FEAT=-D__NERV_FUTURE_CUDA_7``) while making:
+
+ ::
+
+ make NERV_FEAT=-D__NERV_FUTURE_CUDA_7 BLAS_TYPE=mkl BLAS_BASE=/home/intel/mkl/lib/intel64/ CUDA_BASE=/usr/local/cuda
+
+- For speech tasks, you need to install related lua rocks (Lua packages):
+
+ ::
+
+ # checkout speech repository to local directory nerv/speech (suppose you're
+ # still at the root directory of NERV repo)
+ git clone https://speechlab.sjtu.edu.cn/gitlab/nerv-dev/nerv-speech.git speech
+ # build and install HTK I/O support, Kaldi I/O support, Kaldi decoding support, etc.
+ make speech BLAS_TYPE=mkl BLAS_BASE=/home/intel/mkl/lib/intel64/
+
+Example & Tutorial
+------------------
+For speech tasks, please refer to ``tutorial/`` in ``nerv-speech`` repository.
+
+Contribution
+------------
+The basic rule is simple: just fork the original repository, then create a pull
+request (merge request) to the administrator of the project. If you want to fix
+any bugs in existing code, don't hesitate to create a pull (merge) request to
+the repository with clear and detailed analysis of the problem. If you want to
+add additional task-specific functionalities (modules) for speech to NERV,
+please create a luarocks-compliant package and also a pull (merge) request to
+the ``nerv-speech`` repository instead of ``nerv``.
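The quick-start invocations from the deleted README.md are not carried over into README.rst; with the default PREFIX they would still read as below (copied from the old README; the swb_baseline config additionally assumes access to the speechlab data paths):

    # run a standalone matrix example with the installed interpreter
    ./install/bin/nerv examples/cumatrix_example.lua
    # DNN training for ASR (requires the speech modules and the corresponding data)
    ./install/bin/nerv nerv/examples/asr_trainer.lua nerv/examples/swb_baseline.lua
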
diff --git a/nerv/Makefile b/nerv/Makefile
index c0db53a..68465a1 100644
--- a/nerv/Makefile
+++ b/nerv/Makefile
@@ -1,3 +1,11 @@
+ifndef LUA_BINDIR
+$(error Please build the package via luarocks: `luarocks make`)
+endif
+
+ifndef CUDA_BASE
+$(error CUDA_BASE is not set)
+endif
+
.PHONY: build install clean
SHELL := /bin/bash
@@ -6,14 +14,15 @@ LIB_PATH := $(LUA_BINDIR)/../lib
INC_PATH := $(LUA_BINDIR)/../include/nerv
LUA_DIR = $(INST_LUADIR)/nerv
OBJ_DIR := $(BUILD_DIR)/objs
-ISUBDIR := io matrix luaT
+ISUBDIR := lib matrix lib/io lib/matrix lib/luaT
SUBDIR := matrix io layer examples nn tnn lib/io lib/luaT lib/matrix
INC_SUBDIR := $(addprefix $(INC_PATH)/,$(ISUBDIR))
OBJ_SUBDIR := $(addprefix $(OBJ_DIR)/,$(SUBDIR))
LUA_SUBDIR := $(addprefix $(LUA_DIR)/,$(SUBDIR))
-INCS := common.h matrix/matrix.h io/chunk_file.h luaT/luaT.h
+INCS := lib/common.h lib/matrix/matrix.h lib/matrix/mmatrix.h lib/io/chunk_file.h lib/luaT/luaT.h \
+ matrix/matrix.h
CORE_OBJS := lib/common.o lib/io/chunk_file.o \
lib/matrix/mmatrix.o lib/matrix/cumatrix.o lib/matrix/cukernel.o
NERV_OBJS := nerv.o \
@@ -33,17 +42,17 @@ LUA_LIBS := matrix/init.lua io/init.lua init.lua \
layer/init.lua layer/affine.lua layer/sigmoid.lua layer/tanh.lua layer/softmax_ce.lua layer/softmax.lua \
layer/window.lua layer/bias.lua layer/combiner.lua layer/mse.lua \
layer/elem_mul.lua layer/lstm.lua layer/lstm_gate.lua layer/dropout.lua layer/gru.lua \
- nn/init.lua nn/layer_repo.lua nn/param_repo.lua nn/layer_dag.lua \
- io/sgd_buffer.lua \
- tnn/init.lua tnn/sutil.lua tnn/tnn.lua
+ layer/graph.lua layer/rnn.lua layer/duplicate.lua layer/identity.lua \
+ nn/init.lua nn/layer_repo.lua nn/param_repo.lua nn/network.lua \
+ io/sgd_buffer.lua io/seq_buffer.lua
INCLUDE := -I $(LUA_INCDIR) -DLUA_USE_APICHECK
-#CUDA_BASE := /usr/local/cuda-7.0
-CUDA_BASE := /usr/local/cuda
CUDA_INCLUDE := -I $(CUDA_BASE)/include/
INCLUDE += $(CUDA_INCLUDE)
-LDFLAGS := -L$(CUDA_BASE)/lib64/ -Wl,-rpath=$(CUDA_BASE)/lib64/ -lcudart -lcublas -lcurand
+CUDA_LDFLAGS := -L$(CUDA_BASE)/lib64/ -Wl,-rpath=$(CUDA_BASE)/lib64/ -lcudart -lcuda -lcublas -lcurand
+override CFLAGS += $(NERV_FEAT)
+
NVCC := $(CUDA_BASE)/bin/nvcc
EMPTY :=
SPACE := $(EMPTY) $(EMPTY)
@@ -64,11 +73,11 @@ $(LUA_DIR)/%.lua: %.lua
cp $< $@
$(LIB_PATH)/libnervcore.so: $(CORE_OBJS)
- gcc -shared -o $@ $^ $(LDFLAGS) -lcblas
+ gcc -shared -o $@ $^ $(LDFLAGS) $(CUDA_LDFLAGS) $(BLAS_LDFLAGS)
$(LIB_PATH)/libluaT.so: $(LUAT_OBJS)
- gcc -shared -o $@ $^ $(LDFLAGS)
+ gcc -shared -o $@ $^
$(INST_LIBDIR)/libnerv.so: $(NERV_OBJS) $(LIB_PATH)/libnervcore.so $(LIB_PATH)/libluaT.so
- gcc -shared -o $@ $(NERV_OBJS) $(LDFLAGS) -Wl,-rpath=$(LIB_PATH) -L$(LIB_PATH) -lnervcore -lluaT
+ gcc -shared -o $@ $(NERV_OBJS) -Wl,-rpath=$(LIB_PATH) -L$(LIB_PATH) -lnervcore -lluaT
$(OBJ_DIR)/matrix/cumatrix.o: matrix/generic/cumatrix.c matrix/generic/matrix.c
$(OBJ_DIR)/matrix/mmatrix.o: matrix/generic/mmatrix.c matrix/generic/matrix.c
@@ -82,5 +91,5 @@ clean:
install: $(LIBS) $(LUA_DIR) $(LUA_SUBDIR) $(LUA_LIBS) $(INC_SUBDIR) $(INCS)
-$(INC_PATH)/%.h: lib/%.h
+$(INC_PATH)/%.h: %.h
cp $< $@
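With the old LDFLAGS split into CUDA_LDFLAGS and BLAS_LDFLAGS above, libnervcore.so now carries both the CUDA and BLAS link lines while libluaT.so is linked standalone. A hedged post-build sanity check; the library path assumes the default PREFIX of install/:

    # the core library should resolve CUDA and BLAS symbols
    ldd install/lib/libnervcore.so | grep -E 'cudart|cublas|curand|cblas|mkl'
    # libluaT.so should list no CUDA or BLAS dependencies
    ldd install/lib/libluaT.so
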
diff --git a/nerv/doc/nerv.md b/nerv/doc/nerv.md
index 28411f5..125928d 100644
--- a/nerv/doc/nerv.md
+++ b/nerv/doc/nerv.md
@@ -1,6 +1,6 @@
-#The Nerv utility functions#
+# The Nerv utility functions
Part of the [Nerv](../README.md) toolkit.
-##Methods##
+## Methods
* __string = nerv.typename(obj a)__
A registered function, the original function is `luaT_lua_typename`. In some cases if you call `type(a)` for object of some class in __Nerv__(like __Nerv.CuMatrix__) it will only return "userdata"(because it is created in C), in this case you can use this method to get its type.
@@ -14,4 +14,4 @@ A registered function, the original function is `luaT_newmetatable`, it returns
* __string = nerv.setmetatable(table self, string tname)__
A registered function, the original function is `luaT_lua_setmetatable`. It assigns the metatable registered in __luaT__ by the name *tname* to the table *self*. And return *tname* to user.
* __table = nerv.get_type(string typename)__
-Returns the type(`loadstring("return " .. typename)`). \ No newline at end of file
+Returns the type(`loadstring("return " .. typename)`).
diff --git a/nerv/doc/nerv_class.md b/nerv/doc/nerv_class.md
index 99f63e7..8314b12 100644
--- a/nerv/doc/nerv_class.md
+++ b/nerv/doc/nerv_class.md
@@ -1,10 +1,10 @@
-#The Nerv OOP#
+# The Nerv OOP
Part of the [Nerv](../README.md) toolkit.
-##Methods##
+## Methods
* __metatable mt, metatable mpt = nerv.class(string tname, string parenttname)__
This method is used to create a class by the name `tname`, which inherits `parenttname` in __Nerv__, then you create a new instance of this class by calling `obj=tname(...)`. The `tname.__init(...)` method(if defined) will be called in the constructing. The metatable of the class and its parent class will be returned.
-##Examples##
+## Examples
* This example implements a simple `nerv.Counter` class which is inherited by `nerv.BetterCounter`.
```
@@ -33,4 +33,4 @@ c1 = nerv.Counter(1)
print(c1.c)
bc1 = nerv.BetterCounter(1, 1)
print(bc1.c, bc1.bc)
-``