author     cloudygoose <cloudygooseg@gmail.com>  2015-06-12 12:27:36 +0800
committer  cloudygoose <cloudygooseg@gmail.com>  2015-06-12 12:27:36 +0800
commit     83dbefa3435b72911ee643f9f99205247d140279 (patch)
tree       819edfc2f32080eb398bbb736a61e27db093b317
parent     c3db7ffba45b7e4d0a1d76281e187b3f88129db9 (diff)
doc change
-rw-r--r--  Makefile            4
-rw-r--r--  doc/nerv.md         2
-rw-r--r--  doc/nerv_layer.md  10
-rw-r--r--  doc/nerv_nn.md     37
4 files changed, 44 insertions, 9 deletions
diff --git a/Makefile b/Makefile
index 448e003..8f1d491 100644
--- a/Makefile
+++ b/Makefile
@@ -12,8 +12,8 @@ LUA_LIBS := matrix/init.lua io/init.lua nerv.lua \
nn/init.lua nn/layer_repo.lua nn/param_repo.lua nn/layer_dag.lua \
io/sgd_buffer.lua
INCLUDE := -I build/luajit-2.0/include/luajit-2.0/ -DLUA_USE_APICHECK
-#CUDA_BASE := /usr/local/cuda-6.5
-CUDA_BASE := /usr/local/cuda-5.0
+CUDA_BASE := /usr/local/cuda-6.5
+#CUDA_BASE := /usr/local/cuda-5.0
CUDA_INCLUDE := -I $(CUDA_BASE)/include/
INCLUDE += $(CUDA_INCLUDE)
LDFLAGS := -L$(CUDA_BASE)/lib64/ -Wl,-rpath=$(CUDA_BASE)/lib64/ -lcudart -lcublas
diff --git a/doc/nerv.md b/doc/nerv.md
index 22b4072..28411f5 100644
--- a/doc/nerv.md
+++ b/doc/nerv.md
@@ -13,3 +13,5 @@ A registered function, the original function is `luaT_lua_getmetatable`. `tname`
A registered function; the original function is `luaT_newmetatable`. It returns the metatable of the class created under the name `tname`.
* __string = nerv.setmetatable(table self, string tname)__
A registered function; the original function is `luaT_lua_setmetatable`. It assigns the metatable registered in __luaT__ under the name *tname* to the table *self*, and returns *tname* to the user.
+* __table = nerv.get_type(string typename)__
+Returns the type given by `typename`, i.e. the result of `loadstring("return " .. typename)`.
\ No newline at end of file
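A hedged usage sketch of the new `nerv.get_type` entry; the type name is just an example, and the behavior shown simply follows the `loadstring` note above:

```
-- per the doc line above, this evaluates 'return nerv.CuMatrixFloat'
-- and hands back that class table
local mat_type = nerv.get_type("nerv.CuMatrixFloat")
local m = mat_type(4, 4) -- same as calling nerv.CuMatrixFloat(4, 4)
```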
diff --git a/doc/nerv_layer.md b/doc/nerv_layer.md
index 0425d5f..ac6480c 100644
--- a/doc/nerv_layer.md
+++ b/doc/nerv_layer.md
@@ -141,7 +141,8 @@ print('network input&output&error space allocation...')
affineI = {dataM} --input to the network is data
affineO = {nerv.CuMatrixFloat(data_num, 2)}
softmaxI = {affineO[1], labelM}
-softmaxO = {nerv.CuMatrixFloat(data_num, 2)}
+softmaxO = {}
+output = nerv.CuMatrixFloat(data_num, 2)
affineE = {nerv.CuMatrixFloat(data_num, 2)}
--[[space allocation end]]--
@@ -152,9 +153,9 @@ ce_last = 0
for l = 0, 10, 1 do
affineL:propagate(affineI, affineO)
softmaxL:propagate(softmaxI, softmaxO)
- softmaxO[1]:softmax(softmaxI[1])
+ output:softmax(softmaxI[1])
- softmaxL:back_propagate(affineE, nil, softmaxI, softmaxO)
+ softmaxL:back_propagate(affineE, {}, softmaxI, softmaxO)
affineL:update(affineE, affineI, affineO)
@@ -162,10 +163,9 @@ for l = 0, 10, 1 do
nerv.utils.printf("training iteration %d finished\n", l)
nerv.utils.printf("cross entropy: %.8f\n", softmaxL.total_ce - ce_last)
ce_last = softmaxL.total_ce
- nerv.utils.printf("accurate labels: %d\n", calculate_accurate(softmaxO[1], labelM))
+ nerv.utils.printf("accurate labels: %d\n", calculate_accurate(output, labelM))
nerv.utils.printf("total frames processed: %.8f\n", softmaxL.total_frames)
end
end
--[[end training]]--
-
```
\ No newline at end of file
diff --git a/doc/nerv_nn.md b/doc/nerv_nn.md
index 54c7165..0f274c4 100644
--- a/doc/nerv_nn.md
+++ b/doc/nerv_nn.md
@@ -12,7 +12,24 @@ it contains __nerv.LayerRepo__, __nerv.ParamRepo__, and __nerv.DAGLayer__(inheri
* `table layers` Contains the mapping of layer ID to layer objects.
* __nerv.DAGLayer__ inherits __nerv.Layer__.
-
+ * `table layers` Mapping from a layer ID to its "ref". A ref has the structure below:
+ ```
+ nerv.Layer layer -- the underlying layer object
+ nerv.Matrix inputs
+ nerv.Matrix outputs
+ nerv.Matrix err_inputs
+ nerv.Matrix err_outputs
+ table next_layers
+ int input_len -- #dim_in
+ int output_len -- #dim_out
+ int in_deg
+ bool visited -- used in the topological sort
+ ```
+ * `inputs` The input matrices of the whole DAG.
+ * `outputs` The output matrices of the whole DAG.
+ * `parsed_conn` The parsed connections between sub-layers.
+ * `queue` The sub-layer refs in topological order (see the sketch after this list).
+
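To make `in_deg`, `next_layers`, and `visited` concrete, the sketch below shows a Kahn-style topological sort over such refs. It is not nerv's actual implementation; it assumes `layers` maps layer IDs to refs and that `next_layers` maps to successor refs:

```
local function topo_sort(layers)
    local queue = {}
    -- start from refs that receive no input from other sub-layers
    for _, ref in pairs(layers) do
        if ref.in_deg == 0 then
            ref.visited = true
            table.insert(queue, ref)
        end
    end
    local head = 1
    while head <= #queue do
        local ref = queue[head]
        head = head + 1
        -- releasing a ref decreases the in-degree of its successors
        for _, nxt in pairs(ref.next_layers) do
            nxt.in_deg = nxt.in_deg - 1
            if nxt.in_deg == 0 and not nxt.visited then
                nxt.visited = true
                table.insert(queue, nxt)
            end
        end
    end
    return queue
end
```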
##Methods##
###__nerv.ParamRepo__###
* __void ParamRepo:\_\_init(table param_files)__
@@ -29,4 +46,20 @@ llist : {layer1, layer2, ...}
layer : layerid = {param_config, layer_config}
param_config : {param1 = paramID1, param2 = paramID2}
```
-__LayerRepo__ will merge `param_config` into `layer_config` and construct a layer by calling `layer_type(layerid, global_conf, layer_config)`.
\ No newline at end of file
+__LayerRepo__ will merge `param_config` into `layer_config` and construct a layer by calling `layer_type(layerid, global_conf, layer_config)`.
+
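A hedged sketch of the spec format described above; the layer types, parameter IDs, and dimensions are illustrative, and the constructor is assumed to take the spec together with an existing __nerv.ParamRepo__ (`param_repo`) and the global config (`gconf`):

```
-- illustrative only: layer IDs, param IDs, and dims are made up
layers = nerv.LayerRepo(
    {
        ["nerv.AffineLayer"] =
        {
            affine0 = {{ltp = "affine0_ltp", bp = "affine0_bp"},
                       {dim_in = {429}, dim_out = {2048}}},
        },
        ["nerv.SigmoidLayer"] =
        {
            sigmoid0 = {{}, {dim_in = {2048}, dim_out = {2048}}},
        },
    }, param_repo, gconf)
```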
+* __nerv.Layer = LayerRepo:get_layer(string lid)__
+Returns the layer in the repo whose ID is `lid`.
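A one-line usage sketch, with `layers` being the repo built in the example above and the layer ID illustrative:

```
local affine0 = layers:get_layer("affine0") -- fetch a sub-layer by its ID
```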
+
+###__nerv.DAGLayer__###
+* __void DAGLayer:\_\_init(string id, table global_conf, table layer_conf)__
+ Parameters:
+ `id`: __string__, the ID of the layer.
+ `global_conf`: __table__, the global config.
+ `layer_conf`: __table__, the layer config.
\ No newline at end of file
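A hedged construction sketch; the `sub_layers` and `connections` fields of `layer_conf`, as well as the `<input>`/`<output>` port syntax, are assumptions modeled on nerv's DAG connection format, with `layers` and `gconf` coming from the examples above:

```
-- field names below are assumptions, not confirmed by this doc
dag = nerv.DAGLayer("dag0", gconf,
    {
        dim_in = {429}, dim_out = {2048},
        sub_layers = layers,
        connections = {
            ["<input>[1]"] = "affine0[1]",
            ["affine0[1]"] = "sigmoid0[1]",
            ["sigmoid0[1]"] = "<output>[1]",
        },
    })
```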