Diffstat (limited to 'nerv/nn/layer_dag.lua')
-rw-r--r-- | nerv/nn/layer_dag.lua | 40
1 file changed, 36 insertions, 4 deletions
diff --git a/nerv/nn/layer_dag.lua b/nerv/nn/layer_dag.lua
index f69d31c..73bb77d 100644
--- a/nerv/nn/layer_dag.lua
+++ b/nerv/nn/layer_dag.lua
@@ -79,7 +79,7 @@ function DAGLayer:__init(id, global_conf, layer_conf)
         end
         table.insert(parsed_conn,
-            {{ref_from, port_from}, {ref_to, port_to}})
+                     {{ref_from, port_from}, {ref_to, port_to}})
         table.insert(ref_from.next_layers, ref_to) -- add edge
         ref_to.in_deg = ref_to.in_deg + 1 -- increase the in-degree of the target layer
     end
 
@@ -140,8 +140,11 @@ function DAGLayer:init(batch_size)
         ref_from, port_from = unpack(conn[1])
         ref_to, port_to = unpack(conn[2])
         _, output_dim = ref_from.layer:get_dim()
-        local mid = self.gconf.cumat_type(batch_size,
-                                          output_dim[port_from])
+        local dim = 1
+        if output_dim[port_from] > 0 then
+            dim = output_dim[port_from]
+        end
+        local mid = self.gconf.cumat_type(batch_size, dim)
         local err_mid = mid:create()
 
         ref_from.outputs[port_from] = mid
@@ -176,6 +179,33 @@ function DAGLayer:init(batch_size)
     end
 end
 
+function DAGLayer:batch_resize(batch_size)
+    self.gconf.batch_size = batch_size
+
+    for i, conn in ipairs(self.parsed_conn) do
+        local _, output_dim
+        local ref_from, port_from, ref_to, port_to
+        ref_from, port_from = unpack(conn[1])
+        ref_to, port_to = unpack(conn[2])
+        _, output_dim = ref_from.layer:get_dim()
+
+        if ref_from.outputs[port_from]:nrow() ~= batch_size and output_dim[port_from] > 0 then
+            local mid = self.gconf.cumat_type(batch_size, output_dim[port_from])
+            local err_mid = mid:create()
+
+            ref_from.outputs[port_from] = mid
+            ref_to.inputs[port_to] = mid
+
+            ref_from.err_inputs[port_from] = err_mid
+            ref_to.err_outputs[port_to] = err_mid
+        end
+    end
+    for id, ref in pairs(self.layers) do
+        ref.layer:batch_resize(batch_size)
+    end
+    collectgarbage("collect")
+end
+
 function DAGLayer:set_inputs(input)
     for i = 1, #self.dim_in do
         if input[i] == nil then
@@ -228,11 +258,13 @@ end
 function DAGLayer:propagate(input, output)
     self:set_inputs(input)
     self:set_outputs(output)
+    local ret = false
     for i = 1, #self.queue do
         local ref = self.queue[i]
         -- print(ref.layer.id)
-        ref.layer:propagate(ref.inputs, ref.outputs)
+        ret = ref.layer:propagate(ref.inputs, ref.outputs)
     end
+    return ret
 end
 
 function DAGLayer:back_propagate(bp_err, next_bp_err, input, output)
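For context, a minimal usage sketch of the two entry points this patch touches (not part of the diff itself; `dag`, `gconf`, `input`, and `output` are hypothetical stand-ins for an already-initialized DAGLayer, its global config table, and the matrix tables fed to it):

    -- assumes `dag' was built and dag:init(gconf.batch_size) was called earlier
    local nrow = input[1]:nrow()              -- rows of the incoming minibatch
    if nrow ~= gconf.batch_size then
        -- reallocate only the intermediate mid/err_mid matrices whose
        -- row count no longer matches, then recurse into sub-layers
        dag:batch_resize(nrow)
    end
    local ret = dag:propagate(input, output)  -- propagate() now forwards the return
                                              -- value of the last layer in the queue

A typical motivation for such a resize hook is a data source that delivers a short final minibatch: the DAG can adapt its buffers in place instead of being rebuilt, and callers can now observe what the last propagated layer returned.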