From 2497fd9e7a0fae5ee4887890d7a312e0e08a93b8 Mon Sep 17 00:00:00 2001
From: Determinant
Date: Mon, 22 Jun 2015 19:01:29 +0800
Subject: major change: use luarocks to manage project

---
 layer/softmax_ce.lua | 68 ----------------------------------------------------
 1 file changed, 68 deletions(-)
 delete mode 100644 layer/softmax_ce.lua

diff --git a/layer/softmax_ce.lua b/layer/softmax_ce.lua
deleted file mode 100644
index daf891e..0000000
--- a/layer/softmax_ce.lua
+++ /dev/null
@@ -1,68 +0,0 @@
-local SoftmaxCELayer = nerv.class("nerv.SoftmaxCELayer", "nerv.Layer")
-
-function SoftmaxCELayer:__init(id, global_conf, layer_conf)
-    self.id = id
-    self.gconf = global_conf
-    self.dim_in = layer_conf.dim_in
-    self.dim_out = layer_conf.dim_out
-    self.compressed = layer_conf.compressed
-    if self.compressed == nil then
-        self.compressed = false
-    end
-    self:check_dim_len(2, -1) -- two inputs: nn output and label
-end
-
-function SoftmaxCELayer:init(batch_size)
-    if not self.compressed and (self.dim_in[1] ~= self.dim_in[2]) then
-        nerv.error("mismatching dimensions of previous network output and labels")
-    end
-    self.total_ce = 0.0
-    self.total_correct = 0
-    self.total_frames = 0
-    self.softmax = self.gconf.cumat_type(batch_size, self.dim_in[1])
-    self.ce = self.softmax:create()
-end
-
-function SoftmaxCELayer:update(bp_err, input, output)
-    -- no params, therefore do nothing
-end
-
-function SoftmaxCELayer:propagate(input, output)
-    local softmax = self.softmax
-    local ce = self.ce
-    local classified = softmax:softmax(input[1])
-    local label = input[2]
-    ce:log_elem(softmax)
-    if self.compressed then
-        label = label:decompress(input[1]:ncol())
-    end
-    ce:mul_elem(ce, label)
-    ce = ce:rowsum()
-    if output[1] ~= nil then
-        output[1]:copy_fromd(ce)
-    end
-    -- add total ce
-    self.total_ce = self.total_ce - ce:colsum()[0]
-    self.total_frames = self.total_frames + softmax:nrow()
-    -- TODO: add colsame for uncompressed label
-    if self.compressed then
-        self.total_correct = self.total_correct + classified:colsame(input[2])[0]
-    end
-end
-
-function SoftmaxCELayer:back_propagate(bp_err, next_bp_err, input, output)
-    -- softmax output - label
-    local label = input[2]
-    if self.compressed then
-        label = label:decompress(input[1]:ncol())
-    end
-    local nbe = next_bp_err[1]
-    nbe:add(self.softmax, label, 1.0, -1.0)
-    if bp_err[1] ~= nil then
-        nbe:scale_rows_by_col(bp_err[1])
-    end
-end
-
-function SoftmaxCELayer:get_params()
-    return nerv.ParamRepo({})
-end
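For reference, the deleted back_propagate relies on the standard identity that the gradient of cross-entropy through a softmax is simply softmax(input) - label, which is what nbe:add(self.softmax, label, 1.0, -1.0) computes; propagate's log_elem/mul_elem/rowsum chain is the cross-entropy itself. Below is a minimal plain-Lua sketch of that math, not part of the patch: nerv's CUDA matrix ops are replaced with table arithmetic, and all helper names here are illustrative rather than nerv API.

    -- Plain-Lua illustration of the softmax + cross-entropy math used by
    -- SoftmaxCELayer (tables stand in for nerv's cumat rows).
    local function softmax(z)
        local max = -math.huge
        for _, v in ipairs(z) do max = math.max(max, v) end
        local sum, out = 0.0, {}
        for i, v in ipairs(z) do
            out[i] = math.exp(v - max)  -- subtract max for numerical stability
            sum = sum + out[i]
        end
        for i = 1, #out do out[i] = out[i] / sum end
        return out
    end

    -- cross-entropy for one frame: -sum_i label[i] * log(probs[i]),
    -- cf. ce:log_elem + ce:mul_elem + ce:rowsum in propagate
    local function cross_entropy(probs, label)
        local ce = 0.0
        for i = 1, #probs do ce = ce - label[i] * math.log(probs[i]) end
        return ce
    end

    local z     = {1.0, 2.0, 0.5}  -- network output for one frame (made up)
    local label = {0.0, 1.0, 0.0}  -- one-hot target

    local probs = softmax(z)
    print(("ce = %.4f"):format(cross_entropy(probs, label)))

    -- gradient w.r.t. z is softmax(z) - label, matching
    -- nbe:add(self.softmax, label, 1.0, -1.0) in back_propagate
    for i = 1, #z do
        print(("grad[%d] = %.4f"):format(i, probs[i] - label[i]))
    end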