-- Element-wise tanh activation layer: one input port, one output port,
-- and no trainable parameters.
local TanhLayer = nerv.class("nerv.TanhLayer", "nerv.Layer")

function TanhLayer:__init(id, global_conf, layer_conf)
    nerv.Layer.__init(self, id, global_conf, layer_conf)
    self:check_dim_len(1, 1) -- exactly one input and one output port
end

function TanhLayer:bind_params()
    -- no parameters to bind
end

function TanhLayer:init()
    if self.dim_in[1] ~= self.dim_out[1] then
        nerv.error("mismatching dimensions of input and output")
    end
end

function TanhLayer:batch_resize(batch_size)
    -- no internal buffers to resize
end

function TanhLayer:update(bp_err, input, output)
    -- no params, therefore do nothing
end

function TanhLayer:propagate(input, output)
    -- forward pass: output = tanh(input), element-wise
    output[1]:tanh(input[1])
end

function TanhLayer:back_propagate(bp_err, next_bp_err, input, output)
    -- backward pass: since d/dx tanh(x) = 1 - tanh(x)^2, the gradient
    -- can be computed from the layer output alone
    next_bp_err[1]:tanh_grad(bp_err[1], output[1])
end

function TanhLayer:get_params()
    -- stateless layer: return an empty parameter repository
    return nerv.ParamRepo({}, self.loc_type)
end
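
-- A minimal usage sketch of the port-table calling convention used above.
-- Assumptions (not part of this file): "gconf" is a NERV global config,
-- "mat_in" and "mat_out" are pre-allocated matrices of matching width,
-- and 429 is an arbitrary example dimension.
--
--     local layer = nerv.TanhLayer("tanh1", gconf,
--                                  {dim_in = {429}, dim_out = {429}})
--     layer:init()
--     layer:propagate({mat_in}, {mat_out}) -- mat_out = tanh(mat_in)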