-- nerv/layer/tanh.lua
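-- The hyperbolic tangent activation layer. It is elementwise and
-- parameter-free: one input matrix in, one output matrix of the same
-- dimension out.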
local TanhLayer = nerv.class("nerv.TanhLayer", "nerv.Layer")

function TanhLayer:__init(id, global_conf, layer_conf)
    self.id = id
    self.gconf = global_conf
    self.dim_in = layer_conf.dim_in
    self.dim_out = layer_conf.dim_out
    self:check_dim_len(1, 1)
end

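-- tanh is applied elementwise, so the (single) input and output
-- dimensions must agree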
function TanhLayer:init()
    if self.dim_in[1] ~= self.dim_out[1] then
        nerv.error("mismatching dimensions of input and output")
    end
end

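-- the layer keeps no per-batch buffers, so resizing is a no-op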
function TanhLayer:batch_resize(batch_size)
    -- do nothing
end

function TanhLayer:update(bp_err, input, output)
    -- no params, therefore do nothing
end

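-- forward pass: write tanh(input) elementwise into the output matrix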
function TanhLayer:propagate(input, output)
    output[1]:tanh(input[1])
end

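-- backward pass: since tanh'(x) = 1 - tanh(x)^2, the gradient only needs
-- the cached forward output; tanh_grad scales bp_err by (1 - output^2)
-- elementwise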
function TanhLayer:back_propagate(bp_err, next_bp_err, input, output)
    next_bp_err[1]:tanh_grad(bp_err[1], output[1])
end

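-- no trainable parameters, so return an empty ParamRepo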
function TanhLayer:get_params()
    return nerv.ParamRepo({})
end