-- nerv/tnn/init.lua
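-- Abstract base class for the time-aware layers used by nerv's TNN
-- (temporal, i.e. unrolled-over-time network) module. Concrete layers
-- subclass nerv.LayerT and override the methods below; the forward,
-- backward and update calls all carry an explicit time step t.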
local LayerT = nerv.class('nerv.LayerT')

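-- Construct the layer from its id, the global configuration and its
-- layer-specific configuration; concrete subclasses must override this.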
function LayerT:__init(id, global_conf, layer_conf)
    nerv.error_method_not_implemented()
end

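-- Allocate runtime state for a mini-batch of batch_size rows unrolled
-- over chunk_size time steps; concrete subclasses must override this.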
function LayerT:init(batch_size, chunk_size)
    nerv.error_method_not_implemented()
end

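-- Update the layer's parameters at time step t, given the error signal
-- bp_err and the input/output of the forward pass; must be overridden.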
function LayerT:update(bp_err, input, output, t)
    nerv.error_method_not_implemented()
end

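-- Forward pass: compute output from input at time step t; must be
-- overridden.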
function LayerT:propagate(input, output, t)
    nerv.error_method_not_implemented()
end

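-- Backward pass at time step t: given bp_err (the error w.r.t. this
-- layer's output), fill next_bp_err with the error to hand to the
-- preceding layer; must be overridden.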
function LayerT:back_propagate(bp_err, next_bp_err, input, output, t)
    nerv.error_method_not_implemented()
end

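-- Check that the number of input/output connections configured for this
-- layer (#self.dim_in / #self.dim_out) matches what the concrete layer
-- class expects; a non-positive expected count skips that check.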
function LayerT:check_dim_len(len_in, len_out)
    local given_in = #self.dim_in
    local given_out = #self.dim_out
    if len_in > 0 and given_in ~= len_in then
        nerv.error("layer %s expects %d inputs, %d given",
                    self.id, len_in, given_in)
    end
    if len_out > 0 and given_out ~= len_out then
        nerv.error("layer %s expects %d outputs, %d given",
                    self.id, len_out, given_out)
    end
end

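-- Parameter lookup is identical to that of the ordinary (non-temporal)
-- nerv.Layer, so it is reused directly.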
LayerT.find_param = nerv.Layer.find_param

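-- Return the parameters held by this layer; must be overridden.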
function LayerT:get_params()
    nerv.error_method_not_implemented()
end

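-- Return the lists of input and output dimensions of this layer.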
function LayerT:get_dim()
    return self.dim_in, self.dim_out
end

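-- Load the TNN utilities, the TNN network class, the time-aware (T)
-- layer implementations and the DAG container for T-layers.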
nerv.include('sutil.lua')
nerv.include('tnn.lua')
nerv.include('layersT/softmax_ce_t.lua')
nerv.include('layersT/lstm_t.lua')
nerv.include('layersT/gru_t.lua')
nerv.include('layersT/dropout_t.lua')
nerv.include('layer_dag_t.lua')
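
-- The sketch below is illustrative only and not part of the original
-- file: a minimal pass-through layer showing what a concrete
-- nerv.LayerT subclass looks like. IdentityLayerT is a hypothetical
-- name, and the copy_fromd / ParamRepo calls are assumed from nerv's
-- matrix and parameter APIs; check them against your nerv build.
--
-- local IdentityLayerT = nerv.class("nerv.IdentityLayerT", "nerv.LayerT")
--
-- function IdentityLayerT:__init(id, global_conf, layer_conf)
--     self.id = id
--     self.gconf = global_conf
--     self.dim_in = layer_conf.dim_in
--     self.dim_out = layer_conf.dim_out
--     self:check_dim_len(1, 1) -- exactly one input and one output
-- end
--
-- function IdentityLayerT:init(batch_size, chunk_size)
--     -- stateless layer: nothing to allocate, only sanity-check dims
--     if self.dim_in[1] ~= self.dim_out[1] then
--         nerv.error("mismatching dimensions of input and output")
--     end
-- end
--
-- function IdentityLayerT:propagate(input, output, t)
--     output[1]:copy_fromd(input[1]) -- forward the input unchanged
-- end
--
-- function IdentityLayerT:back_propagate(bp_err, next_bp_err, input, output, t)
--     next_bp_err[1]:copy_fromd(bp_err[1]) -- pass the error straight through
-- end
--
-- function IdentityLayerT:update(bp_err, input, output, t)
--     -- no parameters to update
-- end
--
-- function IdentityLayerT:get_params()
--     return nerv.ParamRepo({}) -- this layer holds no parameters
-- end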