Diffstat (limited to 'lua')
-rw-r--r--  lua/config.lua          67
-rw-r--r--  lua/main.lua            45
-rw-r--r--  lua/network.lua        106
-rw-r--r--  lua/reader.lua         113
-rw-r--r--  lua/select_linear.lua   62
-rw-r--r--  lua/timer.lua           33
-rw-r--r--  lua/tnn.lua            136
7 files changed, 0 insertions, 562 deletions
diff --git a/lua/config.lua b/lua/config.lua
deleted file mode 100644
index ff98ae0..0000000
--- a/lua/config.lua
+++ /dev/null
@@ -1,67 +0,0 @@
-function get_global_conf()
- local global_conf = {
- lrate = 0.15,
- wcost = 1e-5,
- momentum = 0,
- clip = 5,
- cumat_type = nerv.CuMatrixFloat,
- mmat_type = nerv.MMatrixFloat,
- vocab_size = 10000,
- nn_act_default = 0,
- hidden_size = 300,
- layer_num = 1,
- chunk_size = 15,
- batch_size = 20,
- max_iter = 35,
- param_random = function() return (math.random() / 5 - 0.1) end,
- dropout_rate = 0.5,
- timer = nerv.Timer(),
- pr = nerv.ParamRepo(),
- }
- return global_conf
-end
-
-function get_layers(global_conf)
- local pr = global_conf.pr
- local layers = {
- ['nerv.LSTMLayer'] = {},
- ['nerv.DropoutLayer'] = {},
- ['nerv.SelectLinearLayer'] = {
- ['select'] = {dim_in = {1}, dim_out = {global_conf.hidden_size}, vocab = global_conf.vocab_size, pr = pr},
- },
- ['nerv.CombinerLayer'] = {},
- ['nerv.AffineLayer'] = {
- output = {dim_in = {global_conf.hidden_size}, dim_out = {global_conf.vocab_size}, pr = pr}
- },
- ['nerv.SoftmaxCELayer'] = {
- softmax = {dim_in = {global_conf.vocab_size, global_conf.vocab_size}, dim_out = {1}, compressed = true},
- },
- }
- for i = 1, global_conf.layer_num do
- layers['nerv.LSTMLayer']['lstm' .. i] = {dim_in = {global_conf.hidden_size, global_conf.hidden_size, global_conf.hidden_size}, dim_out = {global_conf.hidden_size, global_conf.hidden_size}, pr = pr}
- layers['nerv.DropoutLayer']['dropout' .. i] = {dim_in = {global_conf.hidden_size}, dim_out = {global_conf.hidden_size}}
- layers['nerv.CombinerLayer']['dup' .. i] = {dim_in = {global_conf.hidden_size}, dim_out = {global_conf.hidden_size, global_conf.hidden_size}, lambda = {1}}
- end
- return layers
-end
-
-function get_connections(global_conf)
- local connections = {
- {'<input>[1]', 'select[1]', 0},
- {'select[1]', 'lstm1[1]', 0},
- {'dropout' .. global_conf.layer_num .. '[1]', 'output[1]', 0},
- {'output[1]', 'softmax[1]', 0},
- {'<input>[2]', 'softmax[2]', 0},
- {'softmax[1]', '<output>[1]', 0},
- }
- for i = 1, global_conf.layer_num do
- table.insert(connections, {'lstm' .. i .. '[1]', 'dup' .. i .. '[1]', 0})
- table.insert(connections, {'lstm' .. i .. '[2]', 'lstm' .. i .. '[3]', 1})
- table.insert(connections, {'dup' .. i .. '[1]', 'lstm' .. i .. '[2]', 1})
- table.insert(connections, {'dup' .. i .. '[2]', 'dropout' .. i .. '[1]', 0})
- if i > 1 then
- table.insert(connections, {'dropout' .. (i - 1) .. '[1]', 'lstm' .. i .. '[1]', 0})
- end
- end
- return connections
-end
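
The third element of each connection tuple is the time delay, so the loop's two delay-1 edges are the LSTM's recurrent feedback paths. As a worked expansion, derived directly from the loop above, layer_num = 1 adds exactly these four connections:

    {'lstm1[1]', 'dup1[1]', 0},    -- hidden output into the combiner
    {'lstm1[2]', 'lstm1[3]', 1},   -- cell state fed back with a one-step delay
    {'dup1[1]', 'lstm1[2]', 1},    -- hidden state fed back with a one-step delay
    {'dup1[2]', 'dropout1[1]', 0}, -- the combiner's second copy goes on to dropout
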
diff --git a/lua/main.lua b/lua/main.lua
deleted file mode 100644
index 39818aa..0000000
--- a/lua/main.lua
+++ /dev/null
@@ -1,45 +0,0 @@
-nerv.include('reader.lua')
-nerv.include('timer.lua')
-nerv.include('config.lua')
-nerv.include(arg[1])
-
-local global_conf = get_global_conf()
-local timer = global_conf.timer
-
-timer:tic('IO')
-
-local data_path = 'nerv/nerv/examples/lmptb/PTBdata/'
-local train_reader = nerv.Reader(data_path .. 'vocab', data_path .. 'ptb.train.txt.adds')
-local val_reader = nerv.Reader(data_path .. 'vocab', data_path .. 'ptb.valid.txt.adds')
-
-local train_data = train_reader:get_all_batch(global_conf)
-local val_data = val_reader:get_all_batch(global_conf)
-
-local layers = get_layers(global_conf)
-local connections = get_connections(global_conf)
-
-local NN = nerv.NN(global_conf, train_data, val_data, layers, connections)
-
-timer:toc('IO')
-timer:check('IO')
-io.flush()
-
-timer:tic('global')
-local best_cv = 1e10
-for i = 1, global_conf.max_iter do
- timer:tic('Epoch' .. i)
- local train_ppl, val_ppl = NN:epoch()
- if val_ppl < best_cv then
- best_cv = val_ppl
- else
- global_conf.lrate = global_conf.lrate / 2.0
- end
- nerv.printf('Epoch %d: %f %f %f\n', i, global_conf.lrate, train_ppl, val_ppl)
- timer:toc('Epoch' .. i)
- timer:check('Epoch' .. i)
- io.flush()
-end
-timer:toc('global')
-timer:check('global')
-timer:check('network')
-timer:check('gc')
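
main.lua takes the nerv.NN implementation to drive as its first argument via nerv.include(arg[1]); both network.lua and tnn.lua below define that class, so either file can be passed. A hypothetical invocation (the launcher name is an assumption, not something this diff confirms):

    -- illustrative only; 'nerv' as the interpreter/launcher is assumed:
    --   nerv main.lua network.lua   -- drive the Network-based nerv.NN
    --   nerv main.lua tnn.lua       -- drive the TNN-based nerv.NN
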
diff --git a/lua/network.lua b/lua/network.lua
deleted file mode 100644
index d106ba1..0000000
--- a/lua/network.lua
+++ /dev/null
@@ -1,106 +0,0 @@
-nerv.include('select_linear.lua')
-
-local nn = nerv.class('nerv.NN')
-
-function nn:__init(global_conf, train_data, val_data, layers, connections)
- self.gconf = global_conf
- self.network = self:get_network(layers, connections)
- self.train_data = self:get_data(train_data)
- self.val_data = self:get_data(val_data)
-end
-
-function nn:get_network(layers, connections)
- local layer_repo = nerv.LayerRepo(layers, self.gconf.pr, self.gconf)
- local graph = nerv.GraphLayer('graph', self.gconf,
- {dim_in = {1, self.gconf.vocab_size}, dim_out = {1},
- layer_repo = layer_repo, connections = connections})
- local network = nerv.Network('network', self.gconf,
- {network = graph, clip = self.gconf.clip})
- network:init(self.gconf.batch_size, self.gconf.chunk_size)
- return network
-end
-
-function nn:get_data(data)
- local err_output = {}
- local softmax_output = {}
- local output = {}
- for i = 1, self.gconf.chunk_size do
- err_output[i] = self.gconf.cumat_type(self.gconf.batch_size, 1)
- softmax_output[i] = self.gconf.cumat_type(self.gconf.batch_size, self.gconf.vocab_size)
- output[i] = self.gconf.cumat_type(self.gconf.batch_size, 1)
- end
- local ret = {}
- for i = 1, #data do
- ret[i] = {}
- ret[i].input = {}
- ret[i].output = {}
- ret[i].err_input = {}
- ret[i].err_output = {}
- for t = 1, self.gconf.chunk_size do
- ret[i].input[t] = {}
- ret[i].output[t] = {}
- ret[i].err_input[t] = {}
- ret[i].err_output[t] = {}
- ret[i].input[t][1] = data[i].input[t]
- ret[i].input[t][2] = data[i].output[t]
- ret[i].output[t][1] = output[t]
- local err_input = self.gconf.mmat_type(self.gconf.batch_size, 1)
- for j = 1, self.gconf.batch_size do
- if t <= data[i].seq_len[j] then
- err_input[j - 1][0] = 1
- else
- err_input[j - 1][0] = 0
- end
- end
- ret[i].err_input[t][1] = self.gconf.cumat_type.new_from_host(err_input)
- ret[i].err_output[t][1] = err_output[t]
- ret[i].err_output[t][2] = softmax_output[t]
- end
- ret[i].seq_length = data[i].seq_len
- ret[i].new_seq = {}
- for j = 1, self.gconf.batch_size do
- if data[i].seq_start[j] then
- table.insert(ret[i].new_seq, j)
- end
- end
- end
- return ret
-end
-
-function nn:process(data, do_train)
- local timer = self.gconf.timer
- local total_err = 0
- local total_frame = 0
- for id = 1, #data do
- data[id].do_train = do_train
- timer:tic('network')
- self.network:mini_batch_init(data[id])
- self.network:propagate()
- timer:toc('network')
- for t = 1, self.gconf.chunk_size do
- local tmp = data[id].output[t][1]:new_to_host()
- for i = 1, self.gconf.batch_size do
- if t <= data[id].seq_length[i] then
- total_err = total_err + math.log10(math.exp(tmp[i - 1][0]))
- total_frame = total_frame + 1
- end
- end
- end
- if do_train then
- timer:tic('network')
- self.network:back_propagate()
- self.network:update()
- timer:toc('network')
- end
- timer:tic('gc')
- collectgarbage('collect')
- timer:toc('gc')
- end
- return math.pow(10, - total_err / total_frame)
-end
-
-function nn:epoch()
- local train_error = self:process(self.train_data, true)
- local val_error = self:process(self.val_data, false)
- return train_error, val_error
-end
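
nn:process returns word perplexity: each frame contributes math.log10(math.exp(x)), which is just x / math.log(10) for the natural-log likelihood x read back from the network, and the return value is 10^(-average). A minimal standalone sketch of the same computation, where `frames` is a hypothetical array of per-frame natural-log likelihoods:

    local function perplexity(frames)
        local total = 0
        for _, ll in ipairs(frames) do
            total = total + ll / math.log(10)  -- same value as math.log10(math.exp(ll))
        end
        return math.pow(10, -total / #frames)
    end
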
diff --git a/lua/reader.lua b/lua/reader.lua
deleted file mode 100644
index d2624d3..0000000
--- a/lua/reader.lua
+++ /dev/null
@@ -1,113 +0,0 @@
-local Reader = nerv.class('nerv.Reader')
-
-function Reader:__init(vocab_file, input_file)
- self:get_vocab(vocab_file)
- self:get_seq(input_file)
-end
-
-function Reader:get_vocab(vocab_file)
- local f = io.open(vocab_file, 'r')
- local id = 0
- self.vocab = {}
- while true do
- local word = f:read()
- if word == nil then
- break
- end
- self.vocab[word] = id
- id = id + 1
- end
- self.size = id
-end
-
-function Reader:split(s, t)
- local ret = {}
- for x in (s .. t):gmatch('(.-)' .. t) do
- table.insert(ret, x)
- end
- return ret
-end
-
-function Reader:get_seq(input_file)
- local f = io.open(input_file, 'r')
- self.seq = {}
- while true do
- local seq = f:read()
- if seq == nil then
- break
- end
- seq = self:split(seq, ' ')
- local tmp = {}
- for i = 1, #seq do
- if seq[i] ~= '' then
- table.insert(tmp, self.vocab[seq[i]])
- end
- end
- table.insert(self.seq, tmp)
- end
-end
-
-function Reader:get_in_out(id, pos)
- return self.seq[id][pos], self.seq[id][pos + 1], pos + 1 == #self.seq[id]
-end
-
-function Reader:get_all_batch(global_conf)
- local data = {}
- local pos = {}
- local offset = 1
- for i = 1, global_conf.batch_size do
- pos[i] = nil
- end
- while true do
- --for i = 1, 100 do
- local input = {}
- local output = {}
- for i = 1, global_conf.chunk_size do
- input[i] = global_conf.mmat_type(global_conf.batch_size, 1)
- input[i]:fill(global_conf.nn_act_default)
- output[i] = global_conf.mmat_type(global_conf.batch_size, 1)
- output[i]:fill(global_conf.nn_act_default)
- end
- local seq_start = {}
- local seq_end = {}
- local seq_len = {}
- for i = 1, global_conf.batch_size do
- seq_start[i] = false
- seq_end[i] = false
- seq_len[i] = 0
- end
- local has_new = false
- for i = 1, global_conf.batch_size do
- if pos[i] == nil then
- if offset <= #self.seq then -- '<=' so the final sequence in the corpus is also consumed
- seq_start[i] = true
- pos[i] = {offset, 1}
- offset = offset + 1
- end
- end
- if pos[i] ~= nil then
- has_new = true
- for j = 1, global_conf.chunk_size do
- local final
- input[j][i-1][0], output[j][i-1][0], final = self:get_in_out(pos[i][1], pos[i][2])
- seq_len[i] = j
- if final then
- seq_end[i] = true
- pos[i] = nil
- break
- end
- pos[i][2] = pos[i][2] + 1
- end
- end
- end
- if not has_new then
- break
- end
- for i = 1, global_conf.chunk_size do
- input[i] = global_conf.cumat_type.new_from_host(input[i])
- output[i] = global_conf.cumat_type.new_from_host(output[i])
- end
- table.insert(data, {input = input, output = output, seq_start = seq_start, seq_end = seq_end, seq_len = seq_len})
- end
- return data
-end
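
Reader:split appends one trailing delimiter and captures lazily, so consecutive delimiters yield empty captures; this is why get_seq filters out '' tokens before the vocabulary lookup. A worked example of that behaviour:

    local function split(s, t)
        local ret = {}
        for x in (s .. t):gmatch('(.-)' .. t) do
            table.insert(ret, x)
        end
        return ret
    end
    -- split('the  cat', ' ')  -->  { 'the', '', 'cat' }
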
diff --git a/lua/select_linear.lua b/lua/select_linear.lua
deleted file mode 100644
index a7e20cc..0000000
--- a/lua/select_linear.lua
+++ /dev/null
@@ -1,62 +0,0 @@
-local SL = nerv.class('nerv.SelectLinearLayer', 'nerv.Layer')
-
---id: string
---global_conf: table
---layer_conf: table
---Get Parameters
-function SL:__init(id, global_conf, layer_conf)
- self.id = id
- self.dim_in = layer_conf.dim_in
- self.dim_out = layer_conf.dim_out
- self.gconf = global_conf
-
- self.vocab = layer_conf.vocab
- self.ltp = self:find_param("ltp", layer_conf, global_conf, nerv.LinearTransParam, {self.vocab, self.dim_out[1]}) --layer_conf.ltp
-
- self:check_dim_len(1, 1)
-end
-
---Check parameter
-function SL:init(batch_size)
- if (self.dim_in[1] ~= 1) then --one word id
- nerv.error("mismatching dimensions of ltp and input")
- end
- if (self.dim_out[1] ~= self.ltp.trans:ncol()) then
- nerv.error("mismatching dimensions of bp and output")
- end
-
- self.batch_size = batch_size
- self.ltp:train_init()
-end
-
-function SL:update(bp_err, input, output)
- --use this to produce a reproducible result; don't forget to set the dropout rate to zero!
- --for i = 1, input[1]:nrow(), 1 do
- -- local word_vec = self.ltp.trans[input[1][i - 1][0]]
- -- word_vec:add(word_vec, bp_err[1][i - 1], 1, - self.gconf.lrate / self.gconf.batch_size)
- --end
-
- --I tried the update_select_rows kernel, which uses atomicAdd, but it generates unreproducible results
- self.ltp.trans:update_select_rows_by_colidx(bp_err[1], input[1], - self.gconf.lrate / self.gconf.batch_size, 0)
- self.ltp.trans:add(self.ltp.trans, self.ltp.trans, 1.0, - self.gconf.lrate * self.gconf.wcost)
-end
-
-function SL:propagate(input, output)
- --for i = 0, input[1]:ncol() - 1, 1 do
- -- if (input[1][0][i] > 0) then
- -- output[1][i]:copy_fromd(self.ltp.trans[input[1][0][i]])
- -- else
- -- output[1][i]:fill(0)
- -- end
- --end
- output[1]:copy_rows_fromd_by_colidx(self.ltp.trans, input[1])
-end
-
-function SL:back_propagate(bp_err, next_bp_err, input, output)
- --input is compressed, do nothing
-end
-
-function SL:get_params()
- local paramRepo = nerv.ParamRepo({self.ltp})
- return paramRepo
-end
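
SelectLinearLayer is an embedding lookup: propagate gathers one row of self.ltp.trans per input word id, and update applies a row-sparse SGD step followed by L2 weight decay on the whole table. A naive sketch of the lookup, assuming the one-word-id-per-row layout and 0-based indexing used by the commented-out update loop above:

    -- trans: embedding matrix; input/output: the layer's input[1]/output[1] matrices
    local function embed_rows(trans, input, output)
        for i = 1, input:nrow() do
            local word = input[i - 1][0]        -- one word id per batch row
            output[i - 1]:copy_fromd(trans[word])
        end
    end
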
diff --git a/lua/timer.lua b/lua/timer.lua
deleted file mode 100644
index 2c54ca8..0000000
--- a/lua/timer.lua
+++ /dev/null
@@ -1,33 +0,0 @@
-local Timer = nerv.class("nerv.Timer")
-
-function Timer:__init()
- self.last = {}
- self.rec = {}
-end
-
-function Timer:tic(item)
- self.last[item] = os.clock()
-end
-
-function Timer:toc(item)
- if (self.last[item] == nil) then
- nerv.error("item not there")
- end
- if (self.rec[item] == nil) then
- self.rec[item] = 0
- end
- self.rec[item] = self.rec[item] + os.clock() - self.last[item]
-end
-
-function Timer:check(item)
- if self.rec[item] == nil then
- nerv.error('item not there')
- end
- nerv.printf('"%s" lasts for %f secs.\n', item, self.rec[item])
-end
-
-function Timer:flush()
- for key, value in pairs(self.rec) do
- self.rec[key] = nil
- end
-end
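
Timer accumulates os.clock() deltas, i.e. CPU time rather than wall-clock time, keyed by label; tic/toc pairs for the same label add up across calls, and check prints the running total. Typical usage, matching how main.lua drives it above:

    local timer = nerv.Timer()
    timer:tic('IO')
    -- ... timed work ...
    timer:toc('IO')     -- adds the elapsed CPU time to rec['IO']
    timer:check('IO')   -- prints: "IO" lasts for ... secs.
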
diff --git a/lua/tnn.lua b/lua/tnn.lua
deleted file mode 100644
index bf9f118..0000000
--- a/lua/tnn.lua
+++ /dev/null
@@ -1,136 +0,0 @@
-nerv.include('select_linear.lua')
-
-local reader = nerv.class('nerv.TNNReader')
-
-function reader:__init(global_conf, data)
- self.gconf = global_conf
- self.offset = 0
- self.data = data
-end
-
-function reader:get_batch(feeds)
- self.offset = self.offset + 1
- if self.offset > #self.data then
- return false
- end
- for i = 1, self.gconf.chunk_size do
- feeds.inputs_m[i][1]:copy_from(self.data[self.offset].input[i])
- feeds.inputs_m[i][2]:copy_from(self.data[self.offset].output[i]:decompress(self.gconf.vocab_size))
- end
- feeds.flags_now = self.data[self.offset].flags
- feeds.flagsPack_now = self.data[self.offset].flagsPack
- return true
-end
-
-function reader:has_data(t, i)
- return t <= self.data[self.offset].seq_len[i]
-end
-
-function reader:get_err_input()
- return self.data[self.offset].err_input
-end
-
-local nn = nerv.class('nerv.NN')
-
-function nn:__init(global_conf, train_data, val_data, layers, connections)
- self.gconf = global_conf
- self.tnn = self:get_tnn(layers, connections)
- self.train_data = self:get_data(train_data)
- self.val_data = self:get_data(val_data)
-end
-
-function nn:get_tnn(layers, connections)
- self.gconf.dropout, self.gconf.dropout_rate = self.gconf.dropout_rate, 0 -- remember the configured rate (read back in nn:process), disable dropout during init
- local layer_repo = nerv.LayerRepo(layers, self.gconf.pr, self.gconf)
- local tnn = nerv.TNN('TNN', self.gconf, {dim_in = {1, self.gconf.vocab_size},
- dim_out = {1}, sub_layers = layer_repo, connections = connections,
- clip = self.gconf.clip})
- tnn:init(self.gconf.batch_size, self.gconf.chunk_size)
- return tnn
-end
-
-function nn:get_data(data)
- local ret = {}
- for i = 1, #data do
- ret[i] = {}
- ret[i].input = data[i].input
- ret[i].output = data[i].output
- ret[i].flags = {}
- ret[i].err_input = {}
- for t = 1, self.gconf.chunk_size do
- ret[i].flags[t] = {}
- local err_input = self.gconf.mmat_type(self.gconf.batch_size, 1)
- for j = 1, self.gconf.batch_size do
- if t <= data[i].seq_len[j] then
- ret[i].flags[t][j] = nerv.TNN.FC.SEQ_NORM
- err_input[j - 1][0] = 1
- else
- ret[i].flags[t][j] = 0
- err_input[j - 1][0] = 0
- end
- end
- ret[i].err_input[t] = self.gconf.cumat_type.new_from_host(err_input)
- end
- for j = 1, self.gconf.batch_size do
- if data[i].seq_start[j] then
- ret[i].flags[1][j] = bit.bor(ret[i].flags[1][j], nerv.TNN.FC.SEQ_START)
- end
- if data[i].seq_end[j] then
- local t = data[i].seq_len[j]
- ret[i].flags[t][j] = bit.bor(ret[i].flags[t][j], nerv.TNN.FC.SEQ_END)
- end
- end
- ret[i].flagsPack = {}
- for t = 1, self.gconf.chunk_size do
- ret[i].flagsPack[t] = 0
- for j = 1, self.gconf.batch_size do
- ret[i].flagsPack[t] = bit.bor(ret[i].flagsPack[t], ret[i].flags[t][j])
- end
- end
- ret[i].seq_len = data[i].seq_len
- end
- return ret
-end
-
-function nn:process(data, do_train)
- local total_err = 0
- local total_frame = 0
- local reader = nerv.TNNReader(self.gconf, data)
- while true do
- local r, _ = self.tnn:getfeed_from_reader(reader)
- if not r then
- break
- end
- if do_train then
- self.gconf.dropout_rate = self.gconf.dropout
- else
- self.gconf.dropout_rate = 0
- end
- self.tnn:net_propagate()
- for t = 1, self.gconf.chunk_size do
- local tmp = self.tnn.outputs_m[t][1]:new_to_host()
- for i = 1, self.gconf.batch_size do
- if reader:has_data(t, i) then
- total_err = total_err + math.log10(math.exp(tmp[i - 1][0]))
- total_frame = total_frame + 1
- end
- end
- end
- if do_train then
- local err_input = reader:get_err_input()
- for i = 1, self.gconf.chunk_size do
- self.tnn.err_inputs_m[i][1]:copy_from(err_input[i])
- end
- self.tnn:net_backpropagate(false)
- self.tnn:net_backpropagate(true)
- end
- collectgarbage('collect')
- end
- return math.pow(10, - total_err / total_frame)
-end
-
-function nn:epoch()
- local train_error = self:process(self.train_data, true)
- local val_error = self:process(self.val_data, false)
- return train_error, val_error
-end
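
The per-stream flags built in nn:get_data are OR-combined into a single word per timestep (flagsPack), so the TNN can test a whole timestep without scanning every stream. A minimal sketch of that packing, using bit.bor exactly as the loop above does:

    local function pack_flags(flags, chunk_size, batch_size)
        local pack = {}
        for t = 1, chunk_size do
            pack[t] = 0
            for j = 1, batch_size do
                pack[t] = bit.bor(pack[t], flags[t][j])
            end
        end
        return pack
    end
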