Browse Source

refactor

master
Gustavo Adolfo Mesa Roldán 3 years ago
parent
commit
4b2331c75e
1 changed file with 57 additions and 58 deletions
  1. +57
    -58
      lib/p1.ex

+ 57
- 58
lib/p1.ex View File

@ -14,6 +14,7 @@ defmodule P1 do
# Compile-time constants for the toy neural-network demo.
# NOTE(review): meanings inferred from usage in this module — confirm.
@lr 0.1        # learning-rate multiplier applied to gradients in update_nn/4
@rows 500      # row count of the offset matrix built in draw/1
@res 50        # grid resolution used by linspace/zeros in generate_draw/1
@n_train 2500  # iteration count at which loop_main/3 stops recursing
# Application entry point: logs a start message and launches a fire-and-forget
# demo task that sleeps one second and then reports completion.
# Returns the `Task.start/1` result ({:ok, pid}).
# NOTE(review): a stray diff hunk header line ("@ -23,85 +24,83 @@ …") was
# embedded in this body by the merged diff; removed so the function parses.
def start(_type, _args) do
  IO.puts "starting"
  Task.start(fn -> :timer.sleep(1000); IO.puts("done sleeping") end)
end
# Looping functions
#
# Terminal clause: once the iteration counter reaches @n_train, return the
# network held in the first argument.
# NOTE(review): reconstructed from merged-diff residue — the old (`2500`
# literal, do/end) and new (@n_train, `do:`) clauses were interleaved with
# dangling `end`s. Unused bindings are now underscore-prefixed.
def loop_main(neural_net, _n, @n_train), do:
  neural_net
# Recursive clause: continue training with the freshly trained network `n`
# by re-entering loop_main/2.
def loop_main(_neural_net, n, i), do:
  loop_main(n, i)
# Runs one training pass over the dataset, redraws the prediction heatmap
# every 25 iterations, then recurses via loop_main/3 until @n_train.
# NOTE(review): the merged diff fused the old body (explicit if/rem/paint)
# with the new `with`-based body; only the post-refactor body is kept here.
def loop_main(neural_net, i \\ 0) do
  with {_a, n} <- train(neural_net, Dataset.points, Dataset.values) do
    # Only multiples of 25 match the 0 pattern, so draw/1 fires every 25th
    # iteration; any other remainder falls through the `with` and is ignored.
    with 0 <- rem(i, 25), do: draw(n)
    loop_main(neural_net, n, i + 1)
  end
end
# NOTE(review): pre-refactor drawing routine, superseded by generate_draw/1
# and draw/1 below. Its closing `end` is missing in this view (lost in the
# merged diff), so the definition is incomplete as shown — left byte-for-byte.
def paint(nn) do
  x0 = Tools.linspace(String.to_float("-1.5"), 1.5, @res)
  x1 = Tools.linspace(String.to_float("-1.5"), 1.5, @res)
  # Evaluate the network (train flag false => no weight updates) at every
  # grid point and store the prediction into a @res x @res matrix.
  Enum.reduce(Enum.with_index(x0), zeros(@res), fn {i2, i1}, acc1 ->
    Enum.reduce(Enum.with_index(x1), acc1, fn {j2, j1}, acc2 ->
      set(acc2, i1+1, j1+1, elem(train(nn, new([[i2, j2]]), Dataset.values, false), 0)[1])
    end)
  end)
  # Overlay the (scaled, offset) training points, then render as a heatmap.
  |> set_points(Dataset.points |> multiply(20) |> add(new(@rows, 2, fn -> 25 end)), Dataset.values)
  |> heatmap(:color24bit, [title: "----------------------Solucion----------------------"])
# Train functions
# Feeds `x` through every layer in order, accumulating one
# {pre_activation, activation} pair per layer; the list starts with {nil, x}
# so entry l+1 corresponds to layer l.
def forward_pass(neural_net, x) do
  Enum.reduce(neural_net, [{nil, x}], fn layer, history ->
    {_z, activation} = List.last(history)
    z = Toolex.sum_inline(dot(activation, layer.w), layer.b)
    history ++ [{z, sigmoid(z)}]
  end)
end
# NOTE(review): residue of the pre-refactor train/4 (replaced by the new
# train/4 further down). The fragment below is truncated mid-body — the
# anonymous function and the `def` are never closed in this view.
def train(neural_net, x, y, train \\ true) do
  # Forward pass
  out = Enum.reduce(0..Enum.count(neural_net)-1, [{nil, x}], fn l, acc ->
    z = Toolex.sum_inline(dot(elem(List.last(acc), 1), Enum.at(neural_net, l).w), Enum.at(neural_net, l).b)
    acc ++ [{z, sigmoid(z)}]
# Backpropagation: walks layers from last to first, deriving each layer's
# delta via get_delta/6 and applying the gradient step via update_nn/4.
#
# Returns {updated_net, out, last_delta_list, last_transposed_weights} —
# the shape destructured by train/4.
# NOTE(review): the second parameter was an unused `x` (compiler warning);
# underscore-prefixed here — presumably kept for call-site symmetry, confirm
# before removing. `z` in the `with` pattern was likewise unused.
def backward_pass(neural_net, _x, y, out) do
  Enum.reduce(Enum.count(neural_net)-1..0, {neural_net, out, [], nil}, fn l, {neural_net_acc, out_acc, delta_acc, w_acc} ->
    # out entry l+1 holds layer l's {pre_activation, activation}; only the
    # activation `a` feeds the delta computation.
    with {_z, a} <- Enum.at(out_acc, l+1),
         delta <- get_delta(neural_net, delta_acc, w_acc, a, y, l),
         do: {update_nn(neural_net_acc, out_acc, l, delta), out_acc, delta, transpose(Enum.at(neural_net, l).w)}
  end)
end
# NOTE(review): everything in this span is residue of the pre-refactor
# train/4 body (inline backward pass + return tuple). It was replaced by
# backward_pass/4, get_delta/6 and update_nn/4 and is syntactically orphaned
# here — its enclosing `def`/`if`/`fn` are not closed in this view. Kept
# byte-for-byte for reference only.
# Backward pass
if train do
  {out_o, _delta, neural_net_o, _w} = Enum.reduce(Enum.count(neural_net)-1..0, {out, [], neural_net, nil}, fn l, {out_acc, delta_acc, neural_net_acc, w_acc} ->
    {z, a} = Enum.at(out_acc, l+1)
    # Output layer uses cost derivative; hidden layers backpropagate the
    # previous delta through the transposed weights.
    d = case l == Enum.count(neural_net)-1 do
      true -> [Matrex.multiply(Toolex.cost_d(a, transpose(y)), Toolex.sigm_d(a))]
      false -> Enum.concat([multiply(dot(Enum.at(delta_acc, 0), w_acc), Toolex.sigm_d(a))], delta_acc)
    end
    # Gradient step on layer l's bias and weights, scaled by @lr.
    neural_net_acc_N = List.update_at(neural_net_acc, l, fn n ->
      %{
        b: n.b - multiply(Toolex.mean(Enum.at(d, 0)), @lr),
        w: n.w - multiply(dot(transpose(elem(Enum.at(out_acc, l),1)), Enum.at(d, 0)), @lr)
      }
    end)
    {out_acc, d, neural_net_acc_N, transpose(Enum.at(neural_net, l).w)}
  end)
  {elem(List.last(out_o),1), neural_net_o}
else
  {elem(List.last(out),1), neural_net}
# Computes the delta list for layer `l`: the output layer uses the cost
# derivative against the labels; hidden layers backpropagate the newest
# delta through the previously transposed weights `w_acc`. The new delta is
# prepended, so head of the list is always the most recent layer's delta.
def get_delta(neural_net, delta_acc, w_acc, a, y, l) do
  output_layer? = l == Enum.count(neural_net) - 1

  if output_layer? do
    [Matrex.multiply(Toolex.cost_d(a, transpose(y)), Toolex.sigm_d(a))]
  else
    [multiply(dot(Enum.at(delta_acc, 0), w_acc), Toolex.sigm_d(a)) | delta_acc]
  end
end
# NOTE(review): residue of the pre-refactor set_points/3 (superseded by the
# `do:` version further down, which collapses the redundant 0->0 / 1->1
# case into a plain trunc). The fragment is truncated — its `fn`, reduce and
# `def` are never closed in this view. Kept byte-for-byte.
def set_points(m, points, values) do
  Enum.reduce(1..Enum.count(values), m, fn p, acc ->
    set(acc, trunc(points[p][1]), trunc(points[p][2]),
      (case trunc(values[p]) do
        0 -> 0
        1 -> 1
      end))
# Applies one gradient step to layer `l`: bias moves by the mean of the
# newest delta, weights by (prev_activation^T . delta), both scaled by @lr.
# Returns the network list with only layer `l` replaced.
def update_nn(neural_net, out, l, delta) do
  newest_delta = Enum.at(delta, 0)
  prev_activation = elem(Enum.at(out, l), 1)

  List.update_at(neural_net, l, fn layer ->
    %{
      b: layer.b - multiply(Toolex.mean(newest_delta), @lr),
      w: layer.w - multiply(dot(transpose(prev_activation), newest_delta), @lr)
    }
  end)
end
# NOTE(review): an orphan `def neural_layer(n_conn, n_neur) do` head
# (merged-diff residue) preceded this function and broke parsing; removed.
#
# Runs a forward pass and, when `train` is true, a backward pass that
# updates the network. Returns {final_activation, neural_net} where the
# net is updated only in the training branch.
def train(neural_net, x, y, train \\ true) do
  out = forward_pass(neural_net, x)

  if train do
    # backward_pass/4 returns {updated_net, out, last_delta, last_w^T}.
    {neural_net_o, out_o, _delta, _w} = backward_pass(neural_net, x, y, out)
    {elem(List.last(out_o), 1), neural_net_o}
  else
    {elem(List.last(out), 1), neural_net}
  end
end
# Neural net creation functions
# Builds one dense layer: bias (1 x n_neur) and weights (n_conn x n_neur),
# each initialised as random |> *2 |> -1 (presumably mapping a [0,1) uniform
# draw onto [-1,1) — confirm against Matrex.random docs).
# NOTE(review): the original `do:` one-liner was followed by a dangling `end`
# (merged-diff residue); rewritten as a do/end block so the span parses.
def neural_layer(n_conn, n_neur) do
  %{
    b: random(1, n_neur) |> multiply(2) |> subtract(1),
    w: random(n_conn, n_neur) |> multiply(2) |> subtract(1)
  }
end
# Builds the network from a topology vector: layer n takes topology[n]
# inputs and has topology[n+1] neurons (1-based Matrex-style indexing).
# NOTE(review): an orphan old `def create_nn(topology) do` head and a
# dangling `end` surrounded the `do:` clause (merged-diff residue); folded
# into a single do/end definition so the span parses.
def create_nn(topology) do
  for n <- 1..Enum.count(topology)-1, do: neural_layer(trunc(topology[n]), trunc(topology[n+1]))
end
# Draw functions
# Stamps every dataset point onto matrix `m`, writing the truncated class
# value at the point's (row, col) coordinates. Returns the updated matrix.
def set_points(m, points, values) do
  Enum.reduce(1..Enum.count(values), m, fn idx, canvas ->
    row = trunc(points[idx][1])
    col = trunc(points[idx][2])
    set(canvas, row, col, trunc(values[idx]))
  end)
end
# Evaluates the network (without training) over a @res x @res grid spanning
# [-1.5, 1.5] on both axes; returns the prediction matrix.
def generate_draw(nn) do
  # Literal -1.5 instead of the original String.to_float("-1.5") — same
  # value, no pointless runtime parse. Plain binding replaces `with x <- …`.
  ls = Tools.linspace(-1.5, 1.5, @res)

  Enum.reduce(Enum.with_index(ls), zeros(@res), fn {x0, row}, acc1 ->
    Enum.reduce(Enum.with_index(ls), acc1, fn {x1, col}, acc2 ->
      # train(..., false) only forward-passes; elem(..., 0)[1] extracts the
      # scalar prediction for grid cell (row+1, col+1).
      set(acc2, row + 1, col + 1, elem(train(nn, new([[x0, x1]]), Dataset.values, false), 0)[1])
    end)
  end)
end
# Renders the network's current prediction as a heatmap with the (scaled,
# offset) training points overlaid on top.
def draw(nn) do
  offsets = new(@rows, 2, fn -> 25 end)
  scaled_points = Dataset.points |> multiply(16.5) |> add(offsets)

  nn
  |> generate_draw()
  |> set_points(scaled_points, Dataset.values)
  |> heatmap(:color24bit, [title: "----------------------Prediction----------------------"])
end
end

Loading…
Cancel
Save