defmodule P1 do
  @moduledoc """
  Documentation for `P1`.

  Builds a small fully connected neural network (backed by Matrex),
  trains it on `Dataset.points`/`Dataset.values`, and periodically
  renders the decision surface as a terminal heatmap.
  """

  import Matrex.Operators
  # Matrex.Operators redefines the arithmetic operators for matrices,
  # so the Kernel versions must be excluded to avoid import conflicts.
  import Kernel, except: [-: 1, +: 2, -: 2, *: 2, /: 2, <|>: 2]
  import Matrex

  # Number of network inputs (2-D points).
  @p 2
  # Layer sizes: @p inputs -> 4 -> 8 -> 1 output neuron.
  @topology new([[@p, 4, 8, 1]])
  # Gradient-descent learning rate.
  @lr 0.1
  # Row count used when painting the dataset points; assumed to match
  # the number of points in Dataset.points — TODO confirm.
  @rows 500
  # Resolution (cells per axis) of the painted decision surface.
  @res 50

  @doc """
  Run to start app.
  """
# Application entry point: builds a fresh network from @topology and
# runs the training loop to completion, then lingers briefly before
# returning. The `Task.start/1` result ({:ok, pid}) is the return value.
def start(_type, _args) do
  IO.puts("starting")

  @topology
  |> create_nn()
  |> loop_main()

  IO.puts("Ending")
  :timer.sleep(1000)

  Task.start(fn ->
    :timer.sleep(1000)
    IO.puts("done sleeping")
  end)
end
# Terminal clause: after 2500 iterations return the freshly trained
# network. The original returned the first argument (the network from
# the PREVIOUS iteration), silently discarding the final training step;
# `trained` is the result of the most recent `train/4` call.
def loop_main(_stale_net, trained, 2500) do
  trained
end

# Step clause: keep looping with the freshly trained network.
# Unused bindings are underscored to silence compiler warnings.
def loop_main(_stale_net, trained, i) do
  loop_main(trained, i)
end
# Performs one training pass per call and repaints the decision surface
# every 25 iterations. `i` is the iteration counter (defaults to 0);
# termination is handled by the loop_main/3 clause matching i == 2500.
def loop_main(neural_net, i \\ 0) do
  # train/4 returns {output_activations, updated_network}; only the
  # updated network is needed here, so the activations are discarded
  # (the original bound them to an unused `a`, causing a warning).
  {_out, n} = train(neural_net, Dataset.points, Dataset.values)

  if rem(i, 25) == 0 do
    paint(n)
  end

  loop_main(neural_net, n, i + 1)
end
# Renders the network's decision surface over [-1.5, 1.5] x [-1.5, 1.5]
# as a @res x @res heatmap, with the (scaled/offset) dataset points
# stamped on top.
def paint(nn) do
  # Kernel's unary minus is excluded by the imports at the top of the
  # module, so it is invoked explicitly rather than parsing the
  # constant from a string at runtime (the original used
  # String.to_float("-1.5")). Both axes are identical, so the linspace
  # is computed once instead of twice.
  lo = Kernel.-(1.5)
  axis = Tools.linspace(lo, 1.5, @res)

  Enum.reduce(Enum.with_index(axis), zeros(@res), fn {x, i}, acc1 ->
    Enum.reduce(Enum.with_index(axis), acc1, fn {y, j}, acc2 ->
      # Forward pass only (train = false); paint the first output value.
      # Matrex is 1-indexed, hence the +1 on the grid coordinates.
      set(acc2, i + 1, j + 1, elem(train(nn, new([[x, y]]), Dataset.values, false), 0)[1])
    end)
  end)
  |> set_points(Dataset.points |> multiply(20) |> add(new(@rows, 2, fn -> 25 end)), Dataset.values)
  |> heatmap(:color24bit, [title: "----------------------Solucion----------------------"])
end
# Runs a forward pass of input `x` through `neural_net` and, when
# `train` is true, a backward pass (gradient descent with rate @lr)
# against the expected outputs `y`.
#
# Returns `{output_activations, network}`, where the network carries the
# weight/bias updates when training and is returned unchanged otherwise.
def train(neural_net, x, y, train \\ true) do
  # Hoisted: the layer count is loop-invariant but was recomputed on
  # every layer (including inside the backward-pass closure).
  layers = Enum.count(neural_net)

  # Forward pass: accumulate one {z, activation} pair per layer, seeded
  # with the raw input as the "activation" of a virtual layer 0.
  out =
    Enum.reduce(0..(layers - 1), [{nil, x}], fn l, acc ->
      layer = Enum.at(neural_net, l)
      z = Toolex.sum_inline(dot(elem(List.last(acc), 1), layer.w), layer.b)
      acc ++ [{z, sigmoid(z)}]
    end)

  if train do
    # Backward pass: walk the layers in reverse, building the delta list
    # front-to-back while updating each layer's weights and biases.
    # `w_acc` carries the transposed weights of the layer above (from the
    # ORIGINAL network, matching the source's use of `neural_net`).
    {out_o, _delta, neural_net_o, _w} =
      Enum.reduce((layers - 1)..0, {out, [], neural_net, nil}, fn l,
                                                                  {out_acc, delta_acc, net_acc,
                                                                   w_acc} ->
        # `z` is not needed during backprop; only the activation is.
        {_z, a} = Enum.at(out_acc, l + 1)

        delta =
          if l == layers - 1 do
            # Output layer: delta straight from the cost derivative.
            [Matrex.multiply(Toolex.cost_d(a, transpose(y)), Toolex.sigm_d(a))]
          else
            # Hidden layer: propagate the delta of the layer above
            # through its transposed weights. ([x | l] replaces the
            # original Enum.concat([x], l).)
            [multiply(dot(hd(delta_acc), w_acc), Toolex.sigm_d(a)) | delta_acc]
          end

        updated_net =
          List.update_at(net_acc, l, fn layer ->
            grad = hd(delta)

            # Matrex.Operators' `-` applies elementwise on matrices.
            %{
              b: layer.b - multiply(Toolex.mean(grad), @lr),
              w: layer.w - multiply(dot(transpose(elem(Enum.at(out_acc, l), 1)), grad), @lr)
            }
          end)

        {out_acc, delta, updated_net, transpose(Enum.at(neural_net, l).w)}
      end)

    {elem(List.last(out_o), 1), neural_net_o}
  else
    {elem(List.last(out), 1), neural_net}
  end
end
# Stamps every dataset point onto matrix `m` as its class label.
# Matrex access is 1-indexed, hence the 1..count range.
def set_points(m, points, values) do
  1..Enum.count(values)
  |> Enum.reduce(m, fn idx, canvas ->
    # The case is an identity on 0/1 but doubles as an assertion that
    # every label is exactly 0 or 1 (anything else raises).
    label =
      case trunc(values[idx]) do
        0 -> 0
        1 -> 1
      end

    set(canvas, trunc(points[idx][1]), trunc(points[idx][2]), label)
  end)
end
# Builds one dense layer with `n_conn` inputs and `n_neur` neurons.
# random/2 yields values in [0, 1); scaling by 2 and subtracting 1
# spreads both biases and weights over [-1, 1).
def neural_layer(n_conn, n_neur) do
  init = fn matrix -> matrix |> multiply(2) |> subtract(1) end

  %{
    b: init.(random(1, n_neur)),
    w: init.(random(n_conn, n_neur))
  }
end
# Creates the network from a 1xN topology row: one layer per adjacent
# pair of sizes (topology[n] inputs feeding topology[n + 1] neurons).
def create_nn(topology) do
  1..(Enum.count(topology) - 1)
  |> Enum.map(fn n -> neural_layer(trunc(topology[n]), trunc(topology[n + 1])) end)
end
end