defmodule P1old do
  @moduledoc """
  Documentation for `P1old`.

  A small fully connected neural network (topology 2-4-8-1) trained with
  gradient descent on `Dataset.points` / `Dataset.values`, drawing its
  decision surface as a Matrex heatmap while it trains.
  """

  # Matrex.Operators redefines the arithmetic operators for matrices, so the
  # Kernel versions have to be excluded explicitly to avoid ambiguity.
  import Matrex.Operators
  import Kernel, except: [-: 1, +: 2, -: 2, *: 2, /: 2, <|>: 2]
  import Matrex

  @p 2                             # number of input features per point
  @topology new([[@p, 4, 8, 1]])   # layer sizes: 2 inputs, hidden layers of 4 and 8 neurons, 1 output
  @lr 0.1                          # learning rate
  @rows 500                        # number of points in the dataset
  @res 50                          # heatmap resolution (rows and columns)

  @doc """
  Run to start the app: trains the network for 2500 iterations and redraws the
  decision-surface heatmap every 25 iterations.
  """
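  # Usage sketch (an assumption: this module does not `use Application` itself,
  # so `start/2` can simply be called directly and its arguments are ignored):
  #
  #     P1old.start(:normal, [])
  #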
  def start(_type, _args) do
    IO.puts("starting")

    # Grid the decision surface is sampled on; it does not change between
    # iterations. Kernel.- is called explicitly because the unary minus
    # operator is excluded from Kernel in favour of the Matrex one.
    x0 = Tools.linspace(Kernel.-(1.5), 1.5, @res)
    x1 = Tools.linspace(Kernel.-(1.5), 1.5, @res)

    Enum.reduce(0..2500, create_nn(@topology), fn i, neural_net_acc ->
      # One full training step (forward + backward pass) over the whole dataset.
      {_a, n} = train(neural_net_acc, Dataset.points, Dataset.values)

      # Every 25 iterations, evaluate the network over the grid and redraw it.
      if rem(i, 25) == 0 do
        Enum.reduce(Enum.with_index(x0), zeros(@res), fn {x0_val, i1}, acc1 ->
          Enum.reduce(Enum.with_index(x1), acc1, fn {x1_val, j1}, acc2 ->
            # Forward pass only (train: false); take the single output value.
            set(acc2, i1 + 1, j1 + 1, elem(train(n, new([[x0_val, x1_val]]), Dataset.values, false), 0)[1])
          end)
        end)
        |> set_points(Dataset.points |> multiply(20) |> add(new(@rows, 2, fn -> 25 end)), Dataset.values)
        |> heatmap(:color24bit, title: "----------------------Thinking----------------------")
        # :timer.sleep(250)
      end

      n
    end)

    IO.puts("Ending")
    :timer.sleep(1000)

    Task.start(fn ->
      :timer.sleep(1000)
      IO.puts("done sleeping")
    end)
  end
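  @doc """
  Runs a forward pass through the network and, when `train` is `true`, one
  gradient-descent backward pass, returning `{output, updated_network}`.
  With `train: false` only the forward pass runs and the network is returned
  unchanged. A minimal sketch (an illustration only, assuming `Dataset.values`
  is available as in `start/2`):

      net = P1old.create_nn(Matrex.new([[2, 4, 8, 1]]))
      {prediction, _net} = P1old.train(net, Matrex.new([[0.5, 0.5]]), Dataset.values, false)
  """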
  def train(neural_net, x, y, train \\ true) do
    # Forward pass: accumulate one {z, activation} pair per layer,
    # seeded with {nil, input}.
    out =
      Enum.reduce(0..(Enum.count(neural_net) - 1), [{nil, x}], fn l, acc ->
        z = Toolex.sum_inline(dot(elem(List.last(acc), 1), Enum.at(neural_net, l).w), Enum.at(neural_net, l).b)
        acc ++ [{z, sigmoid(z)}]
      end)

    if train do
      # Backward pass: walk the layers in reverse, building up the list of
      # deltas and applying the gradient-descent update to each layer.
      {out_o, _delta, neural_net_o, _w} =
        Enum.reduce((Enum.count(neural_net) - 1)..0, {out, [], neural_net, nil}, fn l, {out_acc, delta_acc, neural_net_acc, w_acc} ->
          {_z, a} = Enum.at(out_acc, l + 1)

          d =
            case l == Enum.count(neural_net) - 1 do
              # Output layer: delta from the cost derivative.
              true -> [Matrex.multiply(Toolex.cost_d(a, transpose(y)), Toolex.sigm_d(a))]
              # Hidden layers: propagate the last delta back through the next layer's weights.
              false -> Enum.concat([multiply(dot(Enum.at(delta_acc, 0), w_acc), Toolex.sigm_d(a))], delta_acc)
            end

          # b := b - lr * mean(delta),  w := w - lr * (a_prev^T . delta)
          new_neural_net_acc =
            List.update_at(neural_net_acc, l, fn n ->
              %{
                b: n.b - multiply(Toolex.mean(Enum.at(d, 0)), @lr),
                w: n.w - multiply(dot(transpose(elem(Enum.at(out_acc, l), 1)), Enum.at(d, 0)), @lr)
              }
            end)

          {out_acc, d, new_neural_net_acc, transpose(Enum.at(neural_net, l).w)}
        end)

      {elem(List.last(out_o), 1), neural_net_o}
    else
      {elem(List.last(out), 1), neural_net}
    end
  end
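  @doc """
  Writes each dataset point's class label (0 or 1) into the matrix `m`, using
  the point's two columns as row/column indices. A sketch of the call made in
  `start/2` and `loop/0`, where the multiply/add step scales the raw points
  into the heatmap's index range:

      zeros(@res)
      |> set_points(Dataset.points |> multiply(20) |> add(new(@rows, 2, fn -> 25 end)), Dataset.values)
      |> heatmap(:color24bit)
  """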
  def set_points(m, points, values) do
    # Overlay the (already scaled) dataset points on the heatmap matrix,
    # writing each point's class label into its cell; only the labels
    # 0 and 1 are accepted.
    Enum.reduce(1..Enum.count(values), m, fn p, acc ->
      set(acc, trunc(points[p][1]), trunc(points[p][2]),
        case trunc(values[p]) do
          0 -> 0
          1 -> 1
        end)
    end)
  end
  # Not referenced elsewhere in this module: maps a row vector onto a
  # size x size matrix so it can be drawn as a simple line graph.
  def set_graph(m, points) do
    size = points[:columns]
    points = truncate((size + 1) - (sigmoid(points) * size))
    Enum.reduce(1..size, m, fn p, acc ->
      set(acc, trunc(points[p]), p, 1)
    end)
  end
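  @doc """
  Builds one fully connected layer as a map with a `1 x n_neur` bias matrix
  `b` and an `n_conn x n_neur` weight matrix `w`. A small sketch of the
  resulting shapes (the values themselves are random), using the same
  `[:rows]`/`[:columns]` access as elsewhere in this module:

      layer = P1old.neural_layer(2, 4)
      layer.w[:rows]     #=> 2
      layer.b[:columns]  #=> 4
  """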
  def neural_layer(n_conn, n_neur) do
    # Biases and weights initialised uniformly in [-1, 1).
    %{
      b: random(1, n_neur) |> multiply(2) |> subtract(1),
      w: random(n_conn, n_neur) |> multiply(2) |> subtract(1)
    }
  end
  def create_nn(topology) do
    # One layer per consecutive pair of sizes in the topology vector.
    for n <- 1..(Enum.count(topology) - 1), do: neural_layer(trunc(topology[n]), trunc(topology[n + 1]))
  end
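  # Standalone render loop, not called from start/2: redraws the dataset
  # points roughly every 16 ms. A usage sketch (an assumption; it never
  # returns, so it is best started in its own process):
  #
  #     Task.start(fn -> P1old.loop() end)
  #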
  def loop do
    :timer.sleep(16)

    zeros(50)
    |> set_points(Dataset.points |> multiply(20) |> add(new(@rows, 2, fn -> 25 end)), Dataset.values)
    |> heatmap(:color24bit)

    loop()
  end
end