|
defmodule P1 do
|
|
import Matrex.Operators
|
|
import Kernel, except: [-: 1, +: 2, -: 2, *: 2, /: 2, <|>: 2]
|
|
import Matrex
|
|
@moduledoc """
Documentation for `P1`.
"""

# NOTE(review): this @doc is separated from `start/2` by the attribute
# definitions below; it still attaches to the next `def` (start/2).
@doc """
Run to start app.
"""

# Number of input features per sample (width of the first layer).
@p 2

# Layer widths of the network, stored as a 1x4 Matrex row: @p -> 4 -> 8 -> 1.
# `new/1` comes from `import Matrex` (evaluated at compile time).
@topology new([[@p, 4, 8, 1]])

# Learning rate applied to every weight/bias update in update_nn/4.
@lr 0.1

# Row count of the overlay matrix built in draw/1 — presumably the number
# of dataset points; TODO confirm against Dataset.points.
@rows 500

# Resolution (rows and columns) of the prediction heatmap in generate_draw/1.
@res 50

# Number of training iterations before loop_main/3 stops recursing.
@n_train 2500
|
|
|
|
# Entry point: builds the network from @topology, runs the training loop,
# then demonstrates a fire-and-forget task before returning.
def start(_type, _args) do
  IO.puts("starting")

  @topology
  |> create_nn()
  |> loop_main()

  IO.puts("Ending")
  :timer.sleep(1000)

  Task.start(fn ->
    :timer.sleep(1000)
    IO.puts("done sleeping")
  end)
end
|
|
|
|
# Looping functions
|
|
# Training loop driver.
#
# loop_main/2 runs one training step and hands the result to loop_main/3,
# which stops after @n_train iterations or recurses back into loop_main/2
# with the freshly trained net.
#
# Fix: the unused bindings (`neural_net` in the recursing /3 clause, `n` in
# the terminal clause, `a` in the `with`) produced compiler warnings; they
# are now underscore-prefixed. Behavior is unchanged.
#
# NOTE(review): the terminal clause returns the net from *before* the final
# training step (`neural_net`, not `_trained`) — confirm this is intended.
def loop_main(neural_net, _trained, @n_train), do:
  neural_net

def loop_main(_neural_net, trained, i), do:
  loop_main(trained, i)

def loop_main(neural_net, i \\ 0) do
  # train/3 returns {output, updated_net}; the raw output is not needed here.
  with {_out, updated} <- train(neural_net, Dataset.points, Dataset.values) do
    # Redraw the prediction heatmap every 25 iterations.
    with 0 <- rem(i, 25), do: draw(updated)
    loop_main(neural_net, updated, i + 1)
  end
end
|
|
|
|
# Train functions
|
|
# Feeds input `x` through every layer, collecting {z, activation} pairs.
# The result starts with {nil, x} (the raw input) followed by one entry per
# layer, so entry l+1 holds layer l's pre-activation z and activation.
def forward_pass(neural_net, x) do
  Enum.reduce(0..Enum.count(neural_net)-1, [{nil, x}], fn idx, outputs ->
    layer = Enum.at(neural_net, idx)
    {_z, prev_activation} = List.last(outputs)
    z = Toolex.sum_inline(dot(prev_activation, layer.w), layer.b)
    outputs ++ [{z, sigmoid(z)}]
  end)
end
|
|
|
|
# Backpropagation over all layers, from the output layer down to layer 0.
#
# Reduces from the last layer index to 0, threading an accumulator of
# {net-so-far, forward-pass outputs, delta list, transposed weights of the
# layer above}. Each step computes the layer's delta via get_delta/6 and
# applies the weight/bias update via update_nn/4.
#
# Returns {updated_net, out, deltas, transposed_weights}.
#
# Fix: the `x` parameter and the `z` binding were unused and produced
# compiler warnings; both are now underscore-prefixed (no behavior change,
# callers still pass the same positional arguments).
def backward_pass(neural_net, _x, y, out) do
  Enum.reduce(Enum.count(neural_net)-1..0, {neural_net, out, [], nil}, fn l, {neural_net_acc, out_acc, delta_acc, w_acc} ->
    # out_acc entry l+1 is {z, activation} of layer l (entry 0 is the input).
    with {_z, a} <- Enum.at(out_acc, l+1), delta <- get_delta(neural_net, delta_acc, w_acc, a, y, l), do:
      {update_nn(neural_net_acc, out_acc, l, delta), out_acc, delta, transpose(Enum.at(neural_net, l).w)}
  end)
end
|
|
|
|
# Computes the delta list for layer `l` during backpropagation, prepending
# the new delta so that head of the list is always the current layer's.
def get_delta(neural_net, delta_acc, w_acc, a, y, l) do
  if l == Enum.count(neural_net)-1 do
    # Output layer: cost derivative times sigmoid derivative of activation.
    [Matrex.multiply(Toolex.cost_d(a, transpose(y)), Toolex.sigm_d(a))]
  else
    # Hidden layer: propagate the layer above's delta back through its
    # transposed weights (w_acc), then scale by the sigmoid derivative.
    [multiply(dot(Enum.at(delta_acc, 0), w_acc), Toolex.sigm_d(a)) | delta_acc]
  end
end
|
|
|
|
# Applies one gradient-descent step (scaled by @lr) to layer `l`.
# `delta`'s head is the current layer's delta; `out` entry `l` holds the
# activation feeding into this layer.
def update_nn(neural_net, out, l, delta) do
  layer_delta = Enum.at(delta, 0)

  List.update_at(neural_net, l, fn layer ->
    {_z, prev_activation} = Enum.at(out, l)

    %{
      b: layer.b - multiply(Toolex.mean(layer_delta), @lr),
      w: layer.w - multiply(dot(transpose(prev_activation), layer_delta), @lr)
    }
  end)
end
|
|
|
|
# Runs a forward pass and, when `train` is true, a backward pass as well.
# Returns {final_layer_output, net}; the net is updated only when training.
def train(neural_net, x, y, train \\ true) do
  out = forward_pass(neural_net, x)

  if train do
    {updated_net, trained_out, _delta, _w} = backward_pass(neural_net, x, y, out)
    {elem(List.last(trained_out), 1), updated_net}
  else
    {elem(List.last(out), 1), neural_net}
  end
end
|
|
|
|
# Neural net creation functions
|
|
# Builds one layer: a bias row (1 x n_neur) and a weight matrix
# (n_conn x n_neur), both rescaled from random/2's output into [-1, 1).
def neural_layer(n_conn, n_neur) do
  %{
    b: subtract(multiply(random(1, n_neur), 2), 1),
    w: subtract(multiply(random(n_conn, n_neur), 2), 1)
  }
end
|
|
|
|
# Builds the network described by `topology` (a row of layer widths):
# one layer per consecutive pair of widths.
def create_nn(topology) do
  Enum.map(1..Enum.count(topology)-1, fn idx ->
    neural_layer(trunc(topology[idx]), trunc(topology[idx + 1]))
  end)
end
|
|
|
|
# Draw functions
|
|
# Stamps each dataset point onto matrix `m`: the point's two coordinates
# select the cell and its value becomes the cell's contents.
def set_points(m, points, values) do
  Enum.reduce(1..Enum.count(values), m, fn idx, canvas ->
    set(canvas, trunc(points[idx][1]), trunc(points[idx][2]), trunc(values[idx]))
  end)
end
|
|
|
|
# Builds a @res x @res matrix of network predictions over the square
# [-1.5, 1.5] x [-1.5, 1.5], for rendering as a heatmap.
#
# Fix: the lower bound was computed as String.to_float("-1.5") at runtime;
# the float literal -1.5 has the identical value and avoids the parse.
def generate_draw(nn) do
  ls = Tools.linspace(-1.5, 1.5, @res)

  Enum.reduce(Enum.with_index(ls), zeros(@res), fn {x_val, row}, acc1 ->
    Enum.reduce(Enum.with_index(ls), acc1, fn {y_val, col}, acc2 ->
      # Inference only (train = false); keep the first output element.
      prediction = elem(train(nn, new([[x_val, y_val]]), Dataset.values, false), 0)
      set(acc2, row + 1, col + 1, prediction[1])
    end)
  end)
end
|
|
|
|
# Renders the current prediction surface with the training points overlaid.
def draw(nn) do
  overlay = Dataset.points |> multiply(16.5) |> add(new(@rows, 2, fn -> 25 end))

  nn
  |> generate_draw()
  |> set_points(overlay, Dataset.values)
  |> heatmap(:color24bit, title: "----------------------Prediction----------------------")
end
|
|
end
|