defmodule P1 do
# Operator order matters: Matrex.Operators supplies the arithmetic operators
# that Kernel is told (below) not to import.
import Matrex.Operators
import Kernel, except: [-: 1, +: 2, -: 2, *: 2, /: 2, <|>: 2]
import Matrex
@moduledoc """
Documentation for `P1`.
"""
@doc """
Run to start app.
"""
# Number of input features per sample.
@p 2
# Layer sizes as a 1x4 Matrex: input -> hidden(4) -> hidden(8) -> output(1).
@topology new([[@p, 4, 8, 1]])
# Learning rate. NOTE(review): not referenced by train/4 in this file —
# weight updates appear to be missing; confirm before relying on training.
@lr 0.5
# Row count of the offset matrix used when plotting the dataset.
@rows 6
@doc """
Application entry point: renders the dataset and a sigmoid curve as terminal
heatmaps, runs one pass of `train/3`, then exits after a short delay.

Returns the result of `Task.start/1` (`{:ok, pid}`).
"""
def start(_type, _args) do
  IO.puts("starting")
  # NOTE(review): unary `-` is excluded from the Kernel import at the top of
  # this module, so a literal `-5` would dispatch to Matrex.Operators;
  # String.to_integer("-5") presumably works around that — confirm.
  IO.puts(inspect(Tools.linspace(String.to_integer("-5"), 5, 10)))

  # Scatter-plot the dataset on a 50x50 grid, scaled by 20 and offset by 25.
  zeros(50)
  |> set_points(Dataset.points |> multiply(20) |> add(new(@rows, 2, fn -> 25 end)), Dataset.values)
  |> heatmap(:color24bit)

  # Plot the sigmoid of 10 evenly spaced points on a 10x10 grid.
  zeros(10)
  |> set_graph(Tools.linspace(String.to_integer("-5"), 5, 10))
  |> heatmap(:color24bit)

  # One forward/backward pass over the whole dataset.
  train(create_nn(@topology), Dataset.points, Dataset.values)

  IO.puts("Ending")
  :timer.sleep(1000)

  Task.start(fn ->
    :timer.sleep(1000)
    IO.puts("done sleeping")
  end)
end
@doc """
Redraws the dataset heatmap forever, sleeping ~16ms between frames.

Never returns.
"""
def loop do
  # ~16ms per frame (~60 fps).
  :timer.sleep(16)

  zeros(50)
  |> set_points(Dataset.points |> multiply(20) |> add(new(@rows, 2, fn -> 25 end)), Dataset.values)
  |> heatmap(:color24bit)

  # Parenthesized recursive call: a bare `loop` is ambiguous with a variable
  # and draws a compiler warning on modern Elixir.
  loop()
end
@doc """
Writes every sample of `points` into matrix `m`.

Column 1 / column 2 of each point row are truncated and used as the target
row / column; the stored cell value is `values[p] + 2`.
"""
def set_points(m, points, values) do
  for idx <- 1..Enum.count(values), reduce: m do
    canvas ->
      row = trunc(points[idx][1])
      col = trunc(points[idx][2])
      set(canvas, row, col, values[idx] + 2)
  end
end
@doc """
Plots `points` (passed through `sigmoid/1`) into matrix `m` as a 1-valued
curve, inverted vertically so larger sigmoid values land on lower row indices.
"""
def set_graph(m, points) do
  size = Enum.count(points)
  # Map sigmoid output onto rows (size + 1)..1; arithmetic dispatches to the
  # imported Matrex operators, so the expression shape is kept as-is.
  rows = (size + 1) - (sigmoid(points) * size) |> truncate

  for col <- 1..size, reduce: m do
    canvas -> set(canvas, trunc(rows[col]), col, 1)
  end
end
@doc """
Builds one fully connected layer as a map with random parameters:
`b` is a 1 x `n_neur` bias row, `w` an `n_conn` x `n_neur` weight matrix.
"""
def neural_layer(n_conn, n_neur) do
  biases = random(1, n_neur)
  weights = random(n_conn, n_neur)

  %{b: biases, w: weights}
end
@doc """
Builds the network: one `neural_layer/2` per consecutive pair of layer sizes
in `topology`.
"""
def create_nn(topology) do
  last = Enum.count(topology) - 1

  Enum.map(1..last, fn idx ->
    neural_layer(trunc(topology[idx]), trunc(topology[idx + 1]))
  end)
end
@doc """
Runs one forward pass of `neural_net` on input `x` and, when `train` is
true, one backward pass against targets `y`.

Returns the final layer's activation (the forward-pass output).

NOTE(review): the backward pass computes deltas but discards them —
`_delta`, `_neural_net` and `_w` are never used and no weights are updated
(`@lr` is unused), so repeated calls do not actually learn. Confirm whether
the weight-update step was intentionally omitted.
"""
def train(neural_net, x, y, train \\ true) do
  # Forward pass: accumulate {z, activation} per layer, seeded with {nil, x}.
  out = Enum.reduce(0..Enum.count(neural_net)-1, [{nil, x}], fn l, acc ->
    # z = a_prev . w + b (broadcast add via Toolex.sum_inline).
    z = Toolex.sum_inline(dot(elem(List.last(acc), 1), Enum.at(neural_net, l).w), Enum.at(neural_net, l).b)
    acc ++ [{z, sigmoid(z)}]
  end)

  # Backward pass
  if train do
    # Walk layers last-to-first, threading {outputs, deltas, net, prev weights}.
    {out, _delta, _neural_net, _w} = Enum.reduce(Enum.count(neural_net)-1..0, {out, [], neural_net, nil} , fn l, {out_acc, delta_acc, neural_net_acc, w_acc} ->
      # out_acc is offset by one because of the {nil, x} seed.
      {z, a} = Enum.at(out_acc, l+1)

      d = case l == Enum.count(neural_net)-1 do
        # Output layer: delta = cost'(a, y^T) ⊙ sigmoid'(a).
        true -> [Matrex.multiply(Toolex.cost_d(a, transpose(y)), Matrex.Algorithms.sigmoid_gradient(a))]
        # Hidden layer: delta = (delta_next . w_next) ⊙ sigmoid'(a), prepended.
        # NOTE(review): Enum.concat/2 with a Matrex as first argument
        # enumerates its scalar elements rather than prepending the matrix
        # itself — `[matrix | delta_acc]` may have been intended; verify.
        false ->
          Enum.concat(
            multiply(
              dot(
                Enum.at(delta_acc, 0),
                w_acc
              ),
              Matrex.Algorithms.sigmoid_gradient(a)
            ),
            delta_acc
          )
      end
      #IO.puts "***********"

      # Carry this layer's weights forward as w_acc for the next iteration.
      {out_acc, d, neural_net_acc, Enum.at(neural_net, l).w}
    end)
  end

  # Final activation of the forward pass (the `if` block's result is unused).
  elem(List.last(out),1)
end
end