
Branch: master — Gustavo Adolfo Mesa Roldán, 3 years ago — parent commit 186abe80f2
3 changed files with 149 additions and 101 deletions

1. README.md (+5 / -13)
2. lib/p1.ex (+27 / -88)
3. lib/p1_old.ex (+117 / -0)

README.md (+5 / -13)

@@ -1,21 +1,13 @@
# P1
**TODO: Add description**
**Simple AI example, to classify points of two different colors.**
## Installation
This example, built with [Matrex](https://hexdocs.pm/matrex/Matrex.html), is based on [this video](https://www.youtube.com/watch?v=W8AeOXa_FqU&list=PL-Ogd76BhmcCO4VeOlIH93BMT5A_kKAXp&index=4).
If [available in Hex](https://hex.pm/docs/publish), the package can be installed
by adding `p1` to your list of dependencies in `mix.exs`:
## Run
```elixir
def deps do
[
{:p1, "~> 0.1.0"}
]
end
```
mix run
```
Documentation can be generated with [ExDoc](https://github.com/elixir-lang/ex_doc)
and published on [HexDocs](https://hexdocs.pm). Once published, the docs can
be found at [https://hexdocs.pm/p1](https://hexdocs.pm/p1).
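Since the new README replaces the Hex install instructions with a bare `mix run`, the example presumably relies on `P1.start/2` being wired up as the OTP application callback. A minimal sketch of the `mix.exs` entry that would make `mix run` kick off training (hypothetical; not part of this commit):

```elixir
# Hypothetical mix.exs excerpt — assumes P1 is registered as the
# application callback module, so `mix run` boots the app and
# invokes P1.start/2.
def application do
  [
    mod: {P1, []},
    extra_applications: [:logger]
  ]
end
```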

lib/p1.ex (+27 / -88)

@@ -16,84 +16,40 @@ defmodule P1 do
@res 50
def start(_type, _args) do
# zeros(10)
# |> set_graph(Tools.linspace(String.to_integer("-5"), 5, 10))
# |> heatmap(:color24bit)
# zeros(50)
# |> set_points(Dataset.points |> multiply(20) |> add(new(@rows, 2, fn -> 25 end)), Dataset.values)
# |> heatmap(:color24bit)
# IO.puts inspect Dataset.points
# loss_o = if rem(i, 25) == 0 do
# Enum.concat(loss, [Toolex.cost(a, transpose(Dataset.values))])
# else
# loss
# end
# z = [loss_o] |> new
# IO.puts inspect z
# if z[:columns] > 10 do
# zeros(z[:columns])
# |> set_graph(z)
# |> heatmap(:color24bit)
# #:timer.sleep(60)
# end
# IO.puts inspect random(1, 6) |> multiply(2) |> subtract(1)
# IO.puts inspect Toolex.mean(Dataset2.ab)
# IO.puts inspect Dataset2.aa |> subtract(Toolex.mean(Dataset2.bb)) |> multiply(0.5)
# IO.puts inspect Dataset2.ac |> subtract(Toolex.mean(Dataset2.ab)) |> multiply(0.5)
# IO.puts inspect Dataset2.ac - multiply(Toolex.mean(Dataset2.ab), 0.5)
IO.puts "starting"
loop_main(create_nn(@topology))
IO.puts "Ending"
:timer.sleep(1000)
Task.start(fn -> :timer.sleep(1000); IO.puts("done sleeping") end)
end
Enum.reduce(0..2500, create_nn(@topology), fn i, neural_net_acc ->
{_a, n} = train(neural_net_acc, Dataset.points, Dataset.values)
def loop_main(neural_net, n, 2500) do
neural_net
end
def loop_main(neural_net, n, i) do
loop_main(n, i)
end
x0 = Tools.linspace(String.to_float("-1.5"), 1.5, @res)
x1 = Tools.linspace(String.to_float("-1.5"), 1.5, @res)
# IO.puts inspect x0
# IO.puts inspect x1
# IO.puts inspect a
def loop_main(neural_net, i \\ 0) do
{a, n} = train(neural_net, Dataset.points, Dataset.values)
if rem(i, 25) == 0 do
Enum.reduce(Enum.with_index(x0), zeros(@res), fn {i2, i1}, acc1 ->
Enum.reduce(Enum.with_index(x1), acc1, fn {j2, j1}, acc2 ->
set(acc2, i1+1, j1+1, elem(train(n, new([[i2, j2]]), Dataset.values, false), 0)[1])
end)
end)
|> set_points(Dataset.points |> multiply(20) |> add(new(@rows, 2, fn -> 25 end)), Dataset.values)
|> heatmap(:color24bit, [title: "----------------------Pensando----------------------"])
#|> inspect |> IO.puts
:timer.sleep(50)
paint(n)
end
n
end)
# a = [[0.49451709],
# [0.48813893],
# [0.47200026],
# [0.46428155],
# [0.4900677 ],
# [0.49409855]] |> new
# b = [[0], [1], [1], [0], [1], [0]] |> new
# #IO.puts inspect Toolex.cost_d(a, b)
# c = (1 - a)
# IO.puts inspect Toolex.sigm_d(a)
loop_main(neural_net, n, i+1)
end
IO.puts "Ending"
:timer.sleep(1000)
Task.start(fn -> :timer.sleep(1000); IO.puts("done sleeping") end)
def paint(nn) do
x0 = Tools.linspace(String.to_float("-1.5"), 1.5, @res)
x1 = Tools.linspace(String.to_float("-1.5"), 1.5, @res)
Enum.reduce(Enum.with_index(x0), zeros(@res), fn {i2, i1}, acc1 ->
Enum.reduce(Enum.with_index(x1), acc1, fn {j2, j1}, acc2 ->
set(acc2, i1+1, j1+1, elem(train(nn, new([[i2, j2]]), Dataset.values, false), 0)[1])
end)
end)
|> set_points(Dataset.points |> multiply(20) |> add(new(@rows, 2, fn -> 25 end)), Dataset.values)
|> heatmap(:color24bit, [title: "----------------------Solucion----------------------"])
end
def train(neural_net, x, y, train \\ true) do
@@ -138,15 +94,6 @@ defmodule P1 do
end)
end
def set_graph(m, points) do
size = points[:columns]
points = (size + 1) - (sigmoid(points) * size) |> truncate
Enum.reduce(1..size, m, fn p, acc ->
set(acc, trunc(points[p]), p, 1)
end)
end
def neural_layer(n_conn, n_neur) do
%{
b: random(1, n_neur) |> multiply(2) |> subtract(1),
@@ -157,12 +104,4 @@ defmodule P1 do
def create_nn(topology) do
for n <- 1..Enum.count(topology)-1, do: neural_layer(trunc(topology[n]), trunc(topology[n+1]))
end
def loop do
:timer.sleep(16);
zeros(50)
|> set_points(Dataset.points |> multiply(20) |> add(new(@rows, 2, fn -> 25 end)), Dataset.values)
|> heatmap(:color24bit)
loop
end
end
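Read as a whole, this change swaps the old `Enum.reduce(0..2500, ...)` training loop for recursive `loop_main` clauses and moves the heatmap rendering into `paint/1`. Because the hunk above interleaves removed and added lines, here is a condensed sketch of the recursive shape only (a simplification: the committed code threads the updated network through an extra 3-arity clause):

```elixir
# Condensed sketch of the new control flow: train for 2500 iterations,
# repainting the decision-surface heatmap every 25 steps.
def loop_main(neural_net, i \\ 0)
def loop_main(neural_net, 2500), do: neural_net
def loop_main(neural_net, i) do
  {_a, n} = train(neural_net, Dataset.points, Dataset.values)
  if rem(i, 25) == 0 do
    :timer.sleep(50)
    paint(n)
  end
  loop_main(n, i + 1)
end
```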

lib/p1_old.ex (+117 / -0)

@@ -0,0 +1,117 @@
defmodule P1old do
  import Matrex.Operators
  import Kernel, except: [-: 1, +: 2, -: 2, *: 2, /: 2, <|>: 2]
  import Matrex

  @moduledoc """
  Documentation for `P1`.
  """

  @doc """
  Run to start app.
  """
  @p 2
  @topology new([[@p, 4, 8, 1]])
  @lr 0.1
  @rows 500
  @res 50

  def start(_type, _args) do
    IO.puts "starting"
    Enum.reduce(0..2500, create_nn(@topology), fn i, neural_net_acc ->
      {_a, n} = train(neural_net_acc, Dataset.points, Dataset.values)
      x0 = Tools.linspace(String.to_float("-1.5"), 1.5, @res)
      x1 = Tools.linspace(String.to_float("-1.5"), 1.5, @res)
      if rem(i, 25) == 0 do
        Enum.reduce(Enum.with_index(x0), zeros(@res), fn {i2, i1}, acc1 ->
          Enum.reduce(Enum.with_index(x1), acc1, fn {j2, j1}, acc2 ->
            set(acc2, i1+1, j1+1, elem(train(n, new([[i2, j2]]), Dataset.values, false), 0)[1])
          end)
        end)
        |> set_points(Dataset.points |> multiply(20) |> add(new(@rows, 2, fn -> 25 end)), Dataset.values)
        |> heatmap(:color24bit, [title: "----------------------Pensando----------------------"])
        #:timer.sleep(250)
      end
      n
    end)
    IO.puts "Ending"
    :timer.sleep(1000)
    Task.start(fn -> :timer.sleep(1000); IO.puts("done sleeping") end)
  end

  def train(neural_net, x, y, train \\ true) do
    # Forward pass
    out = Enum.reduce(0..Enum.count(neural_net)-1, [{nil, x}], fn l, acc ->
      z = Toolex.sum_inline(dot(elem(List.last(acc), 1), Enum.at(neural_net, l).w), Enum.at(neural_net, l).b)
      acc ++ [{z, sigmoid(z)}]
    end)
    # Backward pass
    if train do
      {out_o, _delta, neural_net_o, _w} = Enum.reduce(Enum.count(neural_net)-1..0, {out, [], neural_net, nil}, fn l, {out_acc, delta_acc, neural_net_acc, w_acc} ->
        {z, a} = Enum.at(out_acc, l+1)
        d = case l == Enum.count(neural_net)-1 do
          true -> [Matrex.multiply(Toolex.cost_d(a, transpose(y)), Toolex.sigm_d(a))]
          false -> Enum.concat([multiply(dot(Enum.at(delta_acc, 0), w_acc), Toolex.sigm_d(a))], delta_acc)
        end
        neural_net_acc_N = List.update_at(neural_net_acc, l, fn n ->
          %{
            b: n.b - multiply(Toolex.mean(Enum.at(d, 0)), @lr),
            w: n.w - multiply(dot(transpose(elem(Enum.at(out_acc, l),1)), Enum.at(d, 0)), @lr)
          }
        end)
        {out_acc, d, neural_net_acc_N, transpose(Enum.at(neural_net, l).w)}
      end)
      {elem(List.last(out_o),1), neural_net_o}
    else
      {elem(List.last(out),1), neural_net}
    end
  end

  def set_points(m, points, values) do
    Enum.reduce(1..Enum.count(values), m, fn p, acc ->
      set(acc, trunc(points[p][1]), trunc(points[p][2]),
        (case trunc(values[p]) do
          0 -> 0
          1 -> 1
        end))
    end)
  end

  def set_graph(m, points) do
    size = points[:columns]
    points = (size + 1) - (sigmoid(points) * size) |> truncate
    Enum.reduce(1..size, m, fn p, acc ->
      set(acc, trunc(points[p]), p, 1)
    end)
  end

  def neural_layer(n_conn, n_neur) do
    %{
      b: random(1, n_neur) |> multiply(2) |> subtract(1),
      w: random(n_conn, n_neur) |> multiply(2) |> subtract(1)
    }
  end

  def create_nn(topology) do
    for n <- 1..Enum.count(topology)-1, do: neural_layer(trunc(topology[n]), trunc(topology[n+1]))
  end

  def loop do
    :timer.sleep(16);
    zeros(50)
    |> set_points(Dataset.points |> multiply(20) |> add(new(@rows, 2, fn -> 25 end)), Dataset.values)
    |> heatmap(:color24bit)
    loop
  end
end
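The backward pass in `train/4` applies a plain gradient-descent step per layer. Isolated as a helper for clarity only (hypothetical: `update_layer`, `a_prev`, `delta`, and `lr` are illustrative names; `Toolex.mean` is the project's own helper, assumed here to return a Matrex of column means):

```elixir
# Per-layer update, as performed inside List.update_at/3 above:
#   b <- b - lr * mean(delta)
#   w <- w - lr * transpose(a_prev) . delta
defp update_layer(%{w: w, b: b}, a_prev, delta, lr) do
  %{
    b: Matrex.subtract(b, Matrex.multiply(Toolex.mean(delta), lr)),
    w: Matrex.subtract(w, Matrex.multiply(Matrex.dot(Matrex.transpose(a_prev), delta), lr))
  }
end
```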
