Commit 457b21e — pure ruby alternative

1 parent 2f8510f

15 files changed: +414 −12 lines
nature-of-code/xor/src/nn/Connection.java

Lines changed: 5 additions & 7 deletions

```diff
@@ -16,9 +16,9 @@ public class Connection {
   public Connection(Neuron a_, Neuron b_) {
     from = a_;
     to = b_;
-    weight = (double) Math.random()*2-1;
+    weight = Math.random()*2-1;
   }
-
+
   // In case I want to set the weights manually, using this for testing
   public Connection(Neuron a_, Neuron b_, double w) {
     from = a_;
@@ -29,11 +29,11 @@ public Connection(Neuron a_, Neuron b_, double w) {
   public Neuron getFrom() {
     return from;
   }
-
+
   public Neuron getTo() {
     return to;
-  }
-
+  }
+
   public double getWeight() {
     return weight;
   }
@@ -42,6 +42,4 @@ public double getWeight() {
   public void adjustWeight(double deltaWeight) {
     weight += deltaWeight;
   }
-
-
 }
```

nature-of-code/xor/src/nn/Neuron.java

Lines changed: 3 additions & 5 deletions

```diff
@@ -12,14 +12,14 @@
 public class Neuron {

   protected double output;
-  protected ArrayList<Connection> connections;
+  protected ArrayList<Connection> connections;
   protected boolean bias = false;

   // A regular Neuron
   public Neuron() {
     output = 0;
     // Using an arraylist to store list of connections to other neurons
-    connections = new ArrayList<>();
+    connections = new ArrayList<>();
     bias = false;
   }

@@ -56,7 +56,7 @@ public void calcOutput() {
       }
     }
     // Output is result of sigmoid function
-    output = f(lbias+sum);
+    output = f(lbias + sum);
   }
 }

@@ -76,6 +76,4 @@ public static double f(double x) {
   public ArrayList<Connection> getConnections() {
     return connections;
   }
-
-
 }
```

nature-of-code/xor_ruby/README.md

Lines changed: 6 additions & 0 deletions

Alternative xor (neural net) Library
===================

Here we translate the Java library to pure Ruby.
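A minimal usage sketch for the translated library, assuming the classes added below are loaded through a top-level `xor.rb` (the require path is a guess; `Network`, `train`, and `feed_forward` are the names added in this commit):

```ruby
require_relative 'xor' # assumed entry point that loads Network, Neuron, etc.

# A 2-input network with 4 hidden neurons, trained on the XOR truth table
nn = Network.new(2, 4)
cases = [[[0, 0], 0], [[0, 1], 1], [[1, 0], 1], [[1, 1], 0]]
10_000.times { cases.each { |inputs, answer| nn.train(inputs, answer) } }
cases.each do |inputs, answer|
  puts format('%p => %.3f (expected %d)', inputs, nn.feed_forward(inputs), answer)
end
```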
Lines changed: 55 additions & 0 deletions

```ruby
# The Nature of Code
# Daniel Shiffman
# https://natureofcode.com

# "Landscape" example
class Landscape
  include Propane::Proxy

  attr_reader :scl, :w, :h, :rows, :cols, :z, :zoff

  def initialize(scl, w, h)
    @scl, @w, @h = scl, w, h
    @cols = w / scl
    @rows = h / scl
    # Block form so each column gets its own row array (the two-argument
    # form would alias a single shared inner array)
    @z = Array.new(cols) { Array.new(rows, 0.0) }
    @zoff = 0
  end

  # Calculate height values (based off a neural network)
  def calculate(nn)
    val = lambda do |curr, net, x, y|
      curr * 0.95 + 0.05 * (net.feed_forward([x, y]) * 280.0 - 140.0)
    end
    @z = (0...cols).map do |i|
      (0...rows).map do |j|
        # normalize j by rows (not cols) so both inputs lie in 0...1
        val.call(z[i][j], nn, i * 1.0 / cols, j * 1.0 / rows)
      end
    end
  end

  # Render landscape as grid of quads
  def render
    # Every cell is an individual quad,
    # using the propane grid convenience function instead of a nested loop
    grid(z.size - 1, z[0].size - 1) do |x, y|
      # one quad at a time
      # each quad's color is determined by the height value at each vertex
      # (clean this part up)
      no_stroke
      push_matrix
      begin_shape(QUADS)
      translate(x * scl - w * 0.5, y * scl - h * 0.5, 0)
      fill(z[x][y] + 127, 220)
      vertex(0, 0, z[x][y])
      fill(z[x + 1][y] + 127, 220)
      vertex(scl, 0, z[x + 1][y])
      fill(z[x + 1][y + 1] + 127, 220)
      vertex(scl, scl, z[x + 1][y + 1])
      fill(z[x][y + 1] + 127, 220)
      vertex(0, scl, z[x][y + 1])
      end_shape
      pop_matrix
    end
  end
end
```
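For context, a hedged sketch of how this class might be driven from a propane sketch; the sketch class, sizes, and training loop here are illustrative, not part of the commit:

```ruby
require 'propane'
require_relative 'xor' # assumed entry point for the classes in this commit

class XorLandscape < Propane::App
  def settings
    size 640, 360, P3D
  end

  def setup
    sketch_title 'XOR Landscape'
    @nn = Network.new(2, 4)
    @land = Landscape.new(20, 600, 600)
  end

  def draw
    # Keep training; the landscape eases toward the network's surface
    xor = [[0, 0, 0], [0, 1, 1], [1, 0, 1], [1, 1, 0]]
    4.times { xor.each { |a, b, ans| @nn.train([a, b], ans) } }
    @land.calculate(@nn)
    background 0
    push_matrix
    translate width / 2, height / 2
    rotate_x Math::PI / 3
    @land.render
    pop_matrix
  end
end

XorLandscape.new
```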
Lines changed: 13 additions & 0 deletions

```ruby
class Connection
  attr_reader :from, :to, :weight

  # weight defaults to a random float in -1..1.0
  def initialize(from, to, weight = rand(-1..1.0))
    @from = from
    @to = to
    @weight = weight
  end

  def adjust_weight(delta)
    @weight += delta
  end
end
```
Lines changed: 5 additions & 0 deletions

```ruby
# An input neuron simply stores whatever value is fed to it
class InputNeuron < Neuron
  def input(data)
    @output = data
  end
end
```
Lines changed: 100 additions & 0 deletions

```ruby
# Daniel Shiffman
# The Nature of Code, Fall 2006
# Neural Network
# Class to describe the entire network
# Arrays for input neurons, hidden neurons, and output neuron
# Need to update this so that it would work with an array of outputs
# Rather silly that I didn't do this initially
# Also need to build in a "Layer" class so that there can easily
# be more than one hidden layer
class Network
  LEARNING_CONSTANT = 0.5
  attr_reader :input, :hidden, :output
  # Only one output now to start!!! (i can do better, really. . .)
  # Constructor makes the entire network based on number of inputs & number of
  # neurons in the hidden layer
  # Only one hidden layer!!! (fix this dood)
  def initialize(inputs, hidden_total)
    @input = (0..inputs).map { InputNeuron.new } # one extra slot for a bias input
    @hidden = (0..hidden_total).map { Neuron.new } # same as regular Neuron
    # Make bias neurons
    input[inputs] = Neuron.new(1)
    hidden[hidden_total] = Neuron.new(1)
    # Make output neuron
    @output = Neuron.new # same as regular Neuron
    # Connect input layer to hidden layer
    input.each do |input1|
      (0...hidden.length).each do |j|
        # Create the object and put it in both neurons
        c = Connection.new input1, hidden[j]
        input1.add_connection(c)
        hidden[j].add_connection(c)
      end
    end
    # Connect the hidden layer to the output neuron
    hidden.each do |hidden1|
      c = Connection.new(hidden1, output)
      hidden1.add_connection(c)
      output.add_connection(c)
    end
  end

  def feed_forward(input_vals)
    # Feed the input with an array of inputs
    input_vals.each_with_index do |val, i|
      input[i].input(val)
    end

    # Have the hidden layer calculate its output
    (0...hidden.length).each do |i|
      hidden[i].calc_output
    end

    # Calculate the output of the output neuron
    output.calc_output

    # Return output
    output.output
  end

  def train(inputs, answer)
    result = feed_forward(inputs)
    # This is where the error correction all starts
    # Derivative of sigmoid output function * diff between known and guess
    delta_output = result * (1 - result) * (answer - result)
    # BACKPROPAGATION
    # This is easier b/c we just have one output
    # Apply delta to connections between hidden and output
    connections = output.connections
    connections.each do |c|
      neuron = c.from
      loutput = neuron.output
      delta_weight = loutput * delta_output
      c.adjust_weight(LEARNING_CONSTANT * delta_weight)
    end

    # ADJUST HIDDEN WEIGHTS
    hidden.each do |hidden1|
      connections = hidden1.connections
      sum = 0
      # Sum output delta * hidden layer connections (just one output)
      connections.each do |c|
        # Is this connection from the hidden layer to the next layer (output)?
        sum += c.weight * delta_output if c.from == hidden1
      end
      # Then adjust the weights coming in, based on:
      # the above sum * derivative of the sigmoid output function for hidden neurons
      connections.each do |c|
        # Is this connection from the previous layer (input) to the hidden layer?
        next unless c.to == hidden1
        loutput = hidden1.output
        delta_hidden = loutput * (1 - loutput) # Derivative of sigmoid(x)
        delta_hidden *= sum # Would sum over all outputs if there were more than one
        neuron = c.from
        delta_weight = neuron.output * delta_hidden
        c.adjust_weight(LEARNING_CONSTANT * delta_weight)
      end
    end
    result
  end
end
```
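The line `delta_output = result * (1 - result) * (answer - result)` relies on the sigmoid identity σ′(x) = σ(x)(1 − σ(x)). A standalone finite-difference check of that identity (not part of the commit):

```ruby
sigmoid = ->(x) { 1.0 / (1.0 + Math.exp(-x)) }
h = 1e-6
[-2.0, 0.0, 1.5].each do |x|
  numeric  = (sigmoid.call(x + h) - sigmoid.call(x - h)) / (2 * h)
  analytic = sigmoid.call(x) * (1 - sigmoid.call(x))
  puts format('x=%4.1f  numeric=%.6f  analytic=%.6f', x, numeric, analytic)
end
```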
Lines changed: 39 additions & 0 deletions

```ruby
class Neuron
  attr_reader :output, :connections, :bias

  def initialize(output = 0)
    @connections = []
    @output = output
    @bias = !output.zero?
  end

  def add_connection(c)
    connections << c
  end

  def calc_output
    return if bias # bias neurons keep a fixed output
    sigmoid = ->(x) { 1.0 / (1.0 + Math.exp(-x)) }
    sum = 0
    lbias = 0
    # fstring = 'Looking through %d connections'
    # puts(format(fstring, connections.size))
    connections.each do |c|
      from = c.from
      to = c.to
      # Is this connection moving forward to us?
      # Ignore connections that we send our output to
      if to == self
        # This isn't really necessary
        # Treating the bias individually in case it's needed at some point
        if from.bias
          lbias = from.output * c.weight
        else
          sum += from.output * c.weight
        end
      end
    end
    # Output is the result of the sigmoid function
    @output = sigmoid.call(lbias + sum)
  end
end
```
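To see `calc_output` in action outside the full network, one can wire a single neuron by hand (illustrative only, assuming the `Connection`, `Neuron`, and `InputNeuron` classes above are loaded):

```ruby
a = InputNeuron.new
b = InputNeuron.new
bias = Neuron.new(1) # fixed output of 1, flagged as a bias
n = Neuron.new

[[a, 0.4], [b, -0.6], [bias, 0.1]].each do |from, w|
  n.add_connection(Connection.new(from, n, w))
end

a.input(1.0)
b.input(0.5)
n.calc_output
# Expect sigmoid(0.1 + 1.0*0.4 + 0.5*-0.6) = sigmoid(0.2) ≈ 0.5498
puts n.output
```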
Lines changed: 26 additions & 0 deletions

```ruby
require_relative 'test_helper'
require 'java'
require_relative '../xor'

class ConnectionTest < Minitest::Test
  def test_new_from_to
    from = Neuron.new(1)
    to = Neuron.new(2)
    connection = Connection.new from, to
    assert_equal from, connection.from
    assert_equal to, connection.to
  end

  def test_new_from_to_weight
    from = Neuron.new(1)
    to = Neuron.new(2)
    connection = Connection.new from, to, rand(-1..1.0)
    assert_includes(-1.0..1.0, connection.weight)
  end

  def test_adjust_weight
    from = Neuron.new(1)
    to = Neuron.new(2)
    connection = Connection.new from, to, 0.5
    connection.adjust_weight(0.2)
    assert_in_epsilon(0.7, connection.weight)
  end
end
```
Lines changed: 26 additions & 0 deletions

```ruby
require_relative 'test_helper'
require 'java'
require_relative '../xor'

class NeuronTest < Minitest::Test
  def test_new_no_param
    neuron = InputNeuron.new
    assert !neuron.bias
    assert neuron.output.zero?
  end

  def test_new_param
    input = 1
    neuron = InputNeuron.new input
    assert neuron.bias
    assert_equal input, neuron.output
  end

  def test_input
    neuron = InputNeuron.new
    neuron.input 2
    assert_equal 2, neuron.output
  end
end
```
