
Commit 16ee010

update to more modern java
1 parent e54d4f0 commit 16ee010

4 files changed: 25 additions & 25 deletions


xor/Rakefile

Lines changed: 1 addition & 1 deletion
@@ -16,7 +16,7 @@ end

 desc 'Run'
 task :run do
-  sh 'k9 run xor.rb'
+  sh 'jruby xor.rb'
 end

 desc 'clean'

xor/ext/nn/Connection.java

Lines changed: 2 additions & 2 deletions
@@ -8,8 +8,8 @@

 public class Connection {

-  private Neuron from; // Connection goes from. . .
-  private Neuron to; // To. . .
+  private final Neuron from; // Connection goes from. . .
+  private final Neuron to; // To. . .
   private float weight; // Weight of the connection. . .

   // Constructor builds a connection with a random weight
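
Aside on the Connection change: final on the two endpoint fields only promises that each field is assigned exactly once, in the constructor, and never rebound afterwards; the Neuron objects themselves stay mutable, and weight is left non-final so it can still be adjusted during training. A minimal sketch of the pattern follows — the constructor parameter names and the random-weight expression are assumptions for illustration, not copied from the project, and the sketch leans on the project's Neuron class.

// Sketch only: shows how final fields interact with a constructor.
// The real Connection class has more to it (getters, adjustWeight, ...).
public class Connection {

    private final Neuron from;  // assigned once below, cannot be repointed later
    private final Neuron to;    // assigned once below, cannot be repointed later
    private float weight;       // still mutable so training can adjust it

    public Connection(Neuron from, Neuron to) {
        this.from = from;
        this.to = to;
        // Assumed initial weight in [-1, 1]; the project's actual range may differ.
        this.weight = (float) (Math.random() * 2 - 1);
    }
}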

xor/ext/nn/Network.java

Lines changed: 14 additions & 14 deletions
@@ -51,19 +51,19 @@ public Network(int inputs, int hiddentotal) {
     output = new OutputNeuron();

     // Connect input layer to hidden layer
-    for (int i = 0; i < input.length; i++) {
+    for (InputNeuron input1 : input) {
       for (int j = 0; j < hidden.length-1; j++) {
         // Create the connection object and put it in both neurons
-        Connection c = new Connection(input[i],hidden[j]);
-        input[i].addConnection(c);
+        Connection c = new Connection(input1, hidden[j]);
+        input1.addConnection(c);
         hidden[j].addConnection(c);
       }
     }

     // Connect the hidden layer to the output neuron
-    for (int i = 0; i < hidden.length; i++) {
-      Connection c = new Connection(hidden[i],output);
-      hidden[i].addConnection(c);
+    for (HiddenNeuron hidden1 : hidden) {
+      Connection c = new Connection(hidden1, output);
+      hidden1.addConnection(c);
       output.addConnection(c);
     }

@@ -105,20 +105,20 @@ public float train(float[] inputs, float answer) {
     for (int i = 0; i < connections.size(); i++) {
       Connection c = (Connection) connections.get(i);
       Neuron neuron = c.getFrom();
-      float output = neuron.getOutput();
-      float deltaWeight = output*deltaOutput;
+      float loutput = neuron.getOutput();
+      float deltaWeight = loutput*deltaOutput;
       c.adjustWeight(LEARNING_CONSTANT*deltaWeight);
     }

     // ADJUST HIDDEN WEIGHTS
-    for (int i = 0; i < hidden.length; i++) {
-      connections = hidden[i].getConnections();
+    for (HiddenNeuron hidden1 : hidden) {
+      connections = hidden1.getConnections();
       float sum = 0;
       // Sum output delta * hidden layer connections (just one output)
       for (int j = 0; j < connections.size(); j++) {
         Connection c = (Connection) connections.get(j);
         // Is this a connection from hidden layer to next layer (output)?
-        if (c.getFrom() == hidden[i]) {
+        if (c.getFrom() == hidden1) {
           sum += c.getWeight()*deltaOutput;
         }
       }

@@ -127,9 +127,9 @@ public float train(float[] inputs, float answer) {
       for (int j = 0; j < connections.size(); j++) {
         Connection c = (Connection) connections.get(j);
         // Is this a connection from previous layer (input) to hidden layer?
-        if (c.getTo() == hidden[i]) {
-          float output = hidden[i].getOutput();
-          float deltaHidden = output * (1 - output); // Derivative of sigmoid(x)
+        if (c.getTo() == hidden1) {
+          float loutput = hidden1.getOutput();
+          float deltaHidden = loutput * (1 - loutput); // Derivative of sigmoid(x)
           deltaHidden *= sum; // Would sum for all outputs if more than one output
           Neuron neuron = c.getFrom();
           float deltaWeight = neuron.getOutput()*deltaHidden;
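
Aside on the Network changes: each rewritten loop is the plain indexed-loop to enhanced-for conversion, which is safe whenever the body only reads successive elements and never uses the index for anything else (input1 and hidden1 are the names the diff introduces). The inner for (int j = 0; j < hidden.length-1; j++) loop stays indexed because its bound deliberately stops one element short of the array, which an enhanced-for over the whole array would not do. The output → loutput rename keeps the local from shadowing the class's output field seen in the first hunk (output = new OutputNeuron();). A small self-contained sketch of the loop equivalence, using placeholder data rather than the project's neuron arrays:

// Sketch only: the two loops below visit the same elements in the same order.
public class LoopRewriteSketch {
    public static void main(String[] args) {
        String[] layer = {"n0", "n1", "n2"};

        // Indexed form: the index i exists only to fetch layer[i].
        for (int i = 0; i < layer.length; i++) {
            System.out.println(layer[i]);
        }

        // Enhanced-for form: same traversal, no index bookkeeping to get wrong.
        for (String neuron : layer) {
            System.out.println(neuron);
        }
    }
}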

xor/ext/nn/Neuron.java

Lines changed: 8 additions & 8 deletions
@@ -12,21 +12,21 @@
 public class Neuron {

   protected float output;
-  protected ArrayList connections;
+  protected ArrayList<Connection> connections;
   protected boolean bias = false;

   // A regular Neuron
   public Neuron() {
     output = 0;
     // Using an arraylist to store list of connections to other neurons
-    connections = new ArrayList();
+    connections = new ArrayList<>();
     bias = false;
   }

   // Constructor for a bias neuron
   public Neuron(int i) {
     output = i;
-    connections = new ArrayList();
+    connections = new ArrayList<>();
     bias = true;
   }

@@ -37,10 +37,10 @@ public void calcOutput() {
       // do nothing
     } else {
       float sum = 0;
-      float bias = 0;
+      float lbias = 0;
       //System.out.println("Looking through " + connections.size() + " connections");
       for (int i = 0; i < connections.size(); i++) {
-        Connection c = (Connection) connections.get(i);
+        Connection c = connections.get(i);
         Neuron from = c.getFrom();
         Neuron to = c.getTo();
         // Is this connection moving forward to us

@@ -49,14 +49,14 @@ public void calcOutput() {
           // This isn't really necessary
           // But I am treating the bias individually in case I need to at some point
           if (from.bias) {
-            bias = from.getOutput()*c.getWeight();
+            lbias = from.getOutput()*c.getWeight();
           } else {
             sum += from.getOutput()*c.getWeight();
           }
         }
       }
       // Output is result of sigmoid function
-      output = f(bias+sum);
+      output = f(lbias+sum);
     }
   }

@@ -73,7 +73,7 @@ public static float f(float x) {
     return 1.0f / (1.0f + (float) Math.exp(-x));
   }

-  public ArrayList getConnections() {
+  public ArrayList<Connection> getConnections() {
     return connections;
   }
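
Aside on the Neuron changes: declaring the list as ArrayList<Connection> lets the compiler check what goes into it and makes the (Connection) cast on get() unnecessary, while new ArrayList<>() is the diamond form that infers the type argument from the declaration (available since Java 7). The bias → lbias and output → loutput renames simply stop those locals from shadowing the bias and output fields declared at the top of the class. A short self-contained sketch of the raw-versus-generified difference, using String as a stand-in element type instead of Connection:

import java.util.ArrayList;

// Sketch only: contrasts a raw ArrayList with a generified one.
public class GenericsSketch {
    public static void main(String[] args) {
        // Raw type (old style): get() returns Object, so a cast is needed
        // and the compiler can only warn about unchecked operations.
        ArrayList raw = new ArrayList();
        raw.add("c0");
        String first = (String) raw.get(0);

        // Generified with the diamond operator: adds are type-checked
        // and no cast is required on retrieval.
        ArrayList<String> typed = new ArrayList<>();
        typed.add("c0");
        String second = typed.get(0);

        System.out.println(first + " " + second);
    }
}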
