Skip to content

Commit 2f8510f

Browse files
committed
remove pointless casting to float
1 parent a3d6d04 commit 2f8510f

File tree

6 files changed

+72
-24
lines changed

6 files changed

+72
-24
lines changed

nature-of-code/xor/pom.rb

Lines changed: 0 additions & 1 deletion
Original file line numberDiff line numberDiff line change
@@ -28,5 +28,4 @@
2828
source_directory 'src'
2929
final_name 'xor'
3030
end
31-
3231
end

nature-of-code/xor/pom.xml

Lines changed: 49 additions & 0 deletions
Original file line numberDiff line numberDiff line change
@@ -0,0 +1,49 @@
1+
<?xml version="1.0" encoding="UTF-8"?>
2+
<!--
3+
4+
5+
DO NOT MODIFY - GENERATED CODE
6+
7+
8+
-->
9+
<project xsi:schemaLocation="http://maven.apache.org/POM/4.0.0 http://maven.apache.org/xsd/maven-4.0.0.xsd" xmlns="http://maven.apache.org/POM/4.0.0"
10+
xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance">
11+
<modelVersion>4.0.0</modelVersion>
12+
<groupId>nn</groupId>
13+
<artifactId>xor</artifactId>
14+
<version>1.0-SNAPSHOT</version>
15+
<name>xor</name>
16+
<description>neural net library for xor</description>
17+
<developers>
18+
<developer>
19+
<id>shiffman</id>
20+
<name>DanShiffman</name>
21+
<roles>
22+
<role>developer</role>
23+
</roles>
24+
</developer>
25+
</developers>
26+
<properties>
27+
<polyglot.dump.pom>pom.xml</polyglot.dump.pom>
28+
<project.build.sourceEncoding>UTF-8</project.build.sourceEncoding>
29+
<maven.compiler.source>1.8</maven.compiler.source>
30+
<maven.compiler.target>1.8</maven.compiler.target>
31+
<xor.basedir>${project.basedir}</xor.basedir>
32+
</properties>
33+
<build>
34+
<sourceDirectory>src</sourceDirectory>
35+
<defaultGoal>package</defaultGoal>
36+
<finalName>xor</finalName>
37+
<pluginManagement>
38+
<plugins>
39+
<plugin>
40+
<artifactId>maven-jar-plugin</artifactId>
41+
<version>2.3.2</version>
42+
<configuration>
43+
<outputDirectory>${xor.basedir}/library/xor</outputDirectory>
44+
</configuration>
45+
</plugin>
46+
</plugins>
47+
</pluginManagement>
48+
</build>
49+
</project>

nature-of-code/xor/src/nn/Connection.java

Lines changed: 5 additions & 5 deletions
Original file line numberDiff line numberDiff line change
@@ -10,17 +10,17 @@ public class Connection {
1010

1111
private final Neuron from; // Connection goes from. . .
1212
private final Neuron to; // To. . .
13-
private float weight; // Weight of the connection. . .
13+
private double weight; // Weight of the connection. . .
1414

1515
// Constructor builds a connection with a random weight
1616
public Connection(Neuron a_, Neuron b_) {
1717
from = a_;
1818
to = b_;
19-
weight = (float) Math.random()*2-1;
19+
weight = (double) Math.random()*2-1;
2020
}
2121

2222
// In case I want to set the weights manually, using this for testing
23-
public Connection(Neuron a_, Neuron b_, float w) {
23+
public Connection(Neuron a_, Neuron b_, double w) {
2424
from = a_;
2525
to = b_;
2626
weight = w;
@@ -34,12 +34,12 @@ public Neuron getTo() {
3434
return to;
3535
}
3636

37-
public float getWeight() {
37+
public double getWeight() {
3838
return weight;
3939
}
4040

4141
// Changing the weight of the connection
42-
public void adjustWeight(float deltaWeight) {
42+
public void adjustWeight(double deltaWeight) {
4343
weight += deltaWeight;
4444
}
4545

nature-of-code/xor/src/nn/InputNeuron.java

Lines changed: 1 addition & 1 deletion
Original file line numberDiff line numberDiff line change
@@ -16,7 +16,7 @@ public InputNeuron(int i) {
1616
super(i);
1717
}
1818

19-
public void input(float d) {
19+
public void input(double d) {
2020
output = d;
2121
}
2222

nature-of-code/xor/src/nn/Network.java

Lines changed: 11 additions & 11 deletions
Original file line numberDiff line numberDiff line change
@@ -22,7 +22,7 @@ public class Network {
2222
HiddenNeuron[] hidden;
2323
OutputNeuron output;
2424

25-
public static final float LEARNING_CONSTANT = 0.5f;
25+
public static final double LEARNING_CONSTANT = 0.5;
2626

2727
// Only One output now to start!!! (i can do better, really. . .)
2828
// Constructor makes the entire network based on number of inputs & number of neurons in hidden layer
@@ -70,7 +70,7 @@ public Network(int inputs, int hiddentotal) {
7070
}
7171

7272

73-
public float feedForward(float[] inputVals) {
73+
public double feedForward(double[] inputVals) {
7474

7575
// Feed the input with an array of inputs
7676
for (int i = 0; i < inputVals.length; i++) {
@@ -89,13 +89,13 @@ public float feedForward(float[] inputVals) {
8989
return output.getOutput();
9090
}
9191

92-
public float train(float[] inputs, float answer) {
93-
float result = feedForward(inputs);
92+
public double train(double[] inputs, double answer) {
93+
double result = feedForward(inputs);
9494

9595

9696
// This is where the error correction all starts
9797
// Derivative of sigmoid output function * diff between known and guess
98-
float deltaOutput = result*(1-result) * (answer-result);
98+
double deltaOutput = result*(1-result) * (answer-result);
9999

100100

101101
// BACKPROPAGATION
@@ -105,15 +105,15 @@ public float train(float[] inputs, float answer) {
105105
for (int i = 0; i < connections.size(); i++) {
106106
Connection c = (Connection) connections.get(i);
107107
Neuron neuron = c.getFrom();
108-
float loutput = neuron.getOutput();
109-
float deltaWeight = loutput*deltaOutput;
108+
double loutput = neuron.getOutput();
109+
double deltaWeight = loutput*deltaOutput;
110110
c.adjustWeight(LEARNING_CONSTANT*deltaWeight);
111111
}
112112

113113
// ADJUST HIDDEN WEIGHTS
114114
for (HiddenNeuron hidden1 : hidden) {
115115
connections = hidden1.getConnections();
116-
float sum = 0;
116+
double sum = 0;
117117
// Sum output delta * hidden layer connections (just one output)
118118
for (int j = 0; j < connections.size(); j++) {
119119
Connection c = (Connection) connections.get(j);
@@ -128,11 +128,11 @@ public float train(float[] inputs, float answer) {
128128
Connection c = (Connection) connections.get(j);
129129
// Is this a connection from previous layer (input) to hidden layer?
130130
if (c.getTo() == hidden1) {
131-
float loutput = hidden1.getOutput();
132-
float deltaHidden = loutput * (1 - loutput); // Derivative of sigmoid(x)
131+
double loutput = hidden1.getOutput();
132+
double deltaHidden = loutput * (1 - loutput); // Derivative of sigmoid(x)
133133
deltaHidden *= sum; // Would sum for all outputs if more than one output
134134
Neuron neuron = c.getFrom();
135-
float deltaWeight = neuron.getOutput()*deltaHidden;
135+
double deltaWeight = neuron.getOutput()*deltaHidden;
136136
c.adjustWeight(LEARNING_CONSTANT*deltaWeight);
137137
}
138138
}

nature-of-code/xor/src/nn/Neuron.java

Lines changed: 6 additions & 6 deletions
Original file line numberDiff line numberDiff line change
@@ -11,7 +11,7 @@
1111

1212
public class Neuron {
1313

14-
protected float output;
14+
protected double output;
1515
protected ArrayList<Connection> connections;
1616
protected boolean bias = false;
1717

@@ -36,8 +36,8 @@ public void calcOutput() {
3636
if (bias) {
3737
// do nothing
3838
} else {
39-
float sum = 0;
40-
float lbias = 0;
39+
double sum = 0;
40+
double lbias = 0;
4141
//System.out.println("Looking through " + connections.size() + " connections");
4242
for (int i = 0; i < connections.size(); i++) {
4343
Connection c = connections.get(i);
@@ -64,13 +64,13 @@ void addConnection(Connection c) {
6464
connections.add(c);
6565
}
6666

67-
float getOutput() {
67+
double getOutput() {
6868
return output;
6969
}
7070

7171
// Sigmoid function
72-
public static float f(float x) {
73-
return 1.0f / (1.0f + (float) Math.exp(-x));
72+
public static double f(double x) {
73+
return 1.0 / (1.0 + Math.exp(-x));
7474
}
7575

7676
public ArrayList<Connection> getConnections() {

0 commit comments

Comments
 (0)