Skip to content
Open
Show file tree
Hide file tree
Changes from all commits
Commits
File filter

Filter by extension

Filter by extension

Conversations
Failed to load comments.
Loading
Jump to
Jump to file
Failed to load files.
Loading
Diff view
Diff view
90 changes: 90 additions & 0 deletions NeuralNet.podspec
Original file line number Diff line number Diff line change
@@ -0,0 +1,90 @@
#
# Be sure to run `pod spec lint NeuralNet.podspec' to ensure this is a
# valid spec and to remove all comments including this before submitting the spec.
#
# To learn more about Podspec attributes see https://docs.cocoapods.org/specification.html
# To see working Podspecs in the CocoaPods repo see https://github.com/CocoaPods/Specs/
#

Pod::Spec.new do |spec|

spec.name = "NeuralNet"
spec.version = "0.0.1"
spec.summary = "An artificial neural network written in Swift"

# NOTE: CocoaPods emits a lint warning when the description is identical to
# the summary, so the description is intentionally longer.
spec.description = "NeuralNet is a fully connected, feed-forward artificial neural network implementation written in Swift, part of the Swift AI project."

spec.homepage = "https://github.com/Swift-AI/Swift-AI"

spec.license = "MIT"

spec.author = { "Collin Hundley" => "collinhundley@gmail.com" }

# spec.platform = :ios
# spec.platform = :ios, "5.0"

# When using multiple platforms
# spec.ios.deployment_target = "5.0"
# spec.osx.deployment_target = "10.7"
# spec.watchos.deployment_target = "2.0"
# spec.tvos.deployment_target = "9.0"


# ――― Source Location ―――――――――――――――――――――――――――――――――――――――――――――――――――――――――― #
#
# Specify the location from where the source should be retrieved.
# Supports git, hg, bzr, svn and HTTP.
#

# A git source must pin a tag/commit, otherwise `pod spec lint` fails with
# "Git sources should specify a tag". The tag is assumed to match the spec
# version exactly — NOTE(review): confirm the repository's tag naming scheme
# (e.g. "0.0.1" vs "v0.0.1") before publishing.
spec.source = { :git => "https://github.com/Swift-AI/NeuralNet.git", :tag => spec.version.to_s }


# ――― Source Code ―――――――――――――――――――――――――――――――――――――――――――――――――――――――――――――― #
#
# CocoaPods is smart about how it includes source code. For source files
# giving a folder will include any swift, h, m, mm, c & cpp files.
# For header files it will include any header in the folder.
# Not including the public_header_files will make all headers public.
#

spec.source_files = 'Sources/**/*.swift'

# ――― Resources ―――――――――――――――――――――――――――――――――――――――――――――――――――――――――――――――― #
#
# A list of resources included with the Pod. These are copied into the
# target bundle with a build phase script. Anything else will be cleaned.
# You can preserve files from being cleaned, please don't preserve
# non-essential files like tests, examples and documentation.
#

# spec.resource = "icon.png"
# spec.resources = "Resources/*.png"

# spec.preserve_paths = "FilesToSave", "MoreFilesToSave"


# ――― Project Linking ―――――――――――――――――――――――――――――――――――――――――――――――――――――――――― #
#
# Link your library with frameworks, or libraries. Libraries do not include
# the lib prefix of their name.
#

# spec.framework = "SomeFramework"
# spec.frameworks = "SomeFramework", "AnotherFramework"

# spec.library = "iconv"
# spec.libraries = "iconv", "xml2"


# ――― Project Settings ――――――――――――――――――――――――――――――――――――――――――――――――――――――――― #
#
# If your library depends on compiler flags you can set them in the xcconfig hash
# where they will only apply to your library. If you depend on other Podspecs
# you can include multiple dependencies to ensure it works.

# spec.requires_arc = true

# spec.xcconfig = { "HEADER_SEARCH_PATHS" => "$(SDKROOT)/usr/include/libxml2" }
# spec.dependency "JSONKit", "~> 1.4"

end
4 changes: 3 additions & 1 deletion Sources/Activation.swift
Original file line number Diff line number Diff line change
Expand Up @@ -302,7 +302,9 @@ public extension NeuralNet {
// TODO
break
case .sigmoid:
result = zip(real, target).map{(-$0 * (1 - $0) * ($1 - $0))}
result = zip(real, target).map { (real: Float, target: Float) in
(-real * (1 - real) * (target - real))
}
case .softmax:
vDSP_vsub(target, 1,
real, 1,
Expand Down
70 changes: 63 additions & 7 deletions Sources/NeuralNet.swift
Original file line number Diff line number Diff line change
Expand Up @@ -74,7 +74,7 @@ public final class NeuralNet {

// MARK: Initialization

public init(structure: Structure, weights: [[Float]]? = nil) throws {
public init(structure: Structure, weights: [[Float]]? = nil, biases: [[Float]]? = nil) throws {
// Initialize basic properties
self.numLayers = structure.numLayers
self.layerNodeCounts = structure.layerNodeCounts
Expand All @@ -94,8 +94,30 @@ public final class NeuralNet {
} else {
randomizeWeights()
}

if let biases = biases {
try self.setBiases(biases)
}
}

/// Creates a new neural net by copying another neural net and modifying the batch size.
/// This is useful if you use a large batch size to train the model but then want to infer
/// with a smaller batch size (for instance 1).
///
/// - Parameters:
///   - neuralNet: The existing network whose configuration, weights and biases are copied.
///   - structure: The structure used to build the new network's caches.
///     NOTE(review): assumed to describe the same layer/node layout as `neuralNet`,
///     differing only in batch size — no validation is performed here; confirm at the call site.
///   - batchSize: The batch size stored on the new network. NOTE(review): presumably this
///     should agree with `structure`'s batch size; verify, since both are passed independently.
/// - Throws: Rethrows any error from `setWeights` when applying the copied weights.
public init(neuralNet: NeuralNet, structure: Structure, batchSize: Int) throws {
// Copy scalar configuration from the source network.
self.numLayers = neuralNet.numLayers
self.layerNodeCounts = neuralNet.layerNodeCounts
self.batchSize = batchSize
self.hiddenActivation = neuralNet.hiddenActivation
self.outputActivation = neuralNet.outputActivation
self.learningRate = neuralNet.learningRate
self.momentumFactor = neuralNet.momentumFactor
self.adjustedLearningRate = neuralNet.adjustedLearningRate

// Initialize computed properties and caches
self.cache = Cache(structure: structure)

// Copy learned parameters: weights via the validated setter, biases directly
// from the source cache (biases bypass `setBiases` validation here).
try self.setWeights(neuralNet.allWeights())
self.cache.layerBiases = neuralNet.cache.layerBiases
}

}


Expand All @@ -118,6 +140,22 @@ public extension NeuralNet {
public func allWeights() -> [[Float]] {
return cache.layerWeights
}

/// Resets the network with the given biases (i.e. from a pre-trained network).
/// This change may safely be performed at any time.
///
/// - Parameter biases: A 2D array of biases corresponding to each layer in the network.
/// - Throws: `Error.initialization` if the number of bias arrays does not match the
///   number of layers currently stored in the cache.
public func setBiases(_ biases: [[Float]]) throws {
    // Validate before mutating state: exactly one bias array is required per layer.
    // (Resolves the earlier TODO. Only the outer count is checked here —
    // NOTE(review): consider also validating each inner array's length against
    // the corresponding layer's node count.)
    guard biases.count == cache.layerBiases.count else {
        throw Error.initialization("Invalid number of bias arrays provided: \(biases.count). Expected: \(cache.layerBiases.count).")
    }

    // Replace all biases in the network
    cache.layerBiases = biases
}

/// Returns an array of the network's current biases for each layer.
///
/// The outer array contains one entry per layer; the values are read directly
/// from the network's cache (`cache.layerBiases`).
///
/// - Returns: A 2D array of per-layer bias values.
public func allBiases() -> [[Float]] {
return cache.layerBiases
}

/// Randomizes all of the network's weights.
fileprivate func randomizeWeights() {
Expand Down Expand Up @@ -474,12 +512,16 @@ public extension NeuralNet {
/// The handler must return a `Bool` indicating whether training should continue.
/// If `false` is returned, the training routine will exit immediately and return.
/// The user may implement this block to monitor the training progress, tune network parameters,
/// or perform any other logic desired.
/// or perform any other logic desired. NOTE: The validation error and training error are both
/// returned here, but the training error is taken before back propagation so may be slightly
/// lower than the actual number. In practice, this doesn't matter and is a performance
/// improvement, so you can probably ignore this fact.
/// - Returns: The total number of training epochs performed, and the final validation error.
/// - Throws: An error if invalid data is provided. Checks are performed in advance to avoid problems during the training cycle.
@discardableResult
public func train(_ data: Dataset, maxEpochs: Int,
errorThreshold: Float, errorFunction: ErrorFunction,
epochCallback: ((_ epoch: Int, _ error: Float) -> Bool)?) throws -> (epochs: Int, error: Float) {
epochCallback: ((_ epoch: Int, _ validationError: Float, _ trainingError: Float) -> Bool)?) throws -> (epochs: Int, error: Float) {
// Ensure valid error threshold
guard errorThreshold > 0 else {
throw Error.train("Training error threshold must be greater than zero.")
Expand All @@ -499,14 +541,25 @@ public extension NeuralNet {

// Reserve space for serializing all validation set outputs
var validationOutputs = [Float](repeatElement(0, count: outputLength * batchSize * numBatches))

// Serialize all training labels into a single array
let trainLabels = data.trainLabels.reduce([], +)

// Also, calculate error on the training set to report that as well
var trainingOutputs = [Float](repeatElement(0, count: outputLength * batchSize * data.trainInputs.count))

// Train until the desired error threshold is met or the max number of epochs has been executed
var epochs = 1
while true {
// Complete one full training epoch
for (batchinputs, batchLabels) in zip(data.trainInputs, data.trainLabels) {
try infer(batchinputs)
for (batchIndex, (batchinputs, batchLabels)) in zip(data.trainInputs, data.trainLabels).enumerated() {
let outputs = try infer(batchinputs)
try backpropagate(batchLabels)

for i in 0..<batchOutputLength {
let idx = batchIndex * batchOutputLength + i
trainingOutputs[idx] = outputs[i]
}
}

// Perform inference on the full validation set
Expand All @@ -518,13 +571,16 @@ public extension NeuralNet {
validationOutputs[idx] = outputs[i]
}
}

let trainingError = errorFunction.computeError(real: trainingOutputs, target: trainLabels,
rows: batchSize * data.trainInputs.count, cols: outputLength)

// Calculate error on the whole validation set
let error = errorFunction.computeError(real: validationOutputs, target: validationLabels,
rows: batchSize * numBatches, cols: outputLength)

// Notify callback of a newly-completed epoch; halt training if requested
if let toContinue = epochCallback?(epochs, error), toContinue == false {
if let toContinue = epochCallback?(epochs, error, trainingError), toContinue == false {
return (epochs, error)
}

Expand Down
17 changes: 11 additions & 6 deletions Sources/Storage.swift
Original file line number Diff line number Diff line change
Expand Up @@ -29,6 +29,7 @@ public extension NeuralNet {
static let hiddenActivationKey = "hiddenActivation"
static let outputActivationKey = "outputActivation"
static let weightsKey = "weights"
static let biasesKey = "biases"


/// Attempts to initialize a `NeuralNet` from a file stored at the given URL.
Expand All @@ -44,12 +45,13 @@ public extension NeuralNet {

// Read all required values from JSON
guard let layerNodeCounts = array[NeuralNet.layerNodeCountsKey] as? [Int],
let lr = array[NeuralNet.learningRateKey] as? Float,
let momentum = array[NeuralNet.momentumKey] as? Float,
let lr = array[NeuralNet.learningRateKey] as? Double,
let momentum = array[NeuralNet.momentumKey] as? Double,
let batchSize = array[NeuralNet.batchSizeKey] as? Int,
let hiddenActivationStr = array[NeuralNet.hiddenActivationKey] as? String,
let outputActivationStr = array[NeuralNet.outputActivationKey] as? String,
let weights = array[NeuralNet.weightsKey] as? [[Float]]
let weights = array[NeuralNet.weightsKey] as? [[Double]],
let biases = array[NeuralNet.biasesKey] as? [[Double]]
else {
throw Error.initialization("One or more required NeuralNet properties are missing.")
}
Expand Down Expand Up @@ -87,10 +89,12 @@ public extension NeuralNet {
// Recreate Structure object
let structure = try Structure(nodes: layerNodeCounts,
hiddenActivation: hiddenActivation, outputActivation: outputActivation,
batchSize: batchSize, learningRate: lr, momentum: momentum)
batchSize: batchSize, learningRate: Float(lr), momentum: Float(momentum))

// Initialize neural network
try self.init(structure: structure, weights: weights)
let floatWeights = weights.map { $0.map { Float($0) }}
let floatBiases = biases.map { $0.map { Float($0) }}
try self.init(structure: structure, weights: floatWeights, biases: floatBiases)
}


Expand All @@ -104,7 +108,8 @@ public extension NeuralNet {
NeuralNet.batchSizeKey : batchSize,
NeuralNet.hiddenActivationKey : hiddenActivation.stringValue(),
NeuralNet.outputActivationKey : outputActivation.stringValue(),
NeuralNet.weightsKey : allWeights()
NeuralNet.weightsKey : allWeights(),
NeuralNet.biasesKey : allBiases()
]

// Serialize array into JSON data
Expand Down