diff --git a/NeuralNet.podspec b/NeuralNet.podspec new file mode 100644 index 0000000..a83ca84 --- /dev/null +++ b/NeuralNet.podspec @@ -0,0 +1,90 @@ +# +# Be sure to run `pod spec lint NeuralNet.podspec' to ensure this is a +# valid spec and to remove all comments including this before submitting the spec. +# +# To learn more about Podspec attributes see https://docs.cocoapods.org/specification.html +# To see working Podspecs in the CocoaPods repo see https://github.com/CocoaPods/Specs/ +# + +Pod::Spec.new do |spec| + + spec.name = "NeuralNet" + spec.version = "0.0.1" + spec.summary = "An artificial neural network written in Swift" + + spec.description = "An artificial neural network written in Swift" + + spec.homepage = "https://github.com/Swift-AI/Swift-AI" + + spec.license = "MIT" + + spec.author = { "Collin Hundley" => "collinhundley@gmail.com" } + + # spec.platform = :ios + # spec.platform = :ios, "5.0" + + # When using multiple platforms + # spec.ios.deployment_target = "5.0" + # spec.osx.deployment_target = "10.7" + # spec.watchos.deployment_target = "2.0" + # spec.tvos.deployment_target = "9.0" + + + # ――― Source Location ―――――――――――――――――――――――――――――――――――――――――――――――――――――――――― # + # + # Specify the location from where the source should be retrieved. + # Supports git, hg, bzr, svn and HTTP. + # + + spec.source = { :git => "https://github.com/Swift-AI/NeuralNet.git" } + + + # ――― Source Code ―――――――――――――――――――――――――――――――――――――――――――――――――――――――――――――― # + # + # CocoaPods is smart about how it includes source code. For source files + # giving a folder will include any swift, h, m, mm, c & cpp files. + # For header files it will include any header in the folder. + # Not including the public_header_files will make all headers public. + # + + spec.source_files = 'Sources/**/*.swift' + + # ――― Resources ―――――――――――――――――――――――――――――――――――――――――――――――――――――――――――――――― # + # + # A list of resources included with the Pod. 
These are copied into the + # target bundle with a build phase script. Anything else will be cleaned. + # You can preserve files from being cleaned, please don't preserve + # non-essential files like tests, examples and documentation. + # + + # spec.resource = "icon.png" + # spec.resources = "Resources/*.png" + + # spec.preserve_paths = "FilesToSave", "MoreFilesToSave" + + + # ――― Project Linking ―――――――――――――――――――――――――――――――――――――――――――――――――――――――――― # + # + # Link your library with frameworks, or libraries. Libraries do not include + # the lib prefix of their name. + # + + # spec.framework = "SomeFramework" + # spec.frameworks = "SomeFramework", "AnotherFramework" + + # spec.library = "iconv" + # spec.libraries = "iconv", "xml2" + + + # ――― Project Settings ――――――――――――――――――――――――――――――――――――――――――――――――――――――――― # + # + # If your library depends on compiler flags you can set them in the xcconfig hash + # where they will only apply to your library. If you depend on other Podspecs + # you can include multiple dependencies to ensure it works. + + # spec.requires_arc = true + + # spec.xcconfig = { "HEADER_SEARCH_PATHS" => "$(SDKROOT)/usr/include/libxml2" } + # spec.dependency "JSONKit", "~> 1.4" + +end diff --git a/Sources/Activation.swift b/Sources/Activation.swift index e76f024..349440c 100644 --- a/Sources/Activation.swift +++ b/Sources/Activation.swift @@ -302,7 +302,9 @@ public extension NeuralNet { // TODO break case .sigmoid: - result = zip(real, target).map{(-$0 * (1 - $0) * ($1 - $0))} + result = zip(real, target).map { (real: Float, target: Float) in + (-real * (1 - real) * (target - real)) + } case .softmax: vDSP_vsub(target, 1, real, 1, diff --git a/Sources/NeuralNet.swift b/Sources/NeuralNet.swift index 8933818..a9dd3f4 100644 --- a/Sources/NeuralNet.swift +++ b/Sources/NeuralNet.swift @@ -74,7 +74,7 @@ public final class NeuralNet { // MARK: Initialization - public init(structure: Structure, weights: [[Float]]? 
= nil) throws { + public init(structure: Structure, weights: [[Float]]? = nil, biases: [[Float]]? = nil) throws { // Initialize basic properties self.numLayers = structure.numLayers self.layerNodeCounts = structure.layerNodeCounts @@ -94,8 +94,30 @@ public final class NeuralNet { } else { randomizeWeights() } + + if let biases = biases { + try self.setBiases(biases) + } + } + + /// Creates a new neural net by copying another neural net and modifying the batch size + /// This is useful if you use a large batch size to train the model but then want to infer with a smaller batch size (for instance 1) + public init(neuralNet: NeuralNet, structure: Structure, batchSize: Int) throws { + self.numLayers = neuralNet.numLayers + self.layerNodeCounts = neuralNet.layerNodeCounts + self.batchSize = batchSize + self.hiddenActivation = neuralNet.hiddenActivation + self.outputActivation = neuralNet.outputActivation + self.learningRate = neuralNet.learningRate + self.momentumFactor = neuralNet.momentumFactor + self.adjustedLearningRate = neuralNet.adjustedLearningRate + + // Initialize computed properties and caches + self.cache = Cache(structure: structure) + + try self.setWeights(neuralNet.allWeights()) + self.cache.layerBiases = neuralNet.cache.layerBiases } - } @@ -118,6 +140,22 @@ public extension NeuralNet { public func allWeights() -> [[Float]] { return cache.layerWeights } + + /// Resets the network with the given biases (i.e. from a pre-trained network). + /// This change may safely be performed at any time. + /// + /// - Parameter biases: A 2D array of biases corresponding to each layer in the network. + public func setBiases(_ biases: [[Float]]) throws { + // TODO: ensure valid number of biases + + // Reset all biases in the network + cache.layerBiases = biases + } + + /// Returns an array of the network's current biases for each layer. + public func allBiases() -> [[Float]] { + return cache.layerBiases + } /// Randomizes all of the network's weights. 
fileprivate func randomizeWeights() { @@ -474,12 +512,16 @@ public extension NeuralNet { /// The handler must return a `Bool` indicating whether training should continue. /// If `false` is returned, the training routine will exit immediately and return. /// The user may implement this block to monitor the training progress, tune network parameters, - /// or perform any other logic desired. + /// or perform any other logic desired. NOTE: The validation error and training error are both + /// returned here, but the training error is taken before back propagation so may be slightly + /// lower than the actual number. In practice, this doesn't matter and is a performance + /// improvement, so you can probably ignore this fact. /// - Returns: The total number of training epochs performed, and the final validation error. /// - Throws: An error if invalid data is provided. Checks are performed in advance to avoid problems during the training cycle. + @discardableResult public func train(_ data: Dataset, maxEpochs: Int, errorThreshold: Float, errorFunction: ErrorFunction, - epochCallback: ((_ epoch: Int, _ error: Float) -> Bool)?) throws -> (epochs: Int, error: Float) { + epochCallback: ((_ epoch: Int, _ validationError: Float, _ trainingError: Float) -> Bool)?) 
throws -> (epochs: Int, error: Float) { // Ensure valid error threshold guard errorThreshold > 0 else { throw Error.train("Training error threshold must be greater than zero.") @@ -499,14 +541,25 @@ public extension NeuralNet { // Reserve space for serializing all validation set outputs var validationOutputs = [Float](repeatElement(0, count: outputLength * batchSize * numBatches)) + + // Serialize all training labels into a single array + let trainLabels = data.trainLabels.reduce([], +) + + // Also, calculate error on the training set to report that as well + var trainingOutputs = [Float](repeatElement(0, count: outputLength * batchSize * data.trainInputs.count)) // Train until the desired error threshold is met or the max number of epochs has been executed var epochs = 1 while true { // Complete one full training epoch - for (batchinputs, batchLabels) in zip(data.trainInputs, data.trainLabels) { - try infer(batchinputs) + for (batchIndex, (batchinputs, batchLabels)) in zip(data.trainInputs, data.trainLabels).enumerated() { + let outputs = try infer(batchinputs) try backpropagate(batchLabels) + + for i in 0..