diff --git a/nn/data.js b/nn/data.js new file mode 100644 index 0000000..8b96108 --- /dev/null +++ b/nn/data.js @@ -0,0 +1,23 @@ +var Tensor = require('../tensor.js'); +var U = module.exports = {} + +U.toTensor = function (dim, flatArray) { + return new Tensor(dim).fromFlatArray(flatArray) +} + +U.tensor1d = function (array) { + return new Tensor([array.length]).fromFlatArray(array) +} + +U.loadData = function (data) { + var tensorData = [] + for (var i = 0; i < data.length; i++) { + var x = U.tensor1d(data[i].input) + var y = U.tensor1d(data[i].output) + tensorData.push({ + input: x, + output: y + }) + } + return tensorData +} diff --git a/nn/index.js b/nn/index.js index 313a119..0244972 100644 --- a/nn/index.js +++ b/nn/index.js @@ -20,7 +20,8 @@ var pooling = require('./networks/pooling.js'); var activation = require('./networks/activation.js'); var perceptron = require('./networks/perceptron.js'); var misc = require('./networks/misc.js'); +var data = require('./data.js'); module.exports = utils.mergeObjects(module.exports, - lifting, composition, lifted, linear, convolution, pooling, activation, perceptron, misc + lifting, composition, lifted, linear, convolution, pooling, activation, perceptron, misc, data ); \ No newline at end of file diff --git a/opt/optimize.js b/opt/optimize.js index 350fd11..6a6e9f3 100644 --- a/opt/optimize.js +++ b/opt/optimize.js @@ -25,7 +25,7 @@ function optimize(fn, options) { assert(rets.gradients !== undefined); assert(rets.parameters !== undefined); // TODO: assert that there exists a parameter for each gradient? 
- method(rets.gradients, rets.parameters, i); + var mrets = method(rets.gradients, rets.parameters, i); if (verbose) { console.log('[optimize] done iteration ' + (i+1) + '/' + iters); } @@ -78,6 +78,7 @@ function adTrain(fn, trainingData, lossFn, options) { var trainingDatum = trainingData[idx]; var rets = fn(trainingDatum.input); var loss = lossFn(rets.output, trainingDatum.output); + if (options.verbose) console.log(' loss=%d', loss.x); return { parameters: rets.parameters, loss: loss diff --git a/test/mlp.js.bak b/test/mlp.js.bak new file mode 100644 index 0000000..38fabc6 --- /dev/null +++ b/test/mlp.js.bak @@ -0,0 +1,44 @@ +var Tensor = require('../tensor'); +var ad = require('../ad'); +var nn = require('../nn'); +var opt = require('../opt'); + +var nInputs = 6 +var nHidden = [4, 4, 5] +var nOutput = 2 + +var net = nn.sequence([ + nn.linear(nInputs, nHidden[0]), + nn.sigmoid, + nn.linear(nHidden[0], nHidden[1]), + nn.sigmoid, + nn.linear(nHidden[1], nOutput), + nn.softmax +]) + +var data = [ + {input: [0.4, 0.5, 0.5, 0., 0., 0.], output: [1]}, + {input: [0.5, 0.3, 0.5, 0., 0., 0.], output: [1]}, + {input: [0.4, 0.5, 0.5, 0., 0., 0.], output: [1]}, + {input: [0., 0., 0.5, 0.3, 0.5, 0.], output: [0]}, + {input: [0., 0., 0.5, 0.4, 0.5, 0.], output: [0]}, + {input: [0., 0., 0.5, 0.5, 0.5, 0.], output: [0]} +] + +var trainingData = nn.loadData(data) + +console.log('nnTrain') +opt.nnTrain(net, trainingData, opt.classificationLoss, { + batchSize: 1, // training fails once batch size exceeds 3 -- why?
+ iterations: 1000, + method: opt.sgd({ stepSize: 1, stepSizeDecay: 0.999 }), + verbose: true +}) + +console.log('predict') +// Predict class probabilities for new, unseen features +for (let i = 0; i < trainingData.length; i++) { + var input = trainingData[i].input + var probs = net.eval(input) + console.log('input=%j\noutput=%j', input, probs) +} diff --git a/test/mlpReg.js b/test/mlpReg.js new file mode 100644 index 0000000..a2bb309 --- /dev/null +++ b/test/mlpReg.js @@ -0,0 +1,43 @@ +var Tensor = require('../tensor'); +var ad = require('../ad'); +var nn = require('../nn'); +var opt = require('../opt'); + +var nInputs = 6 +var nHidden = [4, 4, 5] +var nOutput = 2 + +var net = nn.sequence([ + nn.linear(nInputs, nHidden[0]), + nn.sigmoid, + nn.linear(nHidden[0], nHidden[1]), + nn.sigmoid, + nn.linear(nHidden[1], nOutput), +]) + +var data = [ + {input: [0.4, 0.5, 0.5, 0., 0., 0.], output: [1, 0]}, + {input: [0.5, 0.3, 0.5, 0., 0., 0.], output: [1, 0]}, + {input: [0.4, 0.5, 0.5, 0., 0., 0.], output: [1, 0]}, + {input: [0., 0., 0.5, 0.3, 0.5, 0.], output: [0, 1]}, + {input: [0., 0., 0.5, 0.4, 0.5, 0.], output: [0, 1]}, + {input: [0., 0., 0.5, 0.5, 0.5, 0.], output: [0, 1]} +] + +var trainingData = nn.loadData(data) + +console.log('nnTrain') +opt.nnTrain(net, trainingData, opt.regressionLoss, { + batchSize: 1, // training fails once batch size exceeds 3 -- why?
+ iterations: 1000, + method: opt.sgd({ stepSize: 1, stepSizeDecay: 0.999 }), + verbose: true +}) + +console.log('predict') +// Predict regression outputs for new, unseen inputs +for (let i = 0; i < trainingData.length; i++) { + var input = trainingData[i].input + var probs = net.eval(input) + console.log('input=%j\noutput=%j', input, probs) +} diff --git a/test/mlpXor.js b/test/mlpXor.js new file mode 100644 index 0000000..694ee67 --- /dev/null +++ b/test/mlpXor.js @@ -0,0 +1,38 @@ +var Tensor = require('../tensor'); +var ad = require('../ad'); +var nn = require('../nn'); +var opt = require('../opt'); + +var nInputs = 2 +var nHidden = 5 +var nClasses = 2 + +var net = nn.sequence([ + nn.linear(nInputs, nHidden), + nn.tanh, + nn.linear(nHidden, nClasses), + nn.softmax +]) + +var data = [{ input: [0, 0], output: [0] }, + { input: [0, 1], output: [1] }, + { input: [1, 0], output: [1] }, + { input: [1, 1], output: [0] }] + +var trainingData = nn.loadData(data) + +console.log('nnTrain') +opt.nnTrain(net, trainingData, opt.classificationLoss, { + batchSize: 2, + iterations: 500, + method: opt.sgd({ stepSize: 1, stepSizeDecay: 0.999 }), + verbose: true +}) + +console.log('predict') +// Predict class probabilities for new, unseen features +for (let i = 0; i < trainingData.length; i++) { + var input = trainingData[i].input + var probs = net.eval(input) + console.log('input=%j\nprobs=%j', input, probs) +}